mirror of https://github.com/dart-lang/sdk synced 2024-07-08 12:06:26 +00:00

Revert Reland '[vm/ffi] Implement FFI callbacks on AOT for ELF and Asm snapshots (excl. blobs).' as it breaks Flutter profile execution on the iOS 32-bit platform.

Fixes https://github.com/flutter/flutter/issues/40114

Change-Id: If8d71e9c19c2e794d29f7ecbacb87457890a2fd5
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/116883
Reviewed-by: Alexander Markov <alexmarkov@google.com>
Commit-Queue: Alexander Aprelev <aam@google.com>
Authored by Alexander Aprelev on 2019-09-12 00:10:09 +00:00; committed by commit-bot@chromium.org
parent 1d01d91c33
commit 0c481a196c
58 changed files with 360 additions and 2118 deletions


@ -40,23 +40,19 @@ export '../fasta/fasta_codes.dart'
show
LocatedMessage,
messageBytecodeLimitExceededTooManyArguments,
messageFfiExceptionalReturnNull,
messageFfiExpectedConstant,
noLength,
templateFfiDartTypeMismatch,
templateFfiExpectedExceptionalReturn,
templateFfiExpectedNoExceptionalReturn,
templateFfiExtendsOrImplementsSealedClass,
templateFfiFieldAnnotation,
templateFfiFieldInitializer,
templateFfiFieldNoAnnotation,
templateFfiNotStatic,
templateFfiStructGeneric,
templateFfiTypeInvalid,
templateFfiTypeMismatch,
templateFfiTypeUnsized,
templateFfiWrongStructInheritance,
templateIllegalRecursiveType;
templateFfiFieldInitializer,
templateIllegalRecursiveType,
templateFfiDartTypeMismatch,
templateFfiExtendsOrImplementsSealedClass,
templateFfiStructGeneric,
templateFfiWrongStructInheritance;
export '../fasta/hybrid_file_system.dart' show HybridFileSystem;


@ -3754,81 +3754,6 @@ Message _withArgumentsFfiDartTypeMismatch(DartType _type, DartType _type2) {
arguments: {'type': _type, 'type2': _type2});
}
// DO NOT EDIT. THIS FILE IS GENERATED. SEE TOP OF FILE.
const Code<Null> codeFfiExceptionalReturnNull = messageFfiExceptionalReturnNull;
// DO NOT EDIT. THIS FILE IS GENERATED. SEE TOP OF FILE.
const MessageCode messageFfiExceptionalReturnNull = const MessageCode(
"FfiExceptionalReturnNull",
message: r"""Exceptional return value must not be null.""");
// DO NOT EDIT. THIS FILE IS GENERATED. SEE TOP OF FILE.
const Code<Null> codeFfiExpectedConstant = messageFfiExpectedConstant;
// DO NOT EDIT. THIS FILE IS GENERATED. SEE TOP OF FILE.
const MessageCode messageFfiExpectedConstant = const MessageCode(
"FfiExpectedConstant",
message: r"""Exceptional return value must be a constant.""");
// DO NOT EDIT. THIS FILE IS GENERATED. SEE TOP OF FILE.
const Template<
Message Function(
DartType
_type)> templateFfiExpectedExceptionalReturn = const Template<
Message Function(DartType _type)>(
messageTemplate:
r"""Expected an exceptional return value for a native callback returning '#type'.""",
withArguments: _withArgumentsFfiExpectedExceptionalReturn);
// DO NOT EDIT. THIS FILE IS GENERATED. SEE TOP OF FILE.
const Code<Message Function(DartType _type)> codeFfiExpectedExceptionalReturn =
const Code<Message Function(DartType _type)>(
"FfiExpectedExceptionalReturn",
templateFfiExpectedExceptionalReturn,
);
// DO NOT EDIT. THIS FILE IS GENERATED. SEE TOP OF FILE.
Message _withArgumentsFfiExpectedExceptionalReturn(DartType _type) {
TypeLabeler labeler = new TypeLabeler();
List<Object> typeParts = labeler.labelType(_type);
String type = typeParts.join();
return new Message(codeFfiExpectedExceptionalReturn,
message:
"""Expected an exceptional return value for a native callback returning '${type}'.""" +
labeler.originMessages,
arguments: {'type': _type});
}
// DO NOT EDIT. THIS FILE IS GENERATED. SEE TOP OF FILE.
const Template<
Message Function(
DartType
_type)> templateFfiExpectedNoExceptionalReturn = const Template<
Message Function(DartType _type)>(
messageTemplate:
r"""Exceptional return value cannot be provided for a native callback returning '#type'.""",
withArguments: _withArgumentsFfiExpectedNoExceptionalReturn);
// DO NOT EDIT. THIS FILE IS GENERATED. SEE TOP OF FILE.
const Code<Message Function(DartType _type)>
codeFfiExpectedNoExceptionalReturn =
const Code<Message Function(DartType _type)>(
"FfiExpectedNoExceptionalReturn",
templateFfiExpectedNoExceptionalReturn,
);
// DO NOT EDIT. THIS FILE IS GENERATED. SEE TOP OF FILE.
Message _withArgumentsFfiExpectedNoExceptionalReturn(DartType _type) {
TypeLabeler labeler = new TypeLabeler();
List<Object> typeParts = labeler.labelType(_type);
String type = typeParts.join();
return new Message(codeFfiExpectedNoExceptionalReturn,
message:
"""Exceptional return value cannot be provided for a native callback returning '${type}'.""" +
labeler.originMessages,
arguments: {'type': _type});
}
// DO NOT EDIT. THIS FILE IS GENERATED. SEE TOP OF FILE.
const Template<Message Function(String name)>
templateFfiExtendsOrImplementsSealedClass =
@ -3859,7 +3784,7 @@ const Template<
Message Function(String name)> templateFfiFieldAnnotation = const Template<
Message Function(String name)>(
messageTemplate:
r"""Field '#name' requires exactly one annotation to declare its native type, which cannot be Void. dart:ffi Structs cannot have regular Dart fields.""",
r"""Field '#name' requires exactly one annotation to declare its C++ type, which cannot be Void. dart:ffi Structs cannot have regular Dart fields.""",
withArguments: _withArgumentsFfiFieldAnnotation);
// DO NOT EDIT. THIS FILE IS GENERATED. SEE TOP OF FILE.
@ -3875,7 +3800,7 @@ Message _withArgumentsFfiFieldAnnotation(String name) {
name = demangleMixinApplicationName(name);
return new Message(codeFfiFieldAnnotation,
message:
"""Field '${name}' requires exactly one annotation to declare its native type, which cannot be Void. dart:ffi Structs cannot have regular Dart fields.""",
"""Field '${name}' requires exactly one annotation to declare its C++ type, which cannot be Void. dart:ffi Structs cannot have regular Dart fields.""",
arguments: {'name': name});
}
@ -3911,7 +3836,7 @@ const Template<
name)> templateFfiFieldNoAnnotation = const Template<
Message Function(String name)>(
messageTemplate:
r"""Field '#name' requires no annotation to declare its native type, it is a Pointer which is represented by the same type in Dart and native code.""",
r"""Field '#name' requires no annotation to declare its C++ type, it is a Pointer which is represented by the same type in Dart and C++.""",
withArguments: _withArgumentsFfiFieldNoAnnotation);
// DO NOT EDIT. THIS FILE IS GENERATED. SEE TOP OF FILE.
@ -3927,7 +3852,7 @@ Message _withArgumentsFfiFieldNoAnnotation(String name) {
name = demangleMixinApplicationName(name);
return new Message(codeFfiFieldNoAnnotation,
message:
"""Field '${name}' requires no annotation to declare its native type, it is a Pointer which is represented by the same type in Dart and native code.""",
"""Field '${name}' requires no annotation to declare its C++ type, it is a Pointer which is represented by the same type in Dart and C++.""",
arguments: {'name': name});
}
@ -3936,7 +3861,7 @@ const Template<
Message Function(String name)> templateFfiNotStatic = const Template<
Message Function(String name)>(
messageTemplate:
r"""#name expects a static function as parameter. dart:ffi only supports calling static Dart functions from native code.""",
r"""#name expects a static function as parameter. dart:ffi only supports calling static Dart functions from c.""",
withArguments: _withArgumentsFfiNotStatic);
// DO NOT EDIT. THIS FILE IS GENERATED. SEE TOP OF FILE.
@ -3952,7 +3877,7 @@ Message _withArgumentsFfiNotStatic(String name) {
name = demangleMixinApplicationName(name);
return new Message(codeFfiNotStatic,
message:
"""${name} expects a static function as parameter. dart:ffi only supports calling static Dart functions from native code.""",
"""${name} expects a static function as parameter. dart:ffi only supports calling static Dart functions from c.""",
arguments: {'name': name});
}


@ -265,10 +265,6 @@ FfiDartTypeMismatch/analyzerCode: Fail
FfiExtendsOrImplementsSealedClass/analyzerCode: Fail
FfiStructGeneric/analyzerCode: Fail
FfiWrongStructInheritance/analyzerCode: Fail
FfiExpectedExceptionalReturn/analyzerCode: Fail
FfiExpectedNoExceptionalReturn/analyzerCode: Fail
FfiExpectedConstant/analyzerCode: Fail
FfiExceptionalReturnNull/analyzerCode: Fail
FieldInitializedOutsideDeclaringClass/part_wrapped_script1: Fail
FieldInitializedOutsideDeclaringClass/script1: Fail
FieldInitializerOutsideConstructor/part_wrapped_script1: Fail


@ -3495,17 +3495,17 @@ FfiTypeUnsized:
FfiFieldAnnotation:
# Used by dart:ffi
template: "Field '#name' requires exactly one annotation to declare its native type, which cannot be Void. dart:ffi Structs cannot have regular Dart fields."
template: "Field '#name' requires exactly one annotation to declare its C++ type, which cannot be Void. dart:ffi Structs cannot have regular Dart fields."
external: test/ffi_test.dart
FfiFieldNoAnnotation:
# Used by dart:ffi
template: "Field '#name' requires no annotation to declare its native type, it is a Pointer which is represented by the same type in Dart and native code."
template: "Field '#name' requires no annotation to declare its C++ type, it is a Pointer which is represented by the same type in Dart and C++."
external: test/ffi_test.dart
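For illustration of the two field messages above, here is a minimal sketch using the Struct API of this SDK vintage (structs extended Struct<T> at the time); the Coordinate type is hypothetical and not part of this CL. Each non-Pointer field needs exactly one native-type annotation, while Pointer fields must not be annotated.

import 'dart:ffi';

// Hypothetical struct, used only to illustrate FfiFieldAnnotation and
// FfiFieldNoAnnotation.
class Coordinate extends Struct<Coordinate> {
  @Double()
  double x; // OK: exactly one annotation declares the native type.

  // double y;            // Missing annotation: triggers FfiFieldAnnotation.

  Pointer<Void> data;     // OK: a Pointer field carries its own native type.

  // @Int64()
  // Pointer<Void> next;  // Annotated Pointer: triggers FfiFieldNoAnnotation.
}

void main() {
  // Nothing to run; the class declaration is what the messages are about.
  print(Coordinate);
}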
FfiNotStatic:
# Used by dart:ffi
template: "#name expects a static function as parameter. dart:ffi only supports calling static Dart functions from native code."
template: "#name expects a static function as parameter. dart:ffi only supports calling static Dart functions from c."
external: test/ffi_test.dart
FfiFieldInitializer:
@ -3533,26 +3533,6 @@ FfiDartTypeMismatch:
template: "Expected '#type' to be a subtype of '#type2'."
external: test/ffi_test.dart
FfiExpectedExceptionalReturn:
# Used by dart:ffi
template: "Expected an exceptional return value for a native callback returning '#type'."
external: test/ffi_test.dart
FfiExpectedNoExceptionalReturn:
# Used by dart:ffi
template: "Exceptional return value cannot be provided for a native callback returning '#type'."
external: test/ffi_test.dart
FfiExpectedConstant:
# Used by dart:ffi
template: "Exceptional return value must be a constant."
external: test/ffi_test.dart
FfiExceptionalReturnNull:
# Used by dart:ffi
template: "Exceptional return value must not be null."
external: test/ffi_test.dart
SpreadTypeMismatch:
template: "Unexpected type '#type' of a spread. Expected 'dynamic' or an Iterable."
script:


@ -941,7 +941,6 @@ examples
exceeded
except
exception
exceptional
exceptions
exclamation
exclude


@ -186,8 +186,6 @@ class FfiTransformer extends Transformer {
final Constructor structFromPointer;
final Procedure libraryLookupMethod;
final Procedure abiMethod;
final Procedure pointerFromFunctionProcedure;
final Procedure nativeCallbackFunctionProcedure;
/// Classes corresponding to [NativeType], indexed by [NativeType].
final List<Class> nativeTypesClasses;
@ -222,10 +220,6 @@ class FfiTransformer extends Transformer {
libraryLookupMethod =
index.getMember('dart:ffi', 'DynamicLibrary', 'lookup'),
abiMethod = index.getTopLevelMember('dart:ffi', '_abi'),
pointerFromFunctionProcedure =
index.getTopLevelMember('dart:ffi', '_pointerFromFunction'),
nativeCallbackFunctionProcedure =
index.getTopLevelMember('dart:ffi', '_nativeCallbackFunction'),
nativeTypesClasses = nativeTypeClassNames
.map((name) => index.getClass('dart:ffi', name))
.toList();


@ -6,16 +6,12 @@ library vm.transformations.ffi_use_sites;
import 'package:front_end/src/api_unstable/vm.dart'
show
messageFfiExceptionalReturnNull,
messageFfiExpectedConstant,
templateFfiDartTypeMismatch,
templateFfiExpectedExceptionalReturn,
templateFfiExpectedNoExceptionalReturn,
templateFfiExtendsOrImplementsSealedClass,
templateFfiNotStatic,
templateFfiTypeInvalid,
templateFfiTypeMismatch,
templateFfiTypeUnsized;
templateFfiDartTypeMismatch,
templateFfiTypeUnsized,
templateFfiNotStatic,
templateFfiExtendsOrImplementsSealedClass;
import 'package:kernel/ast.dart';
import 'package:kernel/class_hierarchy.dart' show ClassHierarchy;
@ -59,12 +55,7 @@ class _FfiUseSiteTransformer extends FfiTransformer {
final Map<Field, Procedure> replacedGetters;
final Map<Field, Procedure> replacedSetters;
Library currentLibrary;
bool get isFfiLibrary => currentLibrary == ffiLibrary;
// Used to create private top-level fields with unique names for each
// callback.
int callbackCount = 0;
bool isFfiLibrary;
_FfiUseSiteTransformer(
LibraryIndex index,
@ -77,8 +68,7 @@ class _FfiUseSiteTransformer extends FfiTransformer {
@override
TreeNode visitLibrary(Library node) {
currentLibrary = node;
callbackCount = 0;
isFfiLibrary = node == ffiLibrary;
return super.visitLibrary(node);
}
@ -88,11 +78,6 @@ class _FfiUseSiteTransformer extends FfiTransformer {
try {
_ensureNotExtendsOrImplementsSealedClass(node);
return super.visitClass(node);
} on _FfiStaticTypeError {
// It's OK to swallow the exception because the diagnostics issued will
// cause compilation to fail. By continuing, we can report more
// diagnostics before compilation ends.
return super.visitClass(node);
} finally {
env.thisType = null;
}
@ -130,89 +115,31 @@ class _FfiUseSiteTransformer extends FfiTransformer {
Member target = node.target;
try {
if (target == fromFunctionMethod) {
final DartType nativeType =
DartType nativeType =
InterfaceType(nativeFunctionClass, [node.arguments.types[0]]);
final Expression func = node.arguments.positional[0];
final DartType dartType = func.getStaticType(env);
_ensureIsStaticFunction(func);
Expression func = node.arguments.positional[0];
DartType dartType = func.getStaticType(env);
_ensureIsStatic(func);
// TODO(36730): Allow passing/returning structs by value.
_ensureNativeTypeValid(nativeType, node);
_ensureNativeTypeToDartType(nativeType, dartType, node);
// Check `exceptionalReturn`'s type.
final FunctionType funcType = dartType;
final NativeType expectedReturn = getType(
((node.arguments.types[0] as FunctionType).returnType
as InterfaceType)
.classNode);
final Expression exceptionalReturn = node.arguments.positional[1];
final DartType returnType = exceptionalReturn.getStaticType(env);
if (expectedReturn == NativeType.kVoid ||
expectedReturn == NativeType.kPointer) {
if (node.arguments.positional.length > 1) {
diagnosticReporter.report(
templateFfiExpectedNoExceptionalReturn
.withArguments(funcType.returnType),
node.fileOffset,
1,
node.location.file);
return node;
}
node.arguments.positional.add(NullLiteral()..parent = node);
} else {
// The exceptional return value is not optional for other return
// types.
if (node.arguments.positional.length < 2) {
diagnosticReporter.report(
templateFfiExpectedExceptionalReturn
.withArguments(funcType.returnType),
node.fileOffset,
1,
node.location.file);
return node;
}
final Expression exceptionalReturn = node.arguments.positional[1];
// The exceptional return value must be a constant so that it can be
// referenced by the precompiled trampoline's object pool.
if (exceptionalReturn is! BasicLiteral &&
!(exceptionalReturn is ConstantExpression &&
exceptionalReturn.constant is PrimitiveConstant)) {
diagnosticReporter.report(messageFfiExpectedConstant,
node.fileOffset, 1, node.location.file);
return node;
}
// Moreover it may not be null.
if (exceptionalReturn is NullLiteral ||
(exceptionalReturn is ConstantExpression &&
exceptionalReturn.constant is NullConstant)) {
diagnosticReporter.report(messageFfiExceptionalReturnNull,
node.fileOffset, 1, node.location.file);
return node;
}
final DartType returnType = exceptionalReturn.getStaticType(env);
if (!env.isSubtypeOf(returnType, funcType.returnType)) {
diagnosticReporter.report(
templateFfiDartTypeMismatch.withArguments(
returnType, funcType.returnType),
exceptionalReturn.fileOffset,
1,
exceptionalReturn.location.file);
return node;
}
if (!env.isSubtypeOf(returnType, funcType.returnType)) {
diagnosticReporter.report(
templateFfiDartTypeMismatch.withArguments(
returnType, funcType.returnType),
exceptionalReturn.fileOffset,
1,
exceptionalReturn.location.file);
}
return _replaceFromFunction(node);
}
} on _FfiStaticTypeError {
// It's OK to swallow the exception because the diagnostics issued will
// cause compilation to fail. By continuing, we can report more
// diagnostics before compilation ends.
}
} on _FfiStaticTypeError {}
return node;
}
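As a user-level sketch of what the removed checks above enforce (not code from this CL), the second argument to 'fromFunction' is the exceptional return value handed back to native code if the Dart callback throws; it has to be a compile-time constant, must not be null, and must be a subtype of the callback's Dart return type.

import 'dart:ffi';

// The callback target must be a static or top-level function.
int compare(int a, int b) => a - b;

void main() {
  // 0 is a constant, non-null, and assignable to the Dart return type (int),
  // so it satisfies the checks above. A null literal would trigger
  // FfiExceptionalReturnNull, and a non-constant expression would trigger
  // FfiExpectedConstant.
  final callback =
      Pointer.fromFunction<Int32 Function(Int32, Int32)>(compare, 0);
  print(callback.address);
}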
@ -224,10 +151,10 @@ class _FfiUseSiteTransformer extends FfiTransformer {
// 'lookupFunction' are constants, so by inlining the call to 'asFunction' at
// the call-site, we ensure that there are no generic calls to 'asFunction'.
//
// We will not detect dynamic invocations of 'asFunction' and
// 'lookupFunction': these are handled by the stubs in 'ffi_patch.dart' and
// 'dynamic_library_patch.dart'. Dynamic invocations of 'lookupFunction' (and
// 'asFunction') are not legal and throw a runtime exception.
// We will not detect dynamic invocations of 'asFunction' -- these are handled
// by the stub in 'dynamic_library_patch.dart'. Dynamic invocations of
// 'lookupFunction' (and 'asFunction') are not legal and throw a runtime
// exception.
Expression _replaceLookupFunction(MethodInvocation node) {
// The generated code looks like:
//
@ -249,39 +176,6 @@ class _FfiUseSiteTransformer extends FfiTransformer {
Arguments([lookupResult], types: [dartSignature, nativeSignature]));
}
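For context, a sketch of the user-facing calls the comment above refers to; the library name 'libexample.so' and the symbol 'add' are hypothetical. Both type arguments are compile-time constants at the call site, which is what lets this transformer inline 'asFunction' and 'lookupFunction'.

import 'dart:ffi';

typedef NativeAdd = Int32 Function(Int32, Int32);
typedef DartAdd = int Function(int, int);

void main() {
  final dylib = DynamicLibrary.open('libexample.so');

  // 'lookupFunction' is a symbol lookup plus 'asFunction' in one step.
  final DartAdd add = dylib.lookupFunction<NativeAdd, DartAdd>('add');

  // Equivalent two-step form: look the symbol up, then bind it.
  final DartAdd add2 =
      dylib.lookup<NativeFunction<NativeAdd>>('add').asFunction<DartAdd>();

  print(add(2, 3) + add2(4, 5));
}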
// We need to rewrite calls to 'fromFunction' into two calls, representing the
// compile-time and run-time aspects of creating the closure:
//
// final dynamic _#ffiCallback0 = Pointer.fromFunction<T>(f, e) =>
// _pointerFromFunction<NativeFunction<T>>(
// _nativeCallbackFunction<T>(f, e));
//
// ... _#ffiCallback0 ...
//
// We must implement this as a Kernel rewrite because <T> must be a
// compile-time constant in any invocation of '_nativeCallbackFunction'.
//
// Creating this closure requires a runtime call, so we save the result in a
// synthetic top-level field to avoid recomputing it.
Expression _replaceFromFunction(StaticInvocation node) {
final nativeFunctionType =
InterfaceType(nativeFunctionClass, node.arguments.types);
final Field field = Field(
Name("_#ffiCallback${callbackCount++}", currentLibrary),
type: InterfaceType(pointerClass, [nativeFunctionType]),
initializer: StaticInvocation(
pointerFromFunctionProcedure,
Arguments([
StaticInvocation(nativeCallbackFunctionProcedure, node.arguments)
], types: [
nativeFunctionType
])),
isStatic: true,
isFinal: true);
currentLibrary.addMember(field);
return StaticGet(field);
}
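At the Dart level, the rewrite described in the comment above behaves roughly as if the callback pointer were hoisted into a synthetic library-level final field, so the runtime work of building the trampoline happens once per call site. A hand-written approximation (illustrative only; '_nativeCallbackFunction' and '_pointerFromFunction' are private to dart:ffi and not callable from user code):

import 'dart:ffi';

int increment(int x) => x + 1;

// Roughly what the transformer emits for a call site of
//   Pointer.fromFunction<Int32 Function(Int32)>(increment, 0):
// a cached, library-level final field holding the callback pointer.
final Pointer<NativeFunction<Int32 Function(Int32)>> _ffiCallback0 =
    Pointer.fromFunction<Int32 Function(Int32)>(increment, 0);

void main() {
  // Each use of the original expression becomes a read of the field, so the
  // trampoline is only created once.
  print(_ffiCallback0.address);
  print(_ffiCallback0.address);
}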
@override
visitMethodInvocation(MethodInvocation node) {
super.visitMethodInvocation(node);
@ -339,11 +233,7 @@ class _FfiUseSiteTransformer extends FfiTransformer {
_ensureNativeTypeSized(nativeType, node, target.name);
_ensureNativeTypeToDartType(nativeType, dartType, node);
}
} on _FfiStaticTypeError {
// It's OK to swallow the exception because the diagnostics issued will
// cause compilation to fail. By continuing, we can report more
// diagnostics before compilation ends.
}
} on _FfiStaticTypeError {}
return node;
}
@ -432,17 +322,22 @@ class _FfiUseSiteTransformer extends FfiTransformer {
return false;
}
void _ensureIsStaticFunction(Expression node) {
if ((node is StaticGet && node.target is Procedure) ||
(node is ConstantExpression && node.constant is TearOffConstant)) {
return;
void _ensureIsStatic(Expression node) {
if (!_isStatic(node)) {
diagnosticReporter.report(
templateFfiNotStatic.withArguments(fromFunctionMethod.name.name),
node.fileOffset,
1,
node.location.file);
throw _FfiStaticTypeError();
}
diagnosticReporter.report(
templateFfiNotStatic.withArguments(fromFunctionMethod.name.name),
node.fileOffset,
1,
node.location.file);
throw _FfiStaticTypeError();
}
bool _isStatic(Expression node) {
if (node is StaticGet) {
return node.target is Procedure;
}
return node is ConstantExpression;
}
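A usage-level sketch (not from this CL) of what _ensureIsStaticFunction accepts: only a direct tear-off of a static or top-level function may be passed to 'fromFunction'; closures and instance-method tear-offs are reported with FfiNotStatic.

import 'dart:ffi';

// Top-level function: an acceptable callback target.
int twice(int x) => 2 * x;

class Scaler {
  final int factor;
  Scaler(this.factor);
  int scale(int x) => factor * x; // Instance method: not acceptable.
}

void main() {
  // OK: 'twice' is a static tear-off, so the check above passes.
  final ok = Pointer.fromFunction<Int32 Function(Int32)>(twice, 0);
  print(ok.address);

  // Rejected with FfiNotStatic: closures and instance-method tear-offs
  // capture state and have no fixed static entry point.
  // Pointer.fromFunction<Int32 Function(Int32)>((x) => x, 0);
  // Pointer.fromFunction<Int32 Function(Int32)>(Scaler(3).scale, 0);
}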
Class _extendsOrImplementsSealedClass(Class klass) {
@ -473,7 +368,6 @@ class _FfiUseSiteTransformer extends FfiTransformer {
klass.fileOffset,
1,
klass.location.file);
throw _FfiStaticTypeError();
}
}
}


@ -759,7 +759,7 @@ DART_EXPORT int TestThrowExceptionDouble(double (*fn)()) {
}
DART_EXPORT int TestThrowExceptionPointer(void* (*fn)()) {
CHECK_EQ(fn(), nullptr);
CHECK_EQ(fn(), reinterpret_cast<void*>(42));
return 0;
}


@ -429,6 +429,88 @@ DEFINE_NATIVE_ENTRY(Ffi_sizeOf, 1, 0) {
return Integer::New(SizeOf(type_arg));
}
#if !defined(DART_PRECOMPILED_RUNTIME) && !defined(DART_PRECOMPILER) && \
!defined(TARGET_ARCH_DBC)
// Generates assembly to trampoline from native code into Dart.
static uword CompileNativeCallback(const Function& c_signature,
const Function& dart_target,
const Instance& exceptional_return) {
Thread* const thread = Thread::Current();
uword entry_point = 0;
const int32_t callback_id = thread->AllocateFfiCallbackId(&entry_point);
ASSERT(NativeCallbackTrampolines::Enabled() == (entry_point != 0));
// Create a new Function named 'FfiCallback' and stick it in the 'dart:ffi'
// library. Note that these functions will never be invoked by Dart, so it
// doesn't matter that they all have the same name.
Zone* const Z = thread->zone();
const String& name = String::Handle(Symbols::New(thread, "FfiCallback"));
const Library& lib = Library::Handle(Z, Library::FfiLibrary());
const Class& owner_class = Class::Handle(Z, lib.toplevel_class());
const Function& function =
Function::Handle(Z, Function::New(name, RawFunction::kFfiTrampoline,
/*is_static=*/true,
/*is_const=*/false,
/*is_abstract=*/false,
/*is_external=*/false,
/*is_native=*/false, owner_class,
TokenPosition::kMinSource));
function.set_is_debuggable(false);
// Set callback-specific fields which the flow-graph builder needs to generate
// the body.
function.SetFfiCSignature(c_signature);
function.SetFfiCallbackId(callback_id);
function.SetFfiCallbackTarget(dart_target);
// We require that the exceptional return value for functions returning 'Void'
// must be 'null', since native code should not look at the result.
if (compiler::ffi::NativeTypeIsVoid(
AbstractType::Handle(c_signature.result_type())) &&
!exceptional_return.IsNull()) {
Exceptions::ThrowUnsupportedError(
"Only 'null' may be used as the exceptional return value for a "
"callback returning void.");
}
// We need to load the exceptional return value as a constant in the generated
// function. This means we need to ensure that it's in old space and has no
// (transitively) mutable fields. This is done by checking (asserting) that
// it's a built-in FFI class, whose fields are all immutable, or a
// user-defined Pointer class, which has no fields.
//
// TODO(36730): We'll need to extend this when we support passing/returning
// structs by value.
ASSERT(exceptional_return.IsNull() || exceptional_return.IsNumber() ||
exceptional_return.IsPointer());
if (!exceptional_return.IsSmi() && exceptional_return.IsNew()) {
function.SetFfiCallbackExceptionalReturn(
Instance::Handle(exceptional_return.CopyShallowToOldSpace(thread)));
} else {
function.SetFfiCallbackExceptionalReturn(exceptional_return);
}
// We compile the callback immediately because we need to return a pointer to
// the entry-point. Native calls do not use patching like Dart calls, so we
// cannot compile it lazily.
const Object& result =
Object::Handle(Z, Compiler::CompileOptimizedFunction(thread, function));
if (result.IsError()) {
Exceptions::PropagateError(Error::Cast(result));
}
ASSERT(result.IsCode());
const Code& code = Code::Cast(result);
thread->SetFfiCallbackCode(callback_id, code);
if (entry_point != 0) {
return entry_point;
} else {
return code.EntryPoint();
}
}
#endif
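To illustrate the 'Void' rule enforced above (a sketch, assuming the reverted 'fromFunction' signature shown later in this diff, where the exceptional return is a required argument): a callback whose native signature returns Void has no meaningful exceptional return, so only null is accepted.

import 'dart:ffi';

void onEvent(int code) {
  print('event $code');
}

void main() {
  // Native callers ignore the result of a Void-returning callback, so the
  // exceptional return must be null here.
  final callback = Pointer.fromFunction<Void Function(Int32)>(onEvent, null);
  print(callback.address);

  // Passing anything else, e.g.
  //   Pointer.fromFunction<Void Function(Int32)>(onEvent, 0);
  // is rejected by the check above.
}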
// Static invocations to this method are translated directly in streaming FGB
// and bytecode FGB. However, we can still reach this entrypoint in the bytecode
@ -463,6 +545,80 @@ DEFINE_NATIVE_ENTRY(Ffi_asFunctionInternal, 2, 1) {
#endif
}
DEFINE_NATIVE_ENTRY(Ffi_fromFunction, 1, 2) {
#if defined(DART_PRECOMPILED_RUNTIME) || defined(DART_PRECOMPILER) || \
defined(TARGET_ARCH_DBC)
// https://github.com/dart-lang/sdk/issues/37295
// FFI is supported, but callbacks are not.
Exceptions::ThrowUnsupportedError(
"FFI callbacks are not yet supported in AOT or on DBC.");
#else
GET_NATIVE_TYPE_ARGUMENT(type_arg, arguments->NativeTypeArgAt(0));
GET_NON_NULL_NATIVE_ARGUMENT(Closure, closure, arguments->NativeArgAt(0));
GET_NON_NULL_NATIVE_ARGUMENT(Instance, exceptional_return,
arguments->NativeArgAt(1));
if (!type_arg.IsInstantiated() || !type_arg.IsFunctionType()) {
// TODO(35902): Remove this when dynamic invocations of fromFunction are
// prohibited.
Exceptions::ThrowUnsupportedError(
"Type argument to fromFunction must an instantiated function type.");
}
const Function& native_signature =
Function::Handle(Type::Cast(type_arg).signature());
Function& func = Function::Handle(closure.function());
TypeArguments& type_args = TypeArguments::Handle(zone);
type_args = TypeArguments::New(1);
type_args.SetTypeAt(Pointer::kNativeTypeArgPos, type_arg);
type_args = type_args.Canonicalize();
Class& native_function_class =
Class::Handle(isolate->class_table()->At(kFfiNativeFunctionCid));
const auto& error =
Error::Handle(native_function_class.EnsureIsFinalized(Thread::Current()));
if (!error.IsNull()) {
Exceptions::PropagateError(error);
}
Type& native_function_type = Type::Handle(
Type::New(native_function_class, type_args, TokenPosition::kNoSource));
native_function_type ^=
ClassFinalizer::FinalizeType(Class::Handle(), native_function_type);
native_function_type ^= native_function_type.Canonicalize();
// The FE verifies that the target of a 'fromFunction' is a static method, so
// the value we see here must be a static tearoff. See ffi_use_sites.dart for
// details.
//
// TODO(36748): Define hot-reload semantics of native callbacks. We may need
// to look up the target by name.
ASSERT(func.IsImplicitClosureFunction());
func = func.parent_function();
ASSERT(func.is_static());
const AbstractType& return_type =
AbstractType::Handle(native_signature.result_type());
if (compiler::ffi::NativeTypeIsVoid(return_type)) {
if (!exceptional_return.IsNull()) {
const String& error = String::Handle(
String::NewFormatted("Exceptional return argument to 'fromFunction' "
"must be null for functions returning void."));
Exceptions::ThrowArgumentError(error);
}
} else if (!compiler::ffi::NativeTypeIsPointer(return_type) &&
exceptional_return.IsNull()) {
const String& error = String::Handle(String::NewFormatted(
"Exceptional return argument to 'fromFunction' must not be null."));
Exceptions::ThrowArgumentError(error);
}
return Pointer::New(
native_function_type,
CompileNativeCallback(native_signature, func, exceptional_return));
#endif
}
DEFINE_NATIVE_ENTRY(Ffi_asExternalTypedData, 0, 2) {
GET_NON_NULL_NATIVE_ARGUMENT(Pointer, pointer, arguments->NativeArgAt(0));
GET_NON_NULL_NATIVE_ARGUMENT(Integer, count, arguments->NativeArgAt(1));
@ -536,8 +692,8 @@ DEFINE_NATIVE_ENTRY(Ffi_asExternalTypedData, 0, 2) {
const auto& typed_data_class =
Class::Handle(zone, isolate->class_table()->At(cid));
const auto& error =
Error::Handle(zone, typed_data_class.EnsureIsFinalized(thread));
const auto& error = Error::Handle(
zone, typed_data_class.EnsureIsFinalized(Thread::Current()));
if (!error.IsNull()) {
Exceptions::PropagateError(error);
}
@ -547,91 +703,6 @@ DEFINE_NATIVE_ENTRY(Ffi_asExternalTypedData, 0, 2) {
Heap::kNew);
}
DEFINE_NATIVE_ENTRY(Ffi_nativeCallbackFunction, 1, 2) {
#if defined(TARGET_ARCH_DBC)
Exceptions::ThrowUnsupportedError(
"FFI callbacks are not yet supported on DBC.");
#elif defined(DART_PRECOMPILED_RUNTIME) || defined(DART_PRECOMPILER)
// Calls to this function are removed by the flow-graph builder in AOT.
// See StreamingFlowGraphBuilder::BuildFfiNativeCallbackFunction().
UNREACHABLE();
#else
GET_NATIVE_TYPE_ARGUMENT(type_arg, arguments->NativeTypeArgAt(0));
GET_NON_NULL_NATIVE_ARGUMENT(Closure, closure, arguments->NativeArgAt(0));
GET_NON_NULL_NATIVE_ARGUMENT(Instance, exceptional_return,
arguments->NativeArgAt(1));
ASSERT(type_arg.IsInstantiated() && type_arg.IsFunctionType());
const Function& native_signature =
Function::Handle(zone, Type::Cast(type_arg).signature());
Function& func = Function::Handle(zone, closure.function());
// The FE verifies that the target of a 'fromFunction' is a static method, so
// the value we see here must be a static tearoff. See ffi_use_sites.dart for
// details.
//
// TODO(36748): Define hot-reload semantics of native callbacks. We may need
// to look up the target by name.
ASSERT(func.IsImplicitClosureFunction());
func = func.parent_function();
ASSERT(func.is_static());
// We are returning an object which is not an Instance here. This is only OK
// because we know that the result will be passed directly to
// _pointerFromFunction and will not leak out into user code.
arguments->SetReturn(
Function::Handle(zone, compiler::ffi::NativeCallbackFunction(
native_signature, func, exceptional_return)));
// Because we have already set the return value.
return Object::sentinel().raw();
#endif
}
DEFINE_NATIVE_ENTRY(Ffi_pointerFromFunction, 1, 1) {
GET_NATIVE_TYPE_ARGUMENT(type_arg, arguments->NativeTypeArgAt(0));
const Function& function =
Function::CheckedHandle(zone, arguments->NativeArg0());
Code& code = Code::Handle(zone);
#if defined(DART_PRECOMPILED_RUNTIME)
code = function.CurrentCode();
// Blobs snapshots don't support BSS-relative relocations required by native
// callbacks (yet). Issue an error if the code has an unpatched relocation.
if (!code.VerifyBSSRelocations()) {
Exceptions::ThrowUnsupportedError(
"FFI callbacks are not yet supported in blobs snapshots. Please use "
"ELF or Assembly snapshots instead.");
}
#else
// We compile the callback immediately because we need to return a pointer to
// the entry-point. Native calls do not use patching like Dart calls, so we
// cannot compile it lazily.
const Object& result = Object::Handle(
zone, Compiler::CompileOptimizedFunction(thread, function));
if (result.IsError()) {
Exceptions::PropagateError(Error::Cast(result));
}
ASSERT(result.IsCode());
code ^= result.raw();
#endif
ASSERT(!code.IsNull());
thread->SetFfiCallbackCode(function.FfiCallbackId(), code);
uword entry_point = code.EntryPoint();
#if !defined(DART_PRECOMPILED_RUNTIME) && !defined(TARGET_ARCH_DBC)
if (NativeCallbackTrampolines::Enabled()) {
entry_point = isolate->native_callback_trampolines()->TrampolineForId(
function.FfiCallbackId());
}
#endif
return Pointer::New(type_arg, entry_point);
}
#if defined(TARGET_ARCH_DBC)
void FfiMarshalledArguments::SetFunctionAddress(uint64_t value) const {


@ -23,25 +23,6 @@ DS _asFunctionInternal<DS extends Function, NS extends Function>(
dynamic _asExternalTypedData(Pointer ptr, int count)
native "Ffi_asExternalTypedData";
// Returns a Function object for a native callback.
//
// Calls to [Pointer.fromFunction] are re-written by the FE into calls to this
// method + _pointerFromFunction. All three arguments must be constants.
//
// In AOT we evaluate calls to this function during precompilation and replace
// them with Constant instruction referencing the callback trampoline, to ensure
// that it will be precompiled.
//
// In all JIT modes we call a native runtime entry. We *cannot* use the IL
// implementation, since that would pull the callback trampoline into JIT
// snapshots. The callback trampolines can only be serialized into AOT snapshots
// because they embed the addresses of runtime routines in JIT mode.
Object _nativeCallbackFunction<NS extends Function>(Function target,
Object exceptionalReturn) native "Ffi_nativeCallbackFunction";
Pointer<NS> _pointerFromFunction<NS extends NativeFunction>(Object function)
native "Ffi_pointerFromFunction";
@patch
@pragma("vm:entry-point")
class Pointer<T extends NativeType> {
@ -51,18 +32,10 @@ class Pointer<T extends NativeType> {
@patch
factory Pointer.fromAddress(int ptr) => _fromAddress(ptr);
// All static calls to this method are replaced by the FE into
// _nativeCallbackFunction + _pointerFromFunction.
//
// We still need to throw an error on dynamic invocations, invocations
// through tearoffs or reflective calls.
@patch
static Pointer<NativeFunction<T>> fromFunction<T extends Function>(
@DartRepresentationOf("T") Function f,
[Object exceptionalReturn]) {
throw UnsupportedError(
"Pointer.fromFunction cannot be called dynamically.");
}
Object exceptionalReturn) native "Ffi_fromFunction";
// TODO(sjindel): When NNBD is available, we should change `value` to be
// non-null.


@ -381,8 +381,7 @@ namespace dart {
V(Ffi_cast, 1) \
V(Ffi_sizeOf, 0) \
V(Ffi_asFunctionInternal, 1) \
V(Ffi_nativeCallbackFunction, 2) \
V(Ffi_pointerFromFunction, 1) \
V(Ffi_fromFunction, 2) \
V(Ffi_dl_open, 1) \
V(Ffi_dl_lookup, 2) \
V(Ffi_dl_getHandle, 1) \


@ -1,17 +0,0 @@
// Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
#include <vm/bss_relocs.h>
#include <vm/runtime_entry.h>
#include <vm/thread.h>
namespace dart {
void BSS::Initialize(Thread* current, uword* bss_start) {
bss_start[BSS::RelocationIndex(
BSS::Relocation::DRT_GetThreadForNativeCallback)] =
reinterpret_cast<uword>(DLRT_GetThreadForNativeCallback);
}
} // namespace dart


@ -1,29 +0,0 @@
// Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
#ifndef RUNTIME_VM_BSS_RELOCS_H_
#define RUNTIME_VM_BSS_RELOCS_H_
#include <platform/allocation.h>
namespace dart {
class Thread;
class BSS : public AllStatic {
public:
enum class Relocation : intptr_t {
DRT_GetThreadForNativeCallback = 0,
NumRelocations = 1
};
static intptr_t RelocationIndex(Relocation reloc) {
return static_cast<intptr_t>(reloc);
}
static void Initialize(Thread* current, uword* bss);
};
} // namespace dart
#endif // RUNTIME_VM_BSS_RELOCS_H_


@ -6,7 +6,6 @@
#include "platform/assert.h"
#include "vm/bootstrap.h"
#include "vm/bss_relocs.h"
#include "vm/class_id.h"
#include "vm/code_observers.h"
#include "vm/compiler/backend/code_statistics.h"
@ -801,12 +800,9 @@ class FfiTrampolineDataSerializationCluster : public SerializationCluster {
AutoTraceObject(data);
WriteFromTo(data);
if (s->kind() == Snapshot::kFullAOT) {
s->WriteUnsigned(data->ptr()->callback_id_);
} else {
// FFI callbacks can only be written to AOT snapshots.
ASSERT(data->ptr()->callback_target_ == Object::null());
}
// TODO(37295): FFI callbacks shouldn't be written to a snapshot. They
// should only be referenced by the callback registry in Thread.
ASSERT(data->ptr()->callback_id_ == 0);
}
}
@ -838,8 +834,7 @@ class FfiTrampolineDataDeserializationCluster : public DeserializationCluster {
Deserializer::InitializeHeader(data, kFfiTrampolineDataCid,
FfiTrampolineData::InstanceSize());
ReadFromTo(data);
data->ptr()->callback_id_ =
d->kind() == Snapshot::kFullAOT ? d->ReadUnsigned() : 0;
data->ptr()->callback_id_ = 0;
}
}
};
@ -5752,17 +5747,6 @@ RawApiError* FullSnapshotReader::ReadIsolateSnapshot() {
}
}
}
// Initialize symbols in the BSS, if present.
ASSERT(Snapshot::IncludesCode(kind_));
Image image(instructions_image_);
if (image.bss_offset() != 0) {
// The const cast is safe because we're translating from the start of the
// instructions (read-only) to the start of the BSS (read-write).
uword* const bss_start = const_cast<uword*>(reinterpret_cast<const uword*>(
instructions_image_ + image.bss_offset()));
BSS::Initialize(thread_, bss_start);
}
#endif // defined(DART_PRECOMPILED_RUNTIME)
return ApiError::null();


@ -15,13 +15,10 @@ void DescriptorList::AddDescriptor(RawPcDescriptors::Kind kind,
TokenPosition token_pos,
intptr_t try_index) {
ASSERT((kind == RawPcDescriptors::kRuntimeCall) ||
(kind == RawPcDescriptors::kBSSRelocation) ||
(kind == RawPcDescriptors::kOther) || (deopt_id != DeoptId::kNone));
// When precompiling, we only use pc descriptors for exceptions and
// relocations.
if (!FLAG_precompiled_mode || try_index != -1 ||
kind == RawPcDescriptors::kBSSRelocation) {
// When precompiling, we only use pc descriptors for exceptions.
if (!FLAG_precompiled_mode || try_index != -1) {
int32_t merged_kind_try =
RawPcDescriptors::MergedKindTry::Encode(kind, try_index);


@ -30,31 +30,6 @@ DEFINE_FLAG(bool, use_far_branches, false, "Enable far branches for ARM.");
namespace compiler {
AssemblerBase::~AssemblerBase() {}
intptr_t AssemblerBase::InsertAlignedRelocation(BSS::Relocation reloc) {
// We cannot put a relocation at the very start (it's not a valid
// instruction)!
ASSERT(CodeSize() != 0);
// Align to a target word boundary.
const intptr_t offset =
Utils::RoundUp(CodeSize(), compiler::target::kWordSize);
while (CodeSize() < offset) {
Breakpoint();
}
ASSERT(CodeSize() == offset);
AssemblerBuffer::EnsureCapacity ensured(&buffer_);
buffer_.Emit<compiler::target::word>(BSS::RelocationIndex(reloc) *
compiler::target::kWordSize);
ASSERT(CodeSize() == (offset + compiler::target::kWordSize));
return offset;
}
static uword NewContents(intptr_t capacity) {
Zone* zone = Thread::Current()->zone();
uword result = zone->AllocUnsafe(capacity);


@ -305,7 +305,7 @@ class AssemblerBase : public StackResource {
prologue_offset_(-1),
has_single_entry_point_(true),
object_pool_builder_(object_pool_builder) {}
virtual ~AssemblerBase();
virtual ~AssemblerBase() {}
intptr_t CodeSize() const { return buffer_.Size(); }
@ -320,10 +320,6 @@ class AssemblerBase : public StackResource {
void Comment(const char* format, ...) PRINTF_ATTRIBUTE(2, 3);
static bool EmittingComments();
virtual void Breakpoint() = 0;
intptr_t InsertAlignedRelocation(BSS::Relocation reloc);
void Unimplemented(const char* message);
void Untested(const char* message);
void Unreachable(const char* message);


@ -392,7 +392,7 @@ class Assembler : public AssemblerBase {
#endif // TESTING || DEBUG
// Debugging and bringup support.
void Breakpoint() override { bkpt(0); }
void Breakpoint() { bkpt(0); }
void Stop(const char* message) override;
static void InitializeMemoryWithBreakpoints(uword data, intptr_t length);


@ -485,7 +485,7 @@ class Assembler : public AssemblerBase {
void set_use_far_branches(bool b) { use_far_branches_ = b; }
// Debugging and bringup support.
void Breakpoint() override { brk(0); }
void Breakpoint() { brk(0); }
void Stop(const char* message) override;
static void InitializeMemoryWithBreakpoints(uword data, intptr_t length);


@ -43,7 +43,6 @@ class Assembler : public AssemblerBase {
void MonomorphicCheckedEntryAOT() {}
// Debugging and bringup support.
void Breakpoint() override { Stop("Breakpoint!"); }
void Stop(const char* message) override;
static void InitializeMemoryWithBreakpoints(uword data, intptr_t length);


@ -835,7 +835,7 @@ class Assembler : public AssemblerBase {
Register temp);
// Debugging and bringup support.
void Breakpoint() override { int3(); }
void Breakpoint() { int3(); }
void Stop(const char* message) override;
static void InitializeMemoryWithBreakpoints(uword data, intptr_t length);


@ -936,7 +936,7 @@ class Assembler : public AssemblerBase {
void GenerateUnRelocatedPcRelativeCall(intptr_t offset_into_target = 0);
// Debugging and bringup support.
void Breakpoint() override { int3(); }
void Breakpoint() { int3(); }
void Stop(const char* message) override;
static void InitializeMemoryWithBreakpoints(uword data, intptr_t length);


@ -242,13 +242,6 @@ bool FlowGraphCompiler::CanOSRFunction() const {
return isolate()->use_osr() && CanOptimizeFunction() && !is_optimizing();
}
void FlowGraphCompiler::InsertBSSRelocation(BSS::Relocation reloc) {
const intptr_t offset = assembler()->InsertAlignedRelocation(reloc);
AddDescriptor(RawPcDescriptors::kBSSRelocation, /*pc_offset=*/offset,
/*deopt_id=*/DeoptId::kNone, TokenPosition::kNoSource,
/*try_index=*/-1);
}
bool FlowGraphCompiler::ForceSlowPathForStackOverflow() const {
#if !defined(PRODUCT)
if ((FLAG_stacktrace_every > 0) || (FLAG_deoptimize_every > 0) ||
@ -1408,9 +1401,7 @@ void FlowGraphCompiler::GenerateStaticCall(intptr_t deopt_id,
: ic_data.arguments_descriptor());
ASSERT(ArgumentsDescriptor(arguments_descriptor).TypeArgsLen() ==
args_info.type_args_len);
// Force-optimized functions lack the deopt info which allows patching of
// optimized static calls.
if (is_optimizing() && (!ForcedOptimization() || FLAG_precompiled_mode)) {
if (is_optimizing() && !ForcedOptimization()) {
EmitOptimizedStaticCall(function, arguments_descriptor,
args_info.count_with_type_args, deopt_id, token_pos,
locs, entry_kind);


@ -428,8 +428,6 @@ class FlowGraphCompiler : public ValueObject {
bool CanOSRFunction() const;
bool is_optimizing() const { return is_optimizing_; }
void InsertBSSRelocation(BSS::Relocation reloc);
// The function was fully intrinsified, so the body is unreachable.
//
// We still need to compile the body in unoptimized mode because the


@ -1137,6 +1137,10 @@ void NativeEntryInstr::SaveArgument(FlowGraphCompiler* compiler,
}
void NativeEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
if (FLAG_precompiled_mode) {
UNREACHABLE();
}
// Constant pool cannot be used until we enter the actual Dart frame.
__ set_constant_pool_allowed(false);
@ -1161,41 +1165,8 @@ void NativeEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
// Load the thread object. If we were called by a trampoline, the thread is
// already loaded.
if (FLAG_precompiled_mode) {
compiler::Label skip_reloc;
__ b(&skip_reloc);
compiler->InsertBSSRelocation(
BSS::Relocation::DRT_GetThreadForNativeCallback);
__ Bind(&skip_reloc);
// For historical reasons, the PC on ARM points 8 bytes (two instructions)
// past the current instruction.
__ sub(
R0, PC,
compiler::Operand(Instr::kPCReadOffset + compiler::target::kWordSize));
// R0 holds the address of the relocation.
__ ldr(R1, compiler::Address(R0));
// R1 holds the relocation itself: R0 - bss_start.
// R0 = R0 + (bss_start - R0) = bss_start
__ add(R0, R0, compiler::Operand(R1));
// R0 holds the start of the BSS section.
// Load the "get-thread" routine: *bss_start.
__ ldr(R1, compiler::Address(R0));
} else if (!NativeCallbackTrampolines::Enabled()) {
// In JIT mode, we can just paste the address of the runtime entry into the
// generated code directly. This is not a problem since we don't save
// callbacks into JIT snapshots.
ASSERT(kWordSize == compiler::target::kWordSize);
__ LoadImmediate(
R1, static_cast<compiler::target::uword>(
reinterpret_cast<uword>(DLRT_GetThreadForNativeCallback)));
}
// Load the thread object. If we were called by a trampoline, the thread is
// already loaded.
//
// TODO(35765): Fix linking issue on AOT.
if (!NativeCallbackTrampolines::Enabled()) {
// Create another frame to align the frame before continuing in "native"
// code.
@ -1203,6 +1174,8 @@ void NativeEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ ReserveAlignedFrameSpace(0);
__ LoadImmediate(R0, callback_id_);
__ LoadImmediate(
R1, reinterpret_cast<int64_t>(DLRT_GetThreadForNativeCallback));
__ blx(R1);
__ mov(THR, compiler::Operand(R0));
@ -6981,8 +6954,7 @@ LocationSummary* BitCastInstr::MakeLocationSummary(Zone* zone, bool opt) const {
break;
case kUnboxedFloat:
case kUnboxedDouble:
// Choose an FPU register with corresponding D and S registers.
summary->set_in(0, Location::FpuRegisterLocation(Q0));
summary->set_in(0, Location::RequiresFpuRegister());
break;
default:
UNREACHABLE();
@ -6998,8 +6970,7 @@ LocationSummary* BitCastInstr::MakeLocationSummary(Zone* zone, bool opt) const {
break;
case kUnboxedFloat:
case kUnboxedDouble:
// Choose an FPU register with corresponding D and S registers.
summary->set_out(0, Location::FpuRegisterLocation(Q0));
summary->set_out(0, Location::RequiresFpuRegister());
break;
default:
UNREACHABLE();


@ -1020,6 +1020,10 @@ void NativeEntryInstr::SaveArgument(FlowGraphCompiler* compiler,
}
void NativeEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
if (FLAG_precompiled_mode) {
UNREACHABLE();
}
// Constant pool cannot be used until we enter the actual Dart frame.
__ set_constant_pool_allowed(false);
@ -1048,33 +1052,8 @@ void NativeEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
// Load the thread object. If we were called by a trampoline, the thread is
// already loaded.
if (FLAG_precompiled_mode) {
compiler::Label skip_reloc;
__ b(&skip_reloc);
compiler->InsertBSSRelocation(
BSS::Relocation::DRT_GetThreadForNativeCallback);
__ Bind(&skip_reloc);
__ adr(R0, compiler::Immediate(-compiler::target::kWordSize));
// R0 holds the address of the relocation.
__ ldr(R1, compiler::Address(R0));
// R1 holds the relocation itself: R0 - bss_start.
// R0 = R0 + (bss_start - R0) = bss_start
__ add(R0, R0, compiler::Operand(R1));
// R0 holds the start of the BSS section.
// Load the "get-thread" routine: *bss_start.
__ ldr(R1, compiler::Address(R0));
} else if (!NativeCallbackTrampolines::Enabled()) {
// In JIT mode, we can just paste the address of the runtime entry into the
// generated code directly. This is not a problem since we don't save
// callbacks into JIT snapshots.
__ LoadImmediate(
R1, reinterpret_cast<int64_t>(DLRT_GetThreadForNativeCallback));
}
//
// TODO(35765): Fix linking issue on AOT.
if (!NativeCallbackTrampolines::Enabled()) {
// Create another frame to align the frame before continuing in "native"
// code.
@ -1082,6 +1061,8 @@ void NativeEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ ReserveAlignedFrameSpace(0);
__ LoadImmediate(R0, callback_id_);
__ LoadImmediate(
R1, reinterpret_cast<int64_t>(DLRT_GetThreadForNativeCallback));
__ blr(R1);
__ mov(THR, R0);


@ -1036,16 +1036,15 @@ void NativeEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ pushl(EDI);
// Load the thread object.
//
// Linking in AOT is not relevant here since we don't support AOT for IA32.
// Create another frame to align the frame before continuing in "native" code.
// If we were called by a trampoline, it has already loaded the thread.
ASSERT(!FLAG_precompiled_mode); // No relocation for AOT linking.
if (!NativeCallbackTrampolines::Enabled()) {
__ EnterFrame(0);
__ ReserveAlignedFrameSpace(compiler::target::kWordSize);
__ movl(compiler::Address(SPREG, 0), compiler::Immediate(callback_id_));
__ movl(EAX, compiler::Immediate(reinterpret_cast<intptr_t>(
__ movl(EAX, compiler::Immediate(reinterpret_cast<int64_t>(
DLRT_GetThreadForNativeCallback)));
__ call(EAX);
__ movl(THR, EAX);


@ -1024,6 +1024,10 @@ void NativeEntryInstr::SaveArgument(FlowGraphCompiler* compiler,
}
void NativeEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
if (FLAG_precompiled_mode) {
UNREACHABLE();
}
__ Bind(compiler->GetJumpLabel(this));
// Create a dummy frame holding the pushed arguments. This simplifies
@ -1051,38 +1055,9 @@ void NativeEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ PushRegisters(CallingConventions::kCalleeSaveCpuRegisters,
CallingConventions::kCalleeSaveXmmRegisters);
// Load the address of DLRT_GetThreadForNativeCallback without using Thread.
if (FLAG_precompiled_mode) {
compiler::Label skip_reloc;
__ jmp(&skip_reloc);
compiler->InsertBSSRelocation(
BSS::Relocation::DRT_GetThreadForNativeCallback);
const intptr_t reloc_end = __ CodeSize();
__ Bind(&skip_reloc);
const intptr_t kLeaqLength = 7;
__ leaq(RAX, compiler::Address::AddressRIPRelative(
-kLeaqLength - compiler::target::kWordSize));
ASSERT((__ CodeSize() - reloc_end) == kLeaqLength);
// RAX holds the address of the relocation.
__ movq(RCX, compiler::Address(RAX, 0));
// RCX holds the relocation itself: RAX - bss_start.
// RAX = RAX + (bss_start - RAX) = bss_start
__ addq(RAX, RCX);
// RAX holds the start of the BSS section.
// Load the "get-thread" routine: *bss_start.
__ movq(RAX, compiler::Address(RAX, 0));
} else if (!NativeCallbackTrampolines::Enabled()) {
// In JIT mode, we can just paste the address of the runtime entry into the
// generated code directly. This is not a problem since we don't save
// callbacks into JIT snapshots.
__ movq(RAX, compiler::Immediate(reinterpret_cast<intptr_t>(
DLRT_GetThreadForNativeCallback)));
}
// Load the thread object.
// TODO(35765): Fix linking issue on AOT.
//
// Create another frame to align the frame before continuing in "native" code.
// If we were called by a trampoline, it has already loaded the thread.
if (!NativeCallbackTrampolines::Enabled()) {
@ -1091,6 +1066,8 @@ void NativeEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
COMPILE_ASSERT(RAX != CallingConventions::kArg1Reg);
__ movq(CallingConventions::kArg1Reg, compiler::Immediate(callback_id_));
__ movq(RAX, compiler::Immediate(reinterpret_cast<int64_t>(
DLRT_GetThreadForNativeCallback)));
__ CallCFunction(RAX);
__ movq(THR, RAX);


@ -210,58 +210,6 @@ Representation ResultRepresentationBase(const Function& signature) {
#if !defined(TARGET_ARCH_DBC)
RawFunction* NativeCallbackFunction(const Function& c_signature,
const Function& dart_target,
const Instance& exceptional_return) {
Thread* const thread = Thread::Current();
const int32_t callback_id = thread->AllocateFfiCallbackId();
// Create a new Function named '<target>_FfiCallback' and stick it in the
// 'dart:ffi' library. Note that these functions will never be invoked by
// Dart, so they may have duplicate names.
Zone* const zone = thread->zone();
const auto& name = String::Handle(
zone, Symbols::FromConcat(thread, Symbols::FfiCallback(),
String::Handle(zone, dart_target.name())));
const Library& lib = Library::Handle(zone, Library::FfiLibrary());
const Class& owner_class = Class::Handle(zone, lib.toplevel_class());
const Function& function =
Function::Handle(zone, Function::New(name, RawFunction::kFfiTrampoline,
/*is_static=*/true,
/*is_const=*/false,
/*is_abstract=*/false,
/*is_external=*/false,
/*is_native=*/false, owner_class,
TokenPosition::kNoSource));
function.set_is_debuggable(false);
// Set callback-specific fields which the flow-graph builder needs to generate
// the body.
function.SetFfiCSignature(c_signature);
function.SetFfiCallbackId(callback_id);
function.SetFfiCallbackTarget(dart_target);
// We need to load the exceptional return value as a constant in the generated
// function. Even though the FE ensures that it is a constant, it could still
// be a literal allocated in new space. We need to copy it into old space in
// that case.
//
// Exceptional return values currently cannot be pointers because we don't
// have constant pointers.
//
// TODO(36730): We'll need to extend this when we support passing/returning
// structs by value.
ASSERT(exceptional_return.IsNull() || exceptional_return.IsNumber());
if (!exceptional_return.IsSmi() && exceptional_return.IsNew()) {
function.SetFfiCallbackExceptionalReturn(Instance::Handle(
zone, exceptional_return.CopyShallowToOldSpace(thread)));
} else {
function.SetFfiCallbackExceptionalReturn(exceptional_return);
}
return function.raw();
}
ZoneGrowableArray<Representation>* ArgumentRepresentations(
const Function& signature) {
return ArgumentRepresentationsBase<CallingConventions>(signature);
@ -702,6 +650,27 @@ Representation FfiSignatureDescriptor::ResultRepresentation() const {
#endif // defined(TARGET_ARCH_DBC)
bool IsAsFunctionInternal(Zone* zone, Isolate* isolate, const Function& func) {
Object& asFunctionInternal =
Object::Handle(zone, isolate->object_store()->ffi_as_function_internal());
if (asFunctionInternal.raw() == Object::null()) {
// Cache the reference.
const Library& ffi =
Library::Handle(zone, isolate->object_store()->ffi_library());
asFunctionInternal =
ffi.LookupFunctionAllowPrivate(Symbols::AsFunctionInternal());
// Cannot assert that 'asFunctionInternal' is found because it may have been
// tree-shaken.
if (asFunctionInternal.IsNull()) {
// Set the entry in the object store to a sentinel so we don't try to look
// it up again.
asFunctionInternal = Object::sentinel().raw();
}
isolate->object_store()->set_ffi_as_function_internal(asFunctionInternal);
}
return func.raw() == asFunctionInternal.raw();
}
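The look-up-once-and-cache pattern in IsAsFunctionInternal can be sketched in Dart as follows (illustrative only, not VM code): a failed lookup is cached as a distinct sentinel, so later calls neither repeat the lookup nor confuse 'not looked up yet' with 'looked up and absent'.

// Sentinel meaning "looked up and not found" (distinct from null, which
// means "not looked up yet").
final Object _absent = Object();

Object _cached;

// Stand-in for the expensive lookup (LookupFunctionAllowPrivate in the VM
// code above); returns null when the symbol was tree-shaken.
Object _expensiveLookup(String name) {
  print('looking up $name');
  return null;
}

Object lookupCached(String name) {
  if (_cached == null) {
    final result = _expensiveLookup(name);
    _cached = result ?? _absent; // Cache the sentinel on failure.
  }
  return identical(_cached, _absent) ? null : _cached;
}

void main() {
  print(lookupCached('asFunctionInternal')); // performs the lookup
  print(lookupCached('asFunctionInternal')); // served from the cache
}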
#endif // !defined(DART_PRECOMPILED_RUNTIME)
} // namespace ffi


@ -56,10 +56,6 @@ RawFunction* TrampolineFunction(const Function& dart_signature,
#if !defined(TARGET_ARCH_DBC)
RawFunction* NativeCallbackFunction(const Function& c_signature,
const Function& dart_target,
const Instance& exceptional_return);
// Unboxed representations of the arguments to a C signature function.
ZoneGrowableArray<Representation>* ArgumentRepresentations(
const Function& signature);
@ -149,8 +145,11 @@ class CallbackArgumentTranslator : public ValueObject {
intptr_t argument_slots_used_ = 0;
intptr_t argument_slots_required_ = 0;
};
#endif // defined(TARGET_ARCH_DBC)
bool IsAsFunctionInternal(Zone* zone, Isolate* isolate, const Function& func);
} // namespace ffi
} // namespace compiler


@ -807,14 +807,9 @@ void BytecodeFlowGraphBuilder::BuildDirectCall() {
const Function& target = Function::Cast(ConstantAt(DecodeOperandD()).value());
const intptr_t argc = DecodeOperandF().value();
const auto recognized_kind = MethodRecognizer::RecognizeKind(target);
if (recognized_kind == MethodRecognizer::kFfiAsFunctionInternal) {
if (compiler::ffi::IsAsFunctionInternal(Z, isolate(), target)) {
BuildFfiAsFunction();
return;
} else if (FLAG_precompiled_mode &&
recognized_kind == MethodRecognizer::kFfiNativeCallbackFunction) {
BuildFfiNativeCallbackFunction();
return;
}
// Recognize identical() call.
@ -831,7 +826,7 @@ void BytecodeFlowGraphBuilder::BuildDirectCall() {
}
if (!FLAG_causal_async_stacks &&
recognized_kind == MethodRecognizer::kAsyncStackTraceHelper) {
target.recognized_kind() == MethodRecognizer::kAsyncStackTraceHelper) {
ASSERT(argc == 1);
// Drop the ignored parameter to _asyncStackTraceHelper(:async_op).
code_ += B->Drop();
@ -839,7 +834,7 @@ void BytecodeFlowGraphBuilder::BuildDirectCall() {
return;
}
if (recognized_kind == MethodRecognizer::kStringBaseInterpolate) {
if (target.recognized_kind() == MethodRecognizer::kStringBaseInterpolate) {
ASSERT(argc == 1);
code_ += B->StringInterpolate(position_);
return;
@ -1829,45 +1824,6 @@ void BytecodeFlowGraphBuilder::BuildFfiAsFunction() {
code_ += B->BuildFfiAsFunctionInternalCall(type_args);
}
// Builds graph for a call to 'dart:ffi::_nativeCallbackFunction'.
// The call-site must look like this (guaranteed by the FE which inserts it):
//
// _nativeCallbackFunction<NativeSignatureType>(target, exceptionalReturn)
//
// Therefore the stack shall look like:
//
// <exceptional return value> => ensured (by FE) to be a constant
// <target> => closure, ensured (by FE) to be a (non-partially-instantiated)
// static tearoff
// <type args> => [NativeSignatureType]
void BytecodeFlowGraphBuilder::BuildFfiNativeCallbackFunction() {
#if defined(TARGET_ARCH_DBC)
UNREACHABLE();
#else
const TypeArguments& type_args =
TypeArguments::Cast(B->Peek(/*depth=*/2)->AsConstant()->value());
ASSERT(type_args.IsInstantiated() && type_args.Length() == 1);
const Function& native_sig = Function::Handle(
Z, Type::Cast(AbstractType::Handle(Z, type_args.TypeAt(0))).signature());
const Closure& target_closure =
Closure::Cast(B->Peek(/*depth=*/1)->AsConstant()->value());
ASSERT(!target_closure.IsNull());
Function& target = Function::Handle(Z, target_closure.function());
ASSERT(!target.IsNull() && target.IsImplicitClosureFunction());
target = target.parent_function();
const Instance& exceptional_return =
Instance::Cast(B->Peek(/*depth=*/0)->AsConstant()->value());
const Function& result =
Function::ZoneHandle(Z, compiler::ffi::NativeCallbackFunction(
native_sig, target, exceptional_return));
code_ += B->Constant(result);
code_ += B->DropTempsPreserveTop(3);
#endif
}
void BytecodeFlowGraphBuilder::BuildDebugStepCheck() {
#if !defined(PRODUCT)
if (build_debug_step_checks_) {


@ -172,7 +172,6 @@ class BytecodeFlowGraphBuilder {
void BuildInstruction(KernelBytecode::Opcode opcode);
void BuildFfiAsFunction();
void BuildFfiNativeCallbackFunction();
void BuildDebugStepCheck();
#define DECLARE_BUILD_METHOD(name, encoding, kind, op1, op2, op3) \


@ -3059,12 +3059,8 @@ Fragment StreamingFlowGraphBuilder::BuildStaticInvocation(bool is_const,
++argument_count;
}
const auto recognized_kind = MethodRecognizer::RecognizeKind(target);
if (recognized_kind == MethodRecognizer::kFfiAsFunctionInternal) {
if (compiler::ffi::IsAsFunctionInternal(Z, H.isolate(), target)) {
return BuildFfiAsFunctionInternal();
} else if (FLAG_precompiled_mode &&
recognized_kind == MethodRecognizer::kFfiNativeCallbackFunction) {
return BuildFfiNativeCallbackFunction();
}
Fragment instructions;
@ -3072,7 +3068,7 @@ Fragment StreamingFlowGraphBuilder::BuildStaticInvocation(bool is_const,
const bool special_case_nop_async_stack_trace_helper =
!FLAG_causal_async_stacks &&
recognized_kind == MethodRecognizer::kAsyncStackTraceHelper;
target.recognized_kind() == MethodRecognizer::kAsyncStackTraceHelper;
const bool special_case_unchecked_cast =
klass.IsTopLevel() && (klass.library() == Library::InternalLibrary()) &&
@ -5047,65 +5043,6 @@ Fragment StreamingFlowGraphBuilder::BuildFfiAsFunctionInternal() {
return code;
}
Fragment StreamingFlowGraphBuilder::BuildFfiNativeCallbackFunction() {
#if defined(TARGET_ARCH_DBC)
UNREACHABLE();
#else
// The call-site must look like this (guaranteed by the FE which inserts it):
//
// _nativeCallbackFunction<NativeSignatureType>(target, exceptionalReturn)
//
// The FE also guarantees that all three arguments are constants.
const intptr_t argc = ReadUInt(); // read argument count
ASSERT(argc == 2); // target, exceptionalReturn
const intptr_t list_length = ReadListLength(); // read types list length
ASSERT(list_length == 1); // native signature
const TypeArguments& type_arguments =
T.BuildTypeArguments(list_length); // read types.
ASSERT(type_arguments.Length() == 1 && type_arguments.IsInstantiated());
const Function& native_sig = Function::Handle(
Z, Type::Cast(AbstractType::Handle(Z, type_arguments.TypeAt(0)))
.signature());
Fragment code;
const intptr_t positional_count =
ReadListLength(); // read positional argument count
ASSERT(positional_count == 2);
// Read target expression and extract the target function.
code += BuildExpression(); // build first positional argument (target)
Definition* target_def = B->Peek();
ASSERT(target_def->IsConstant());
const Closure& target_closure =
Closure::Cast(target_def->AsConstant()->value());
ASSERT(!target_closure.IsNull());
Function& target = Function::Handle(Z, target_closure.function());
ASSERT(!target.IsNull() && target.IsImplicitClosureFunction());
target = target.parent_function();
code += Drop();
// Build second positional argument (exceptionalReturn).
code += BuildExpression();
Definition* exceptional_return_def = B->Peek();
ASSERT(exceptional_return_def->IsConstant());
const Instance& exceptional_return =
Instance::Cast(exceptional_return_def->AsConstant()->value());
code += Drop();
const intptr_t named_args_len =
ReadListLength(); // skip (empty) named arguments list
ASSERT(named_args_len == 0);
const Function& result =
Function::ZoneHandle(Z, compiler::ffi::NativeCallbackFunction(
native_sig, target, exceptional_return));
code += Constant(result);
return code;
#endif
}
} // namespace kernel
} // namespace dart


@ -353,10 +353,6 @@ class StreamingFlowGraphBuilder : public KernelReaderHelper {
// Kernel buffer and pushes the resulting closure.
Fragment BuildFfiAsFunctionInternal();
// Build FG for '_nativeCallbackFunction'. Reads an Arguments from the
// Kernel buffer and pushes the resulting Function object.
Fragment BuildFfiNativeCallbackFunction();
FlowGraphBuilder* flow_graph_builder_;
ActiveClass* const active_class_;
TypeTranslator type_translator_;


@ -2397,7 +2397,6 @@ Fragment FlowGraphBuilder::FfiPointerFromAddress(const Type& result_type) {
// do not appear in the type arguments to any Pointer classes in an FFI
// signature.
ASSERT(args.IsNull() || args.IsInstantiated());
args = args.Canonicalize();
Fragment code;
code += Constant(args);
@ -2451,6 +2450,14 @@ Fragment FlowGraphBuilder::FfiConvertArgumentToNative(
const Representation native_representation) {
Fragment body;
// Return 0 for void.
if (compiler::ffi::NativeTypeIsVoid(ffi_type)) {
body += Drop();
body += IntConstant(0);
body += UnboxTruncate(kUnboxedFfiIntPtr);
return body;
}
// Check for 'null'.
body += LoadLocal(MakeTemporary());
body <<= new (Z) CheckNullInstr(Pop(), String::ZoneHandle(Z, function.name()),
@ -2607,20 +2614,9 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfFfiCallback(const Function& function) {
/*needs_stacktrace=*/false, /*is_synthesized=*/true);
// Return the "exceptional return" value given in 'fromFunction'.
//
// For pointer and void return types, the exceptional return is always null --
// return 0 instead.
if (compiler::ffi::NativeTypeIsPointer(ffi_type) ||
compiler::ffi::NativeTypeIsVoid(ffi_type)) {
ASSERT(function.FfiCallbackExceptionalReturn() == Object::null());
catch_body += IntConstant(0);
catch_body += UnboxTruncate(kUnboxedFfiIntPtr);
} else {
catch_body += Constant(
Instance::ZoneHandle(Z, function.FfiCallbackExceptionalReturn()));
catch_body += FfiConvertArgumentToNative(function, ffi_type, result_rep);
}
catch_body += Constant(
Instance::ZoneHandle(Z, function.FfiCallbackExceptionalReturn()));
catch_body += FfiConvertArgumentToNative(function, ffi_type, result_rep);
catch_body += NativeReturn(result_rep);
--catch_depth_;
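
The catch block above is what a throwing callback observes at run time; a minimal Dart sketch (names are hypothetical) of that behavior — when the Dart callback throws, the native caller receives the constant exceptional return value rather than an exception:

import 'dart:ffi';

typedef NativeDivide = Int32 Function(Int32, Int32);

int divide(int a, int b) {
  if (b == 0) throw ArgumentError('division by zero');
  return a ~/ b;
}

void main() {
  // If 'divide' throws while invoked from native code, the generated
  // callback returns the constant -1 instead of propagating the exception.
  final callback = Pointer.fromFunction<NativeDivide>(divide, -1);
  print(callback);
}
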


@ -143,8 +143,6 @@ namespace dart {
V(::, _classRangeCheck, ClassRangeCheck, 0xca52e30a) \
V(::, _asyncStackTraceHelper, AsyncStackTraceHelper, 0xaeaed5cb) \
V(::, _abi, FfiAbi, 0xf2e89620) \
V(::, _asFunctionInternal, FfiAsFunctionInternal, 0x82525e9e) \
V(::, _nativeCallbackFunction, FfiNativeCallbackFunction, 0x591fb33c) \
// List of intrinsics:
// (class-name, function-name, intrinsification method, fingerprint).


@ -21,7 +21,6 @@
#include "platform/globals.h"
#include "vm/allocation.h"
#include "vm/bitfield.h"
#include "vm/bss_relocs.h"
#include "vm/class_id.h"
#include "vm/code_entry_kind.h"
#include "vm/constants.h"


@ -82,6 +82,8 @@ static const intptr_t kElfDynamicTableEntrySize = 16;
static const intptr_t kElfSymbolHashTableEntrySize = 4;
#endif
static const intptr_t kPageSize = 4096;
class Section : public ZoneAllocated {
public:
Section() {}
@ -113,34 +115,23 @@ class ProgramBits : public Section {
public:
ProgramBits(bool allocate,
bool executable,
bool writable,
const uint8_t* bytes,
intptr_t filesz,
intptr_t memsz = -1) {
if (memsz == -1) memsz = filesz;
intptr_t size) {
section_type = SHT_PROGBITS;
if (allocate) {
section_flags = SHF_ALLOC;
if (executable) section_flags |= SHF_EXECINSTR;
if (writable) section_flags |= SHF_WRITE;
segment_type = PT_LOAD;
segment_flags = PF_R;
if (executable) segment_flags |= PF_X;
if (writable) segment_flags |= PF_W;
}
bytes_ = bytes;
file_size = filesz;
memory_size = memsz;
file_size = memory_size = size;
}
void Write(Elf* stream) {
if (bytes_ != nullptr) {
stream->WriteBytes(bytes_, file_size);
}
}
void Write(Elf* stream) { stream->WriteBytes(bytes_, memory_size); }
const uint8_t* bytes_;
};
@ -380,7 +371,7 @@ static const intptr_t kNumInvalidSections = 1;
// Elf::segments_.
static const intptr_t kNumImplicitSegments = 3;
static const intptr_t kProgramTableSegmentSize = Elf::kPageSize;
static const intptr_t kProgramTableSegmentSize = kPageSize;
Elf::Elf(Zone* zone, StreamingWriteStream* stream)
: zone_(zone), stream_(stream), memory_offset_(0) {
@ -424,7 +415,7 @@ intptr_t Elf::NextMemoryOffset() {
}
intptr_t Elf::AddText(const char* name, const uint8_t* bytes, intptr_t size) {
ProgramBits* image = new (zone_) ProgramBits(true, true, false, bytes, size);
ProgramBits* image = new (zone_) ProgramBits(true, true, bytes, size);
image->section_name = shstrtab_->AddString(".text");
AddSection(image);
AddSegment(image);
@ -443,29 +434,8 @@ intptr_t Elf::AddText(const char* name, const uint8_t* bytes, intptr_t size) {
return symbol->offset;
}
intptr_t Elf::AddBSSData(const char* name, intptr_t size) {
ProgramBits* image = new (zone_)
ProgramBits(true, false, true, nullptr, /*filesz=*/0, /*memsz=*/size);
image->section_name = shstrtab_->AddString(".bss");
AddSection(image);
AddSegment(image);
Symbol* symbol = new (zone_) Symbol();
symbol->cstr = name;
symbol->name = symstrtab_->AddString(name);
symbol->info = (STB_GLOBAL << 4) | STT_OBJECT;
symbol->section = image->section_index;
// For shared libraries, this is the offset from the DSO base. For static
// libraries, this is section relative.
symbol->offset = image->memory_offset;
symbol->size = size;
symtab_->AddSymbol(symbol);
return symbol->offset;
}
intptr_t Elf::AddROData(const char* name, const uint8_t* bytes, intptr_t size) {
ProgramBits* image = new (zone_) ProgramBits(true, false, false, bytes, size);
ProgramBits* image = new (zone_) ProgramBits(true, false, bytes, size);
image->section_name = shstrtab_->AddString(".rodata");
AddSection(image);
AddSegment(image);
@ -485,8 +455,7 @@ intptr_t Elf::AddROData(const char* name, const uint8_t* bytes, intptr_t size) {
}
void Elf::AddDebug(const char* name, const uint8_t* bytes, intptr_t size) {
ProgramBits* image =
new (zone_) ProgramBits(false, false, false, bytes, size);
ProgramBits* image = new (zone_) ProgramBits(false, false, bytes, size);
image->section_name = shstrtab_->AddString(name);
AddSection(image);
}


@ -23,12 +23,9 @@ class Elf : public ZoneAllocated {
public:
Elf(Zone* zone, StreamingWriteStream* stream);
static const intptr_t kPageSize = 4096;
intptr_t NextMemoryOffset();
intptr_t AddText(const char* name, const uint8_t* bytes, intptr_t size);
intptr_t AddROData(const char* name, const uint8_t* bytes, intptr_t size);
intptr_t AddBSSData(const char* name, intptr_t size);
void AddDebug(const char* name, const uint8_t* bytes, intptr_t size);
void Finalize();


@ -14,22 +14,7 @@ namespace dart {
DECLARE_FLAG(bool, disassemble_stubs);
#if !defined(DART_PRECOMPILED_RUNTIME) && !defined(TARGET_ARCH_DBC)
uword NativeCallbackTrampolines::TrampolineForId(int32_t callback_id) {
#if defined(DART_PRECOMPILER)
ASSERT(!Enabled());
UNREACHABLE();
#else
const intptr_t trampolines_per_page = NumCallbackTrampolinesPerPage();
const intptr_t page_index = callback_id / trampolines_per_page;
const uword entry_point = trampoline_pages_[page_index]->start();
return entry_point +
(callback_id % trampolines_per_page) *
compiler::StubCodeCompiler::kNativeCallbackTrampolineSize;
#endif
}
void NativeCallbackTrampolines::AllocateTrampoline() {
uword NativeCallbackTrampolines::AllocateTrampoline() {
#if defined(DART_PRECOMPILER)
ASSERT(!Enabled());
UNREACHABLE();
@ -91,11 +76,16 @@ void NativeCallbackTrampolines::AllocateTrampoline() {
}
#endif
next_callback_trampoline_ = memory->start();
trampolines_left_on_page_ = NumCallbackTrampolinesPerPage();
}
trampolines_left_on_page_--;
next_callback_id_++;
const uword entrypoint = next_callback_trampoline_;
next_callback_trampoline_ +=
compiler::StubCodeCompiler::kNativeCallbackTrampolineSize;
return entrypoint;
#endif // defined(DART_PRECOMPILER)
}
#endif // !defined(DART_PRECOMPILED_RUNTIME) && !defined(TARGET_ARCH_DBC)


@ -57,13 +57,11 @@ class NativeCallbackTrampolines : public ValueObject {
// Allocates a callback trampoline corresponding to the callback id
// 'next_callback_id()'. Returns an entrypoint to the trampoline.
void AllocateTrampoline();
// Get the entrypoint for a previously allocated callback ID.
uword TrampolineForId(int32_t callback_id);
uword AllocateTrampoline();
private:
MallocGrowableArray<VirtualMemory*> trampoline_pages_;
uword next_callback_trampoline_ = 0;
intptr_t trampolines_left_on_page_ = 0;
intptr_t next_callback_id_ = 0;


@ -556,11 +556,6 @@ const char* NameOfStubIsolateSpecificStub(ObjectStore* object_store,
void AssemblyImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
Zone* zone = Thread::Current()->zone();
#if defined(DART_PRECOMPILER)
const char* bss_symbol =
vm ? "_kDartVmSnapshotBss" : "_kDartIsolateSnapshotBss";
#endif
const char* instructions_symbol =
vm ? "_kDartVmSnapshotInstructions" : "_kDartIsolateSnapshotInstructions";
assembly_stream_.Print(".text\n");
@ -575,16 +570,8 @@ void AssemblyImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
// look like a HeapPage.
intptr_t instructions_length = next_text_offset_;
WriteWordLiteralText(instructions_length);
#if defined(DART_PRECOMPILER)
assembly_stream_.Print("%s %s - %s\n", kLiteralPrefix, bss_symbol,
instructions_symbol);
#else
WriteWordLiteralText(0); // No relocations.
#endif
intptr_t header_words = Image::kHeaderSize / sizeof(compiler::target::uword);
for (intptr_t i = Image::kHeaderFields; i < header_words; i++) {
for (intptr_t i = 1; i < header_words; i++) {
WriteWordLiteralText(0);
}
@ -592,7 +579,6 @@ void AssemblyImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
Object& owner = Object::Handle(zone);
String& str = String::Handle(zone);
PcDescriptors& descriptors = PcDescriptors::Handle(zone);
ObjectStore* object_store = Isolate::Current()->object_store();
@ -627,7 +613,6 @@ void AssemblyImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
const Instructions& insns = *data.insns_;
const Code& code = *data.code_;
descriptors = data.code_->pc_descriptors();
if (profile_writer_ != nullptr) {
const intptr_t offset = Image::kHeaderSize + text_offset;
@ -732,27 +717,7 @@ void AssemblyImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
ASSERT(Utils::IsAligned(entry, sizeof(uword)));
ASSERT(Utils::IsAligned(end, sizeof(uword)));
#if defined(DART_PRECOMPILER)
PcDescriptors::Iterator iterator(descriptors,
RawPcDescriptors::kBSSRelocation);
uword next_reloc_offset = iterator.MoveNext() ? iterator.PcOffset() : -1;
for (uword cursor = entry; cursor < end;
cursor += sizeof(compiler::target::uword*)) {
compiler::target::uword data =
*reinterpret_cast<compiler::target::uword*>(cursor);
if ((cursor - entry) == next_reloc_offset) {
assembly_stream_.Print("%s %s - (.) + %" Pd "\n", kLiteralPrefix,
bss_symbol, /*addend=*/data);
next_reloc_offset = iterator.MoveNext() ? iterator.PcOffset() : -1;
} else {
WriteWordLiteralText(data);
}
}
text_offset += end - entry;
#else
text_offset += WriteByteSequence(entry, end);
#endif
}
ASSERT((text_offset - instr_start) == insns.raw()->HeapSize());
@ -760,16 +725,6 @@ void AssemblyImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
FrameUnwindEpilogue();
#if defined(DART_PRECOMPILER)
assembly_stream_.Print(".bss\n");
assembly_stream_.Print("%s:\n", bss_symbol);
// Currently we only put one symbol in the data section, the address of
// DLRT_GetThreadForNativeCallback, which is populated when the snapshot is
// loaded.
WriteWordLiteralText(0);
#endif
#if defined(TARGET_OS_LINUX) || defined(TARGET_OS_ANDROID) || \
defined(TARGET_OS_FUCHSIA)
assembly_stream_.Print(".section .rodata\n");
@ -905,42 +860,24 @@ intptr_t BlobImageWriter::WriteByteSequence(uword start, uword end) {
}
void BlobImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
const intptr_t instructions_length = next_text_offset_;
#ifdef DART_PRECOMPILER
intptr_t segment_base = 0;
if (elf_ != nullptr) {
segment_base = elf_->NextMemoryOffset();
}
// Calculate the start of the BSS section based on the known size of the
// text section and page alignment.
intptr_t bss_base = 0;
if (elf_ != nullptr) {
bss_base =
Utils::RoundUp(segment_base + instructions_length, Elf::kPageSize);
}
#endif
// This header provides the gap to make the instructions snapshot look like a
// HeapPage.
intptr_t instructions_length = next_text_offset_;
instructions_blob_stream_.WriteWord(instructions_length);
#if defined(DART_PRECOMPILER)
instructions_blob_stream_.WriteWord(elf_ != nullptr ? bss_base - segment_base
: 0);
#else
instructions_blob_stream_.WriteWord(0); // No relocations.
#endif
intptr_t header_words = Image::kHeaderSize / sizeof(uword);
for (intptr_t i = Image::kHeaderFields; i < header_words; i++) {
for (intptr_t i = 1; i < header_words; i++) {
instructions_blob_stream_.WriteWord(0);
}
intptr_t text_offset = 0;
#if defined(DART_PRECOMPILER)
PcDescriptors& descriptors = PcDescriptors::Handle();
#endif
NoSafepointScope no_safepoint;
for (intptr_t i = 0; i < instructions_.length(); i++) {
auto& data = instructions_[i];
@ -991,8 +928,6 @@ void BlobImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
marked_tags |= static_cast<uword>(insns.raw_ptr()->hash_) << 32;
#endif
intptr_t payload_stream_start = 0;
#if defined(IS_SIMARM_X64)
const intptr_t start_offset = instructions_blob_stream_.bytes_written();
const intptr_t size_in_bytes = InstructionsSizeInSnapshot(insns.Size());
@ -1003,7 +938,6 @@ void BlobImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
instructions_blob_stream_.WriteFixed<uint32_t>(
insns.raw_ptr()->unchecked_entrypoint_pc_offset_);
instructions_blob_stream_.Align(kSimarmX64InstructionsAlignment);
payload_stream_start = instructions_blob_stream_.Position();
instructions_blob_stream_.WriteBytes(
reinterpret_cast<const void*>(insns.PayloadStart()), insns.Size());
instructions_blob_stream_.Align(kSimarmX64InstructionsAlignment);
@ -1011,61 +945,16 @@ void BlobImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
text_offset += (end_offset - start_offset);
USE(end);
#else // defined(IS_SIMARM_X64)
payload_stream_start = instructions_blob_stream_.Position() +
(insns.PayloadStart() - beginning);
instructions_blob_stream_.WriteWord(marked_tags);
text_offset += sizeof(uword);
beginning += sizeof(uword);
text_offset += WriteByteSequence(beginning, end);
#endif // defined(IS_SIMARM_X64)
#if defined(DART_PRECOMPILER)
// Don't patch the relocation if we're not generating ELF. The regular blobs
// format does not yet support these relocations. Use
// Code::VerifyBSSRelocations to check whether the relocations are patched
// or not after loading.
if (elf_ != nullptr) {
const intptr_t current_stream_position =
instructions_blob_stream_.Position();
descriptors = data.code_->pc_descriptors();
PcDescriptors::Iterator iterator(
descriptors, /*kind_mask=*/RawPcDescriptors::kBSSRelocation);
while (iterator.MoveNext()) {
const intptr_t reloc_offset = iterator.PcOffset();
// The instruction stream at the relocation position holds an offset
// into BSS corresponding to the symbol being resolved. This addend is
// factored into the relocation.
const auto addend = *reinterpret_cast<compiler::target::word*>(
insns.PayloadStart() + reloc_offset);
// Overwrite the relocation position in the instruction stream with the
// (positive) offset of the start of the payload from the start of the
// BSS segment plus the addend in the relocation.
instructions_blob_stream_.SetPosition(payload_stream_start +
reloc_offset);
const uword offset =
bss_base - (segment_base + payload_stream_start + reloc_offset) +
addend;
instructions_blob_stream_.WriteTargetWord(offset);
}
// Restore stream position after the relocation was patched.
instructions_blob_stream_.SetPosition(current_stream_position);
}
#endif
ASSERT((text_offset - instr_start) ==
ImageWriter::SizeInSnapshot(insns.raw()));
}
ASSERT(instructions_blob_stream_.bytes_written() == instructions_length);
#ifdef DART_PRECOMPILER
if (elf_ != nullptr) {
const char* instructions_symbol = vm ? "_kDartVmSnapshotInstructions"
@ -1074,9 +963,6 @@ void BlobImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
elf_->AddText(instructions_symbol, instructions_blob_stream_.buffer(),
instructions_blob_stream_.bytes_written());
ASSERT(segment_base == segment_base2);
const intptr_t real_bss_base = elf_->AddBSSData("_kDartVMBSSData", 8);
ASSERT(bss_base == real_bss_base);
}
#endif
}


@ -48,11 +48,6 @@ class Image : ValueObject {
return snapshot_size - kHeaderSize;
}
uword bss_offset() const {
return *(reinterpret_cast<const uword*>(raw_memory_) + 1);
}
static constexpr intptr_t kHeaderFields = 2;
static const intptr_t kHeaderSize = OS::kMaxPreferredCodeAlignment;
private:
@ -319,15 +314,8 @@ class AssemblyImageWriter : public ImageWriter {
void FrameUnwindPrologue();
void FrameUnwindEpilogue();
intptr_t WriteByteSequence(uword start, uword end);
#if defined(TARGET_ARCH_IS_64_BIT)
const char* kLiteralPrefix = ".quad";
#else
const char* kLiteralPrefix = ".long";
#endif
void WriteWordLiteralText(compiler::target::uword value) {
// Padding is helpful for comparing the .S with --disassemble.
#if defined(TARGET_ARCH_IS_64_BIT)
assembly_stream_.Print(".quad 0x%0.16" Px "\n", value);
#else


@ -35,6 +35,15 @@ class String;
#define NATIVE_ENTRY_FUNCTION(name) BootstrapNatives::DN_##name
#ifdef DEBUG
#define SET_NATIVE_RETVAL(args, value) \
RawObject* retval = value; \
ASSERT(retval->IsDartInstance()); \
arguments->SetReturnUnsafe(retval);
#else
#define SET_NATIVE_RETVAL(arguments, value) arguments->SetReturnUnsafe(value);
#endif
#define DEFINE_NATIVE_ENTRY(name, type_argument_count, argument_count) \
static RawObject* DN_Helper##name(Isolate* isolate, Thread* thread, \
Zone* zone, NativeArguments* arguments); \
@ -54,15 +63,9 @@ class String;
Isolate* isolate = thread->isolate(); \
TransitionGeneratedToVM transition(thread); \
StackZone zone(thread); \
/* Be careful holding return_value_unsafe without a handle here. */ \
/* A return of Object::sentinel means the return value has already */ \
/* been set. */ \
RawObject* return_value_unsafe = \
DN_Helper##name(isolate, thread, zone.GetZone(), arguments); \
if (return_value_unsafe != Object::sentinel().raw()) { \
ASSERT(return_value_unsafe->IsDartInstance()); \
arguments->SetReturnUnsafe(return_value_unsafe); \
} \
SET_NATIVE_RETVAL( \
arguments, \
DN_Helper##name(isolate, thread, zone.GetZone(), arguments)); \
DEOPTIMIZE_ALOT; \
} \
VERIFY_ON_TRANSITION; \


@ -12437,8 +12437,8 @@ RawInstructions* Instructions::New(intptr_t size,
result ^= raw;
result.SetSize(size);
result.SetHasSingleEntryPoint(has_single_entry_point);
result.set_unchecked_entrypoint_pc_offset(unchecked_entrypoint_pc_offset);
result.set_stats(nullptr);
result.set_unchecked_entrypoint_pc_offset(unchecked_entrypoint_pc_offset);
}
return result.raw();
}
@ -12641,8 +12641,6 @@ const char* PcDescriptors::KindAsStr(RawPcDescriptors::Kind kind) {
return "osr-entry ";
case RawPcDescriptors::kRewind:
return "rewind ";
case RawPcDescriptors::kBSSRelocation:
return "bss reloc ";
case RawPcDescriptors::kOther:
return "other ";
case RawPcDescriptors::kAnyKind:
@ -15168,25 +15166,6 @@ void Code::DumpSourcePositions() const {
reader.DumpSourcePositions(PayloadStart());
}
bool Code::VerifyBSSRelocations() const {
const auto& descriptors = PcDescriptors::Handle(pc_descriptors());
const auto& insns = Instructions::Handle(instructions());
PcDescriptors::Iterator iterator(descriptors,
RawPcDescriptors::kBSSRelocation);
while (iterator.MoveNext()) {
const uword reloc = insns.PayloadStart() + iterator.PcOffset();
const word target = *reinterpret_cast<word*>(reloc);
// The relocation is in its original unpatched form -- the addend
// representing the target symbol itself.
if (target >= 0 &&
target <
BSS::RelocationIndex(BSS::Relocation::NumRelocations) * kWordSize) {
return false;
}
}
return true;
}
void Bytecode::Disassemble(DisassemblyFormatter* formatter) const {
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER)
#if !defined(DART_PRECOMPILED_RUNTIME)


@ -5409,9 +5409,6 @@ class Code : public Object {
void Disassemble(DisassemblyFormatter* formatter = NULL) const;
// Returns true if all BSS relocations in the code have been patched.
bool VerifyBSSRelocations() const;
class Comments : public ZoneAllocated {
public:
static Comments& New(intptr_t count);


@ -1514,9 +1514,7 @@ class RawPcDescriptors : public RawObject {
V(OsrEntry, kRuntimeCall << 1) \
/* Call rewind target address. */ \
V(Rewind, kOsrEntry << 1) \
/* Target-word-size relocation. */ \
V(BSSRelocation, kRewind << 1) \
V(Other, kBSSRelocation << 1) \
V(Other, kRewind << 1) \
V(AnyKind, -1)
enum Kind {


@ -117,7 +117,6 @@ class ObjectPointerVisitor;
V(ExternalTwoByteString, "_ExternalTwoByteString") \
V(FactoryResult, "factory result") \
V(FallThroughError, "FallThroughError") \
V(FfiCallback, "_FfiCallback") \
V(FfiDouble, "Double") \
V(FfiDynamicLibrary, "DynamicLibrary") \
V(FfiFloat, "Float") \


@ -941,7 +941,7 @@ DisableThreadInterruptsScope::~DisableThreadInterruptsScope() {
}
const intptr_t kInitialCallbackIdsReserved = 1024;
int32_t Thread::AllocateFfiCallbackId() {
int32_t Thread::AllocateFfiCallbackId(uword* trampoline) {
Zone* Z = isolate()->current_zone();
if (ffi_callback_code_ == GrowableObjectArray::null()) {
ffi_callback_code_ = GrowableObjectArray::New(kInitialCallbackIdsReserved);
@ -955,7 +955,7 @@ int32_t Thread::AllocateFfiCallbackId() {
if (NativeCallbackTrampolines::Enabled()) {
auto* const tramps = isolate()->native_callback_trampolines();
ASSERT(tramps->next_callback_id() == id);
tramps->AllocateTrampoline();
*trampoline = tramps->AllocateTrampoline();
}
#endif
@ -964,25 +964,7 @@ int32_t Thread::AllocateFfiCallbackId() {
void Thread::SetFfiCallbackCode(int32_t callback_id, const Code& code) {
Zone* Z = isolate()->current_zone();
/// In AOT the callback ID might have been allocated during compilation but
/// 'ffi_callback_code_' is initialized to empty again when the program
/// starts. Therefore we may need to initialize or expand it to accommodate
/// the callback ID.
if (ffi_callback_code_ == GrowableObjectArray::null()) {
ffi_callback_code_ = GrowableObjectArray::New(kInitialCallbackIdsReserved);
}
const auto& array = GrowableObjectArray::Handle(Z, ffi_callback_code_);
if (callback_id >= array.Length()) {
if (callback_id >= array.Capacity()) {
array.Grow(callback_id + 1);
}
array.SetLength(callback_id + 1);
}
array.SetAt(callback_id, code);
}


@ -14,7 +14,6 @@
#include "platform/atomic.h"
#include "platform/safe_stack.h"
#include "vm/bitfield.h"
#include "vm/compiler/runtime_api.h"
#include "vm/constants.h"
#include "vm/globals.h"
#include "vm/handles.h"
@ -24,6 +23,7 @@
#include "vm/runtime_entry_list.h"
#include "vm/thread_stack_resource.h"
#include "vm/thread_state.h"
namespace dart {
class AbstractType;
@ -786,11 +786,7 @@ class Thread : public ThreadState {
}
}
int32_t AllocateFfiCallbackId();
// Store 'code' for the native callback identified by 'callback_id'.
//
// Expands the callback code array as necessary to accommodate the callback ID.
int32_t AllocateFfiCallbackId(uword* trampoline);
void SetFfiCallbackCode(int32_t callback_id, const Code& code);
// Ensure that 'callback_id' refers to a valid callback in this isolate.


@ -19,8 +19,6 @@ vm_sources = [
"bootstrap.h",
"bootstrap_natives.cc",
"bootstrap_natives.h",
"bss_relocs.cc",
"bss_relocs.h",
"class_finalizer.cc",
"class_finalizer.h",
"class_id.h",


@ -58,8 +58,7 @@ class Pointer<T extends NativeType> extends NativeType {
/// Does not accept dynamic invocations -- where the type of the receiver is
/// [dynamic].
external static Pointer<NativeFunction<T>> fromFunction<T extends Function>(
@DartRepresentationOf("T") Function f,
[Object exceptionalReturn]);
@DartRepresentationOf("T") Function f, Object exceptionalReturn);
/// Store a Dart value into this location.
///


@ -58,8 +58,7 @@ class Pointer<T extends NativeType> extends NativeType {
/// Does not accept dynamic invocations -- where the type of the receiver is
/// [dynamic].
external static Pointer<NativeFunction<T>> fromFunction<T extends Function>(
@DartRepresentationOf("T") Function f,
[Object exceptionalReturn]);
@DartRepresentationOf("T") Function f, Object exceptionalReturn);
/// Store a Dart value into this location.
///
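
With the declaration restored above, exceptionalReturn is again a required positional parameter (the reverted change had made it optional); a minimal usage sketch with hypothetical names:

import 'dart:ffi';

typedef NativeHandler = Int32 Function(Int32);

int handle(int code) => code;

void main() {
  // The second positional argument must always be supplied with this
  // signature; the reverted form allowed omitting it.
  final handler = Pointer.fromFunction<NativeHandler>(handle, 0);
  print(handler);
}
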


@ -5,15 +5,6 @@
[ $arch == simdbc || $arch == simdbc64 ]
*: Skip # SIMDBC will be deleted soon.
# Issue 37295: not yet supported in blob snapshots.
[ $compiler == dartkp && $system == windows ]
function_callbacks_test: Skip
regress_37511_callbacks_test: Skip
stacktrace_regress_37910_test: Skip
[ $compiler != dartkp || $system != windows ]
function_callbacks_unsupported_test: SkipByDesign # See above
[ $builder_tag == asan ]
data_not_asan_test: SkipByDesign # This test tries to allocate too much memory on purpose.
@ -25,6 +16,11 @@ function_callbacks_test/01: Skip
function_callbacks_test/02: Skip
function_callbacks_test/03: Skip
[ $runtime == dart_precompiled ]
function_callbacks_test: Skip # Issue dartbug.com/37295
regress_37511_callbacks_test: Skip # Issue dartbug.com/37295
stacktrace_regress_37910_test: Skip # Issue dartbug.com/37295
[ $arch == arm && $system != android ]
*: Skip # "hardfp" calling convention is not yet supported (iOS is also supported but not tested): dartbug.com/36309

File diff suppressed because it is too large.


@ -1,19 +0,0 @@
// Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
//
// Dart test program for testing that FFI callbacks report an appropriate
// runtime error for unsupported snapshot formats.
import 'dart:ffi';
import 'package:expect/expect.dart';
bool checkError(UnsupportedError err) {
return "$err".contains("callbacks are not yet supported in blobs");
}
void main() {
Expect.throws<UnsupportedError>(
() => Pointer.fromFunction<Void Function()>(main), checkError);
}


@ -36,7 +36,6 @@ void main() {
testFromFunctionTypeMismatch();
testFromFunctionClosure();
testFromFunctionTearOff();
testFromFunctionAbstract();
testLookupFunctionGeneric();
testLookupFunctionGeneric2();
testLookupFunctionWrongNativeFunctionSignature();
@ -249,10 +248,6 @@ void testFromFunctionTearOff() {
p = ffi.fromFunction(fld); //# 75: compile-time error
}
void testFromFunctionAbstract() {
ffi.Pointer.fromFunction<Function>(testFromFunctionAbstract); //# 76: compile-time error
}
void testLookupFunctionGeneric() {
Function generic<T extends Function>() {
ffi.DynamicLibrary l = dlopenPlatformSpecific("ffi_test_dynamic_library");


@ -35,5 +35,5 @@ MINOR 6
PATCH 0
PRERELEASE 0
PRERELEASE_PATCH 0
ABI_VERSION 14
OLDEST_SUPPORTED_ABI_VERSION 13
ABI_VERSION 15
OLDEST_SUPPORTED_ABI_VERSION 15