[vm/ffi] Use untagged pointer representations for FFI pointers.

Previously, the FFI used unboxed integers as the native representation
for pointers in FFI code, because the compiler only handled very
specific uses of untagged pointers flowing between instructions. That
restriction has since been lifted for untagged pointers that do not
point to GC-managed memory, such as FFI pointers, so they can now have
a more precise representation.
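
As an illustration (a sketch, not code from this CL; it assumes a
dependency on package:ffi for calloc), the deref pattern exercised by
the vm/dart/address_local_pointer_il test below now keeps the pointer
loaded from PointerBase.data untagged all the way to the element load:

  import 'dart:ffi';
  import 'package:ffi/ffi.dart'; // assumed dependency, for calloc

  // The shape matched by matchIL$deref in the first hunk below: the
  // untagged pointer loaded from PointerBase.data feeds the load of
  // the pointed-to value directly.
  int deref(Pointer<Int32> ptr) => ptr.value;

  void main() {
    final ptr = calloc<Int32>()..value = 42;
    print(deref(ptr)); // 42
    calloc.free(ptr);
  }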

By being precise about where untagged values (pointers to freshly
allocated Handles and the contents of Pointer data fields) and tagged
values (TypedData objects constructed to hold the byte representation
of compound data) are expected, we avoid both creating untagged
pointers to GC-movable objects and letting untagged pointers escape as
unboxed integers in the generated IL.

This CL also renames kUnboxedFfiIntPtr -> kUnboxedAddress and limits
its use to cases where the unboxed integer is the numeric value of an
untagged pointer.
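
For example (a sketch, not from this CL), the zero-extension that
kUnboxedAddress guarantees is visible at the Dart level whenever an
address round-trips through a Pointer:

  import 'dart:ffi';

  void main() {
    // On a 32-bit target the address is stored as a uint32; reading it
    // back zero-extends, so a high-half address stays a positive int.
    final p = Pointer<Void>.fromAddress(0xFFFFFF00);
    print(p.address); // 4294967040, never a negative sign-extended value
  }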

This CL changes CCall to take Representations for the arguments and
return value instead of a seemingly arbitrary NativeCallingConvention.
The serializer and deserializer for CCall, used in IL tests, previously
assumed that the argument and return representations were all
kUnboxedFfiIntPtr, so providing a NativeCallingConvention that didn't
match that assumption would cause failures during IL tests. The
assumption held because the only creator of CCall instructions was in
kernel_to_il.cc, where it was always the case.

Now CCall builds the native calling convention during construction and
deserialization from the argument and return representations, and it
stores both the representations and the built calling convention
internally. If we later want to create CCall instructions with more
arbitrary native calling conventions, we'll need to handle
serialization and deserialization of those conventions and add
consistency checks that the provided representations match the native
calling convention.
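
A toy Dart model (illustrative names only, not the VM's C++ classes) of
the resulting invariant: the calling convention is derived from the
representations at construction, so the serialized representations and
the rebuilt convention can never disagree:

  enum Rep { untagged, unboxedAddress, unboxedInt64 }

  class CallingConvention {
    final List<String> argumentLocations;
    // Derived deterministically from the representations, mirroring
    // NativeCallingConvention::FromSignature in the hunks below.
    CallingConvention.from(List<Rep> args)
        : argumentLocations = [for (final r in args) 'loc(${r.name})'];
  }

  class CCall {
    final Rep returnRep;
    final List<Rep> argumentReps; // serialized
    final CallingConvention convention; // rebuilt, never serialized

    CCall(this.returnRep, this.argumentReps)
        : convention = CallingConvention.from(argumentReps);
  }

  void main() {
    final call = CCall(Rep.untagged, [Rep.unboxedAddress]);
    print(call.convention.argumentLocations); // [loc(unboxedAddress)]
  }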

TEST=ffi vm/dart/regress_306327173_il vm/dart/address_local_pointer_il

Issue: https://github.com/dart-lang/sdk/issues/54710
Cq-Include-Trybots: luci.dart.try:vm-aot-android-release-arm64c-try,vm-aot-android-release-arm_x64-try,vm-aot-linux-debug-x64-try,vm-aot-linux-debug-x64c-try,vm-aot-mac-release-arm64-try,vm-aot-mac-release-x64-try,vm-aot-obfuscate-linux-release-x64-try,vm-aot-optimization-level-linux-release-x64-try,vm-aot-win-debug-arm64-try,vm-appjit-linux-debug-x64-try,vm-asan-linux-release-x64-try,vm-checked-mac-release-arm64-try,vm-eager-optimization-linux-release-ia32-try,vm-eager-optimization-linux-release-x64-try,vm-ffi-android-debug-arm-try,vm-ffi-android-debug-arm64c-try,vm-ffi-qemu-linux-release-arm-try,vm-ffi-qemu-linux-release-riscv64-try,vm-fuchsia-release-x64-try,vm-linux-debug-ia32-try,vm-linux-debug-x64-try,vm-linux-debug-x64c-try,vm-mac-debug-arm64-try,vm-mac-debug-x64-try,vm-msan-linux-release-x64-try,vm-reload-linux-debug-x64-try,vm-reload-rollback-linux-debug-x64-try,vm-ubsan-linux-release-x64-try,vm-win-debug-arm64-try,vm-win-debug-x64-try,vm-win-release-ia32-try
Change-Id: I34effe8fbdc80288b703e0152d5ba67ce2343400
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/353101
Reviewed-by: Daco Harkes <dacoharkes@google.com>
Commit-Queue: Tess Strickland <sstrickl@google.com>
Tess Strickland authored on 2024-03-22 10:20:48 +00:00; committed by Commit Queue
commit 1cebdcb2da (parent a6a3434404)
26 changed files with 594 additions and 646 deletions

@ -26,8 +26,8 @@ void matchIL$deref(FlowGraph graph) {
'ptr' << match.Parameter(index: 0),
'array' << match.LoadField('ptr', slot: 'PointerBase.data'),
'unboxed' << match.LoadIndexed('array', 'c0'),
// 'unboxed' is a kUnboxedFfiIntPtr, which is uint32 on 32-bit archs
// and int64 on 64-bit arches.
// 'unboxed' is uint32 on 32-bit archs and int64 on 64-bit archs, so
// that 32-bit addresses are zero-extended to a 64-bit Dart integer.
if (is32BitConfiguration) ...[
// 'unboxed' needs to be converted to int64 before returning.
'address' << match.IntConverter('unboxed', from: 'uint32', to: 'int64'),

@ -3436,8 +3436,7 @@ void FlowGraphCompiler::EmitMoveConst(const compiler::ffi::NativeLocation& dst,
if (src.IsPairLocation()) {
for (intptr_t i : {0, 1}) {
const Representation src_type_split =
compiler::ffi::NativeType::FromUnboxedRepresentation(zone_,
src_type)
compiler::ffi::NativeType::FromRepresentation(zone_, src_type)
.Split(zone_, i)
.AsRepresentation();
const auto& intermediate_native =

@ -3434,7 +3434,7 @@ Definition* IntConverterInstr::Canonicalize(FlowGraph* flow_graph) {
auto src_defn = first_converter->value()->definition();
if (intermediate_rep == kUntagged) {
// Both conversions are no-ops, as the other representations must be
// either kUnboxedIntPtr or kUnboxedFfiIntPtr.
// kUnboxedIntPtr.
} else if (!Range::Fits(src_defn->range(), intermediate_rep)) {
return this;
}
@ -7283,14 +7283,20 @@ void BitCastInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
Representation FfiCallInstr::RequiredInputRepresentation(intptr_t idx) const {
if (idx < TargetAddressIndex()) {
// All input handles are passed as Tagged values on the stack to
// FfiCallInstr, which passes "handles", i.e. pointers, to these.
// All input handles are passed as tagged values to FfiCallInstr and
// are given stack locations. FfiCallInstr then passes an untagged pointer
// to the handle on the stack (Dart_Handle) to the C function.
if (marshaller_.IsHandle(marshaller_.ArgumentIndex(idx))) {
return kTagged;
}
return marshaller_.RepInFfiCall(idx);
} else if (idx == TargetAddressIndex()) {
return kUnboxedFfiIntPtr;
#if defined(DEBUG)
auto const rep =
InputAt(TargetAddressIndex())->definition()->representation();
ASSERT(rep == kUntagged || rep == kUnboxedAddress);
#endif
return kNoRepresentation; // Allows kUntagged or kUnboxedAddress.
} else {
ASSERT(idx == CompoundReturnTypedDataIndex());
return kTagged;
@ -7338,12 +7344,8 @@ LocationSummary* FfiCallInstr::MakeLocationSummaryInternal(
if (marshaller_.ReturnsCompound()) {
summary->set_in(CompoundReturnTypedDataIndex(), Location::Any());
// We don't care about return location, but we need to pass a register.
summary->set_out(
0, Location::RegisterLocation(CallingConventions::kReturnReg));
} else {
summary->set_out(0, marshaller_.LocInFfiCall(compiler::ffi::kResultIndex));
}
summary->set_out(0, marshaller_.LocInFfiCall(compiler::ffi::kResultIndex));
return summary;
}
@ -7393,10 +7395,7 @@ void FfiCallInstr::EmitParamMoves(FlowGraphCompiler* compiler,
for (intptr_t i = 0; i < num_defs; i++) {
__ Comment(" def_index %" Pd, def_index);
const Location origin = rebase.Rebase(locs()->in(def_index));
const Representation origin_rep =
RequiredInputRepresentation(def_index) == kTagged
? kUnboxedFfiIntPtr // When arg_target.IsPointerToMemory().
: RequiredInputRepresentation(def_index);
const Representation origin_rep = RequiredInputRepresentation(def_index);
// Find the native location where this individual definition should be
// moved to.
@ -7418,11 +7417,13 @@ void FfiCallInstr::EmitParamMoves(FlowGraphCompiler* compiler,
origin.AsPairLocation()->At(1).IsConstant())) {
// Note: half of the pair can be constant.
__ Comment("origin.IsPairLocation() and constant");
ASSERT(!marshaller_.IsHandle(arg_index));
compiler->EmitMoveConst(def_target, origin, origin_rep, &temp_alloc);
} else if (marshaller_.IsHandle(arg_index)) {
__ Comment("marshaller_.IsHandle(arg_index)");
// Handles are passed into FfiCalls as Tagged values on the stack, and
// then we pass pointers to these handles to the native function here.
ASSERT(origin_rep == kTagged);
ASSERT(compiler::target::LocalHandle::ptr_offset() == 0);
ASSERT(compiler::target::LocalHandle::InstanceSize() ==
compiler::target::kWordSize);
@ -7472,7 +7473,7 @@ void FfiCallInstr::EmitParamMoves(FlowGraphCompiler* compiler,
const auto& pointer_loc =
arg_target.AsPointerToMemory().pointer_location();
// TypedData/Pointer data pointed to in temp.
// TypedData data pointed to in temp.
const auto& dst = compiler::ffi::NativeRegistersLocation(
compiler->zone(), pointer_loc.payload_type(),
pointer_loc.container_type(), temp0);
@ -7520,10 +7521,8 @@ void FfiCallInstr::EmitReturnMoves(FlowGraphCompiler* compiler,
const Location dst_loc = locs()->out(0);
const Representation dst_type = representation();
compiler->EmitMoveFromNative(dst_loc, dst_type, src, &no_temp);
} else if (returnLocation.IsPointerToMemory() ||
returnLocation.IsMultiple()) {
} else if (marshaller_.ReturnsCompound()) {
ASSERT(returnLocation.payload_type().IsCompound());
ASSERT(marshaller_.ReturnsCompound());
// Get the typed data pointer which we have pinned to a stack slot.
const Location typed_data_loc = locs()->in(CompoundReturnTypedDataIndex());
@ -7551,11 +7550,8 @@ void FfiCallInstr::EmitReturnMoves(FlowGraphCompiler* compiler,
// TypedData in IL.
const intptr_t sp_offset =
marshaller_.PassByPointerStackOffset(compiler::ffi::kResultIndex);
for (intptr_t i = 0; i < marshaller_.CompoundReturnSizeInBytes();
i += compiler::target::kWordSize) {
__ LoadMemoryValue(temp1, SPREG, i + sp_offset);
__ StoreMemoryValue(temp1, temp0, i);
}
__ UnrolledMemCopy(temp0, 0, SPREG, sp_offset,
marshaller_.CompoundReturnSizeInBytes(), temp1);
} else {
ASSERT(returnLocation.IsMultiple());
// Copy to the struct from the native locations.
@ -7900,6 +7896,11 @@ Representation FfiCallInstr::representation() const {
// Don't care, we're discarding the value.
return kTagged;
}
if (marshaller_.IsHandle(compiler::ffi::kResultIndex)) {
// The call returns a Dart_Handle, from which we need to extract the
// tagged pointer using RawLoadField.
return kUntagged;
}
return marshaller_.RepInFfiCall(compiler::ffi::kResultIndex);
}
@ -7952,26 +7953,33 @@ LocationSummary* CCallInstr::MakeLocationSummaryInternal(
}
CCallInstr::CCallInstr(
Representation return_representation,
const ZoneGrowableArray<Representation>& argument_representations,
const compiler::ffi::NativeCallingConvention& native_calling_convention,
InputsArray&& inputs)
: VariadicDefinition(std::move(inputs), DeoptId::kNone),
return_representation_(return_representation),
argument_representations_(argument_representations),
native_calling_convention_(native_calling_convention) {
#ifdef DEBUG
const intptr_t num_inputs =
native_calling_convention.argument_locations().length() + 1;
ASSERT(num_inputs == InputCount());
#if defined(DEBUG)
const intptr_t num_inputs = argument_representations.length() + 1;
ASSERT_EQUAL(InputCount(), num_inputs);
#endif
}
Representation CCallInstr::RequiredInputRepresentation(intptr_t idx) const {
if (idx < native_calling_convention_.argument_locations().length()) {
const auto& argument_type =
native_calling_convention_.argument_locations().At(idx)->payload_type();
ASSERT(argument_type.IsExpressibleAsRepresentation());
return argument_type.AsRepresentation();
}
ASSERT(idx == TargetAddressIndex());
return kUnboxedFfiIntPtr;
CCallInstr* CCallInstr::Make(
Zone* zone,
Representation return_representation,
const ZoneGrowableArray<Representation>& argument_representations,
InputsArray&& inputs) {
const auto& native_function_type =
*compiler::ffi::NativeFunctionType::FromRepresentations(
zone, return_representation, argument_representations);
const auto& native_calling_convention =
compiler::ffi::NativeCallingConvention::FromSignature(
zone, native_function_type);
return new (zone) CCallInstr(return_representation, argument_representations,
native_calling_convention, std::move(inputs));
}
void CCallInstr::EmitParamMoves(FlowGraphCompiler* compiler,
@ -8015,13 +8023,6 @@ void CCallInstr::EmitParamMoves(FlowGraphCompiler* compiler,
__ Comment("EmitParamMovesEnd");
}
Representation CCallInstr::representation() const {
const auto& return_type =
native_calling_convention_.return_location().payload_type();
ASSERT(return_type.IsExpressibleAsRepresentation());
return return_type.AsRepresentation();
}
// SIMD
SimdOpInstr::Kind SimdOpInstr::KindForOperator(MethodRecognizer::Kind kind) {

@ -5604,6 +5604,7 @@ class CachableIdempotentCallInstr : public TemplateDartCall<0> {
// (Right now the inputs are eagerly pushed and therefore have to be also
// poped on the fast path.)
CachableIdempotentCallInstr(const InstructionSource& source,
Representation representation,
const Function& function,
intptr_t type_args_len,
const Array& argument_names,
@ -5614,9 +5615,13 @@ class CachableIdempotentCallInstr : public TemplateDartCall<0> {
argument_names,
std::move(arguments),
source),
representation_(representation),
function_(function),
identity_(AliasIdentity::Unknown()) {
DEBUG_ASSERT(function.IsNotTemporaryScopedHandle());
// We use kUntagged for the internal use in FfiNativeLookupAddress
// and kUnboxedAddress for pragma-annotated functions.
ASSERT(representation == kUntagged || representation == kUnboxedAddress);
ASSERT(AbstractType::Handle(function.result_type()).IsIntType());
ASSERT(!function.IsNull());
#if defined(TARGET_ARCH_IA32)
@ -5656,11 +5661,7 @@ class CachableIdempotentCallInstr : public TemplateDartCall<0> {
virtual Representation RequiredInputRepresentation(intptr_t idx) const;
virtual Representation representation() const {
// If other representations are supported in the future, the location
// summary needs to be updated as well to stay consistent with static calls.
return kUnboxedFfiIntPtr;
}
virtual Representation representation() const { return representation_; }
virtual AliasIdentity Identity() const { return identity_; }
virtual void SetIdentity(AliasIdentity identity) { identity_ = identity; }
@ -5668,6 +5669,7 @@ class CachableIdempotentCallInstr : public TemplateDartCall<0> {
PRINT_OPERANDS_TO_SUPPORT
#define FIELD_LIST(F) \
F(const Representation, representation_) \
F(const Function&, function_) \
F(AliasIdentity, identity_)
@ -6039,8 +6041,10 @@ class FfiCallInstr : public VariadicDefinition {
// Has the target address in a register passed as the last input in IL.
class CCallInstr : public VariadicDefinition {
public:
CCallInstr(
const compiler::ffi::NativeCallingConvention& native_calling_convention,
static CCallInstr* Make(
Zone* zone,
Representation return_representation,
const ZoneGrowableArray<Representation>& argument_representations,
InputsArray&& inputs);
DECLARE_INSTRUCTION(CCall)
@ -6050,7 +6054,7 @@ class CCallInstr : public VariadicDefinition {
// Input index of the function pointer to invoke.
intptr_t TargetAddressIndex() const {
return native_calling_convention_.argument_locations().length();
return argument_representations_.length();
}
virtual bool MayThrow() const { return false; }
@ -6061,8 +6065,23 @@ class CCallInstr : public VariadicDefinition {
virtual bool CanCallDart() const { return false; }
virtual Representation RequiredInputRepresentation(intptr_t idx) const;
virtual Representation representation() const;
virtual Representation RequiredInputRepresentation(intptr_t idx) const {
if (idx < argument_representations_.length()) {
return argument_representations_.At(idx);
}
ASSERT_EQUAL(idx, TargetAddressIndex());
return kUntagged;
}
virtual Representation representation() const {
return return_representation_;
}
virtual CompileType ComputeType() const {
return RepresentationUtils::IsUnboxed(representation())
? CompileType::FromUnboxedRepresentation(representation())
: CompileType::Object();
}
void EmitParamMoves(FlowGraphCompiler* compiler,
Register saved_fp,
@ -6070,15 +6089,20 @@ class CCallInstr : public VariadicDefinition {
PRINT_OPERANDS_TO_SUPPORT
#define FIELD_LIST(F) \
F(const compiler::ffi::NativeCallingConvention&, native_calling_convention_)
DECLARE_INSTRUCTION_SERIALIZABLE_FIELDS(CCallInstr,
VariadicDefinition,
FIELD_LIST)
#undef FIELD_LIST
DECLARE_CUSTOM_SERIALIZATION(CCallInstr)
private:
CCallInstr(
Representation return_representation,
const ZoneGrowableArray<Representation>& argument_representations,
const compiler::ffi::NativeCallingConvention& native_calling_convention,
InputsArray&& inputs);
// Serialized in the custom serializer.
const Representation return_representation_;
const ZoneGrowableArray<Representation>& argument_representations_;
// Not serialized.
const compiler::ffi::NativeCallingConvention& native_calling_convention_;
DISALLOW_COPY_AND_ASSIGN(CCallInstr);
};
@ -6649,16 +6673,7 @@ class LoadIndexedInstr : public TemplateDefinition<2, NoThrow> {
// The array may be tagged or untagged (for external arrays).
if (idx == kArrayPos) return kNoRepresentation;
ASSERT_EQUAL(idx, kIndexPos);
if (index_unboxed_) {
#if defined(TARGET_ARCH_IS_64_BIT)
return kUnboxedInt64;
#else
return kUnboxedUint32;
#endif
} else {
return kTagged; // Index is a smi.
}
return index_unboxed_ ? kUnboxedIntPtr : kTagged;
}
bool IsUntagged() const {
@ -10782,10 +10797,9 @@ class IntConverterInstr : public TemplateDefinition<1, NoThrow, Pure> {
from == kUnboxedInt32 || from == kUntagged);
ASSERT(to == kUnboxedInt64 || to == kUnboxedUint32 || to == kUnboxedInt32 ||
to == kUntagged);
ASSERT(from != kUntagged ||
(to == kUnboxedIntPtr || to == kUnboxedFfiIntPtr));
ASSERT(to != kUntagged ||
(from == kUnboxedIntPtr || from == kUnboxedFfiIntPtr));
ASSERT(from != kUntagged || to == kUnboxedIntPtr || to == kUnboxedAddress);
ASSERT(to != kUntagged || from == kUnboxedIntPtr ||
from == kUnboxedAddress);
SetInputAt(0, value);
}

@ -46,6 +46,7 @@ LocationSummary* Instruction::MakeCallSummary(Zone* zone,
const auto representation = instr->representation();
switch (representation) {
case kTagged:
case kUntagged:
case kUnboxedUint32:
case kUnboxedInt32:
result->set_out(

@ -44,6 +44,7 @@ LocationSummary* Instruction::MakeCallSummary(Zone* zone,
const auto representation = instr->representation();
switch (representation) {
case kTagged:
case kUntagged:
case kUnboxedInt64:
result->set_out(
0, Location::RegisterLocation(CallingConventions::kReturnReg));

@ -1409,18 +1409,11 @@ void FfiCallInstr::PrintOperandsTo(BaseTextBuffer* f) const {
}
void CCallInstr::PrintOperandsTo(BaseTextBuffer* f) const {
f->AddString(" target_address=");
f->AddString("target_address=");
InputAt(TargetAddressIndex())->PrintTo(f);
const auto& argument_locations =
native_calling_convention_.argument_locations();
for (intptr_t i = 0; i < argument_locations.length(); i++) {
const auto& arg_location = *argument_locations.At(i);
for (intptr_t i = 0, n = argument_representations_.length(); i < n; ++i) {
f->AddString(", ");
InputAt(i)->PrintTo(f);
f->AddString(" (@");
arg_location.PrintTo(f);
f->AddString(")");
}
}

@ -44,6 +44,7 @@ LocationSummary* Instruction::MakeCallSummary(Zone* zone,
const auto representation = instr->representation();
switch (representation) {
case kTagged:
case kUntagged:
case kUnboxedUint32:
case kUnboxedInt32:
result->set_out(

@ -4,6 +4,7 @@
#include "vm/compiler/backend/il_serializer.h"
#include "vm/class_id.h"
#include "vm/closure_functions_cache.h"
#if defined(DART_PRECOMPILER)
#include "vm/compiler/aot/precompiler.h"
@ -1376,37 +1377,6 @@ void MoveOperands::Write(FlowGraphSerializer* s) const {
MoveOperands::MoveOperands(FlowGraphDeserializer* d)
: dest_(Location::Read(d)), src_(Location::Read(d)) {}
template <>
void FlowGraphSerializer::
WriteTrait<const compiler::ffi::NativeCallingConvention&>::Write(
FlowGraphSerializer* s,
const compiler::ffi::NativeCallingConvention& x) {
// A subset of NativeCallingConvention currently used by CCallInstr.
const auto& args = x.argument_locations();
for (intptr_t i = 0, n = args.length(); i < n; ++i) {
if (args.At(i)->payload_type().AsRepresentation() != kUnboxedFfiIntPtr) {
UNIMPLEMENTED();
}
}
if (x.return_location().payload_type().AsRepresentation() !=
kUnboxedFfiIntPtr) {
UNIMPLEMENTED();
}
s->Write<intptr_t>(args.length());
}
template <>
const compiler::ffi::NativeCallingConvention& FlowGraphDeserializer::ReadTrait<
const compiler::ffi::NativeCallingConvention&>::Read(FlowGraphDeserializer*
d) {
const intptr_t num_args = d->Read<intptr_t>();
const auto& native_function_type =
*compiler::ffi::NativeFunctionType::FromUnboxedRepresentation(
d->zone(), num_args, kUnboxedFfiIntPtr);
return compiler::ffi::NativeCallingConvention::FromSignature(
d->zone(), native_function_type);
}
template <>
void FlowGraphSerializer::WriteTrait<const Object&>::Write(
FlowGraphSerializer* s,
@ -2194,6 +2164,25 @@ PhiInstr::PhiInstr(FlowGraphDeserializer* d)
is_alive_(d->Read<bool>()),
is_receiver_(d->Read<int8_t>()) {}
void CCallInstr::WriteTo(FlowGraphSerializer* s) {
VariadicDefinition::WriteTo(s);
s->Write<Representation>(return_representation_);
s->Write<const ZoneGrowableArray<Representation>&>(argument_representations_);
}
CCallInstr::CCallInstr(FlowGraphDeserializer* d)
: VariadicDefinition(d),
return_representation_(d->Read<Representation>()),
argument_representations_(
d->Read<const ZoneGrowableArray<Representation>&>()),
native_calling_convention_(
compiler::ffi::NativeCallingConvention::FromSignature(
d->zone(),
*compiler::ffi::NativeFunctionType::FromRepresentations(
d->zone(),
return_representation_,
argument_representations_))) {}
template <>
void FlowGraphSerializer::WriteTrait<Range*>::Write(FlowGraphSerializer* s,
Range* x) {

@ -102,7 +102,6 @@ class NativeCallingConvention;
V(LocationSummary*) \
V(MoveOperands*) \
V(const MoveSchedule*) \
V(const compiler::ffi::NativeCallingConvention&) \
V(const Object&) \
V(ParallelMoveInstr*) \
V(PhiInstr*) \

@ -918,12 +918,12 @@ ISOLATE_UNIT_TEST_CASE(IRTest_LoadThread) {
auto load_thread_value = Value(load_thread_instr);
auto* const convert_instr = new (zone) IntConverterInstr(
kUntagged, kUnboxedFfiIntPtr, &load_thread_value, DeoptId::kNone);
kUntagged, kUnboxedAddress, &load_thread_value, DeoptId::kNone);
flow_graph->InsertBefore(return_instr, convert_instr, nullptr,
FlowGraph::kValue);
auto convert_value = Value(convert_instr);
auto* const box_instr = BoxInstr::Create(kUnboxedFfiIntPtr, &convert_value);
auto* const box_instr = BoxInstr::Create(kUnboxedAddress, &convert_value);
flow_graph->InsertBefore(return_instr, box_instr, nullptr, FlowGraph::kValue);
return_instr->InputAt(0)->definition()->ReplaceUsesWith(box_instr);
@ -1016,8 +1016,9 @@ ISOLATE_UNIT_TEST_CASE(IRTest_CachableIdempotentCall) {
InputsArray args;
CachableIdempotentCallInstr* call = new CachableIdempotentCallInstr(
InstructionSource(), increment_function, static_call->type_args_len(),
Array::empty_array(), std::move(args), DeoptId::kNone);
InstructionSource(), kUnboxedAddress, increment_function,
static_call->type_args_len(), Array::empty_array(), std::move(args),
DeoptId::kNone);
static_call->ReplaceWith(call, nullptr);
pipeline.RunForcedOptimizedAfterSSAPasses();
@ -1056,25 +1057,13 @@ ISOLATE_UNIT_TEST_CASE(IRTest_CachableIdempotentCall) {
// Helper to set up an inlined FfiCall by replacing a StaticCall.
FlowGraph* SetupFfiFlowgraph(TestPipeline* pipeline,
Zone* zone,
const compiler::ffi::CallMarshaller& marshaller,
uword native_entry,
bool is_leaf) {
FlowGraph* flow_graph = pipeline->RunPasses({CompilerPass::kComputeSSA});
// Make an FfiCall based on ffi_trampoline that calls our native function.
auto ffi_call = new FfiCallInstr(DeoptId::kNone, marshaller, is_leaf);
RELEASE_ASSERT(ffi_call->InputCount() == 1);
// TargetAddress is the function pointer called.
const Representation address_repr =
compiler::target::kWordSize == 4 ? kUnboxedUint32 : kUnboxedInt64;
ffi_call->SetInputAt(
ffi_call->TargetAddressIndex(),
new Value(flow_graph->GetConstant(
Integer::Handle(Integer::NewCanonical(native_entry)), address_repr)));
// Replace the placeholder StaticCall with an FfiCall to our native function.
{
// Locate the placeholder call.
StaticCallInstr* static_call = nullptr;
{
ILMatcher cursor(flow_graph, flow_graph->graph_entry()->normal_entry(),
@ -1083,8 +1072,28 @@ FlowGraph* SetupFfiFlowgraph(TestPipeline* pipeline,
}
RELEASE_ASSERT(static_call != nullptr);
// Store the native entry as an unboxed constant and convert it to an
// untagged pointer for the FfiCall.
Zone* const Z = flow_graph->zone();
auto* const load_entry_point = new (Z) IntConverterInstr(
kUnboxedIntPtr, kUntagged,
new (Z) Value(flow_graph->GetConstant(
Integer::Handle(Z, Integer::NewCanonical(native_entry)),
kUnboxedIntPtr)),
DeoptId::kNone);
flow_graph->InsertBefore(static_call, load_entry_point, /*env=*/nullptr,
FlowGraph::kValue);
// Make an FfiCall based on ffi_trampoline that calls our native function.
auto* const ffi_call =
new (Z) FfiCallInstr(DeoptId::kNone, marshaller, is_leaf);
RELEASE_ASSERT(ffi_call->InputCount() == 1);
ffi_call->SetInputAt(ffi_call->TargetAddressIndex(),
new (Z) Value(load_entry_point));
flow_graph->InsertBefore(static_call, ffi_call, /*env=*/nullptr,
FlowGraph::kEffect);
// Remove the placeholder call.
static_call->RemoveFromGraph(/*return_previous=*/false);
}
@ -1212,8 +1221,8 @@ ISOLATE_UNIT_TEST_CASE(IRTest_FfiCallInstrLeafDoesntSpill) {
[&](bool is_leaf, std::function<void(ParallelMoveInstr*)> verify) {
// Build the SSA graph for "doFfiCall"
TestPipeline pipeline(do_ffi_call, CompilerPass::kJIT);
FlowGraph* flow_graph = SetupFfiFlowgraph(
&pipeline, thread->zone(), marshaller, native_entry, is_leaf);
FlowGraph* flow_graph =
SetupFfiFlowgraph(&pipeline, marshaller, native_entry, is_leaf);
{
ParallelMoveInstr* parallel_move = nullptr;

@ -48,6 +48,7 @@ LocationSummary* Instruction::MakeCallSummary(Zone* zone,
const auto representation = instr->representation();
switch (representation) {
case kTagged:
case kUntagged:
case kUnboxedInt64:
result->set_out(
0, Location::RegisterLocation(CallingConventions::kReturnReg));

@ -165,24 +165,22 @@ static constexpr Representation kUnboxedWord =
compiler::target::kWordSize == 4 ? kUnboxedInt32 : kUnboxedInt64;
// The representation for unsigned word-sized unboxed fields.
//
// Note: kUnboxedUword is identical to kUnboxedWord until range analysis can
// handle unsigned 64-bit ranges. This means that range analysis will give
// Note: 64-bit kUnboxedUword is identical to kUnboxedWord until range analysis
// can handle unsigned 64-bit ranges. This means that range analysis will give
// signed results for unboxed uword field values.
static constexpr Representation kUnboxedUword = kUnboxedWord;
// 'UnboxedFfiIntPtr' should be able to hold a pointer of the target word-size.
// On a 32-bit platform, it's an unsigned 32-bit int because it should be
// zero-extended to 64-bits, not sign-extended (pointers are inherently
// unsigned).
//
// Issue(36370): Use [kUnboxedIntPtr] instead.
static constexpr Representation kUnboxedFfiIntPtr =
static constexpr Representation kUnboxedUword =
compiler::target::kWordSize == 4 ? kUnboxedUint32 : kUnboxedInt64;
// The representation which can be used for native pointers. We use signed 32/64
// bit representation to be able to do arithmetic on pointers.
static constexpr Representation kUnboxedIntPtr = kUnboxedWord;
// The representation used for pointers being exposed to users as Dart integers,
// or stored in a way that could be eventually exposed to users. In particular,
// this ensures that a 32-bit address, when extended to a 64-bit Dart integer,
// is zero-extended, not sign extended.
static constexpr Representation kUnboxedAddress = kUnboxedUword;
// Location objects are used to connect register allocator and code generator.
// Instruction templates used by code generator have a corresponding
// LocationSummary object which specifies expected location for every input

@ -309,12 +309,28 @@ static Representation SelectRepresentationInIL(Zone* zone,
return location.payload_type().AsRepresentationOverApprox(zone);
}
Representation BaseMarshaller::RepInDart(intptr_t arg_index) const {
// This should never be called on Pointers or Handles, which are specially
// handled during marshalling/unmarshalling.
ASSERT(!IsHandle(arg_index));
ASSERT(!IsPointer(arg_index));
return Location(arg_index).payload_type().AsRepresentationOverApprox(zone_);
}
// Implemented partially in BaseMarshaller because most Representations are
// the same in Calls and Callbacks.
Representation BaseMarshaller::RepInFfiCall(intptr_t def_index_global) const {
intptr_t arg_index = ArgumentIndex(def_index_global);
const auto& location = Location(arg_index);
// Handled appropriately in the subclasses.
ASSERT(!IsHandle(arg_index));
// The IL extracts the address stored in the Pointer object as an untagged
// pointer before passing it to C, and creates a new Pointer object to store
// the received untagged pointer when receiving a pointer from C.
if (IsPointer(arg_index)) return kUntagged;
const auto& location = Location(arg_index);
if (location.container_type().IsPrimitive()) {
return SelectRepresentationInIL(zone_, location);
}
@ -333,26 +349,36 @@ Representation BaseMarshaller::RepInFfiCall(intptr_t def_index_global) const {
return SelectRepresentationInIL(zone_, def_loc);
}
ASSERT(location.IsPointerToMemory());
UNREACHABLE(); // Implemented in subclasses.
}
Representation CallMarshaller::RepInFfiCall(intptr_t def_index_global) const {
intptr_t arg_index = ArgumentIndex(def_index_global);
if (IsHandle(arg_index)) {
// For FfiCall arguments, the FfiCall instruction takes a tagged pointer
// from the IL. (It then creates a handle on the stack and passes a
// pointer to the newly allocated handle to C.)
//
// For FfiCall returns, FfiCall returns the untagged pointer to the handle
// to the IL, which then extracts the ptr field of the handle to retrieve
// the tagged pointer.
return ArgumentIndexIsReturn(arg_index) ? kUntagged : kTagged;
}
if (ArgumentIndexIsReturn(arg_index) && ReturnsCompound()) {
// The IL creates a TypedData object which is stored on the stack, and the
// FfiCall copies the compound value, however it is returned into that
// TypedData object. In order to make the return value of the definition
// defined, the same TypedData object is returned from the FfiCall.
return kTagged;
}
const auto& location = Location(arg_index);
if (location.IsPointerToMemory()) {
if (ArgumentIndexIsReturn(arg_index)) {
// The IL type is the unboxed pointer.
const auto& pointer_location = location.AsPointerToMemory();
const auto& rep = pointer_location.pointer_location().payload_type();
ASSERT(rep.Equals(
pointer_location.pointer_return_location().payload_type()));
return rep.AsRepresentation();
} else {
// We're passing Pointer/TypedData object, the GC might move TypedData so
// we can't load the address from it eagerly.
return kTagged;
}
// For arguments, the compound data being passed as a pointer is first
// collected into a TypedData object by the IL, and that object is what is
// passed to the FfiCall instruction. (The machine code generated by
// FfiCall handles copying the data into non-GC-moveable memory and
// passing a pointer to that memory to the C code.)
return kTagged;
}
return BaseMarshaller::RepInFfiCall(def_index_global);
}
@ -360,19 +386,27 @@ Representation CallMarshaller::RepInFfiCall(intptr_t def_index_global) const {
Representation CallbackMarshaller::RepInFfiCall(
intptr_t def_index_global) const {
intptr_t arg_index = ArgumentIndex(def_index_global);
if (IsHandle(arg_index)) {
// Dart objects are passed to C as untagged pointers to newly created
// handles in the IL, and the ptr field of untagged pointers to handles are
// extracted when the IL receives handles from C code.
return kUntagged;
}
const auto& location = Location(arg_index);
if (location.IsPointerToMemory()) {
// The IL type is the unboxed pointer, and FFI callback return. In the
// latter we've already copied the data into the result location in IL.
const auto& pointer_location = location.AsPointerToMemory();
const auto& rep = pointer_location.pointer_location().payload_type();
ASSERT(
rep.Equals(pointer_location.pointer_return_location().payload_type()));
return rep.AsRepresentation();
// The IL gets an untagged pointer to memory both for arguments and for
// returns. If this is an argument, then the IL creates a Dart
// representation of the compound object from the pointed at memory.
// For returns, the IL copies the data from the compound object into
// the memory being pointed at before returning to C.
return kUntagged;
}
if (ArgumentIndexIsReturn(arg_index) && location.IsMultiple()) {
// We're passing a TypedData.
return Representation::kTagged;
// To return a compound object broken up over multiple native locations,
// the IL loads the compound object into a single TypedData object and
// passes that TypedData object to NativeReturn, which handles extracting
// the data to the appropriate native locations.
return kTagged;
}
return BaseMarshaller::RepInFfiCall(def_index_global);
}
@ -431,19 +465,13 @@ Location CallMarshaller::LocInFfiCall(intptr_t def_index_global) const {
const NativeLocation& loc = this->Location(arg_index);
if (ArgumentIndexIsReturn(arg_index)) {
const intptr_t def = kResultIndex - def_index_global;
if (loc.IsMultiple()) {
ASSERT(loc.AsMultiple().locations()[def]->IsExpressibleAsLocation());
return loc.AsMultiple().locations()[def]->AsLocation();
if (loc.IsRegisters() || loc.IsFpuRegisters()) {
return loc.AsLocation();
}
if (loc.IsPointerToMemory()) {
// No location at all, because we store into TypedData passed to the
// FfiCall instruction. But we have to supply a location.
return Location::RegisterLocation(CallingConventions::kReturnReg);
}
return loc.AsLocation();
ASSERT(ReturnsCompound());
// No location at all, because we store into TypedData passed to the
// FfiCall instruction. But we have to supply a location.
return Location::RegisterLocation(CallingConventions::kReturnReg);
}
// Force all handles to be Stack locations.

@ -80,9 +80,7 @@ class BaseMarshaller : public ZoneAllocated {
//
// Implemented in BaseMarshaller because most Representations are the same
// in Calls and Callbacks.
Representation RepInDart(intptr_t arg_index) const {
return Location(arg_index).payload_type().AsRepresentationOverApprox(zone_);
}
Representation RepInDart(intptr_t arg_index) const;
// Representation on how the value is passed to or received from the FfiCall
// instruction or StaticCall, NativeParameter, and NativeReturn instructions.

@ -69,13 +69,6 @@ static const NativeType& ConvertIfSoftFp(Zone* zone,
return type;
}
// The native dual of `kUnboxedFfiIntPtr`.
//
// It has the same signedness as `kUnboxedFfiIntPtr` to avoid sign conversions
// when converting between both.
const PrimitiveType kFfiIntPtr =
compiler::target::kWordSize == 8 ? kInt64 : kUint32;
static PrimitiveType TypeForSize(intptr_t size) {
switch (size) {
case 8:
@ -338,7 +331,7 @@ class ArgumentAllocator : public ValueObject {
} else if (size > 0) {
// Pointer in register if available, else pointer on stack.
const auto& pointer_type = *new (zone_) NativePrimitiveType(kFfiIntPtr);
const auto& pointer_type = *new (zone_) NativePrimitiveType(kAddress);
const auto& pointer_location = AllocateArgument(pointer_type);
return *new (zone_)
PointerToMemoryLocation(pointer_location, compound_type);
@ -548,7 +541,7 @@ class ArgumentAllocator : public ValueObject {
}
// Otherwise, passed by reference.
const auto& pointer_type = *new (zone_) NativePrimitiveType(kFfiIntPtr);
const auto& pointer_type = *new (zone_) NativePrimitiveType(kAddress);
const auto& pointer_location = AllocateArgument(pointer_type);
return *new (zone_)
PointerToMemoryLocation(pointer_location, compound_type);
@ -738,7 +731,7 @@ static NativeLocations& ArgumentLocations(
static const NativeLocation& PointerToMemoryResultLocation(
Zone* zone,
const NativeCompoundType& payload_type) {
const auto& pointer_type = *new (zone) NativePrimitiveType(kFfiIntPtr);
const auto& pointer_type = *new (zone) NativePrimitiveType(kAddress);
const auto& pointer_location = *new (zone) NativeRegistersLocation(
zone, pointer_type, pointer_type,
CallingConventions::kPointerToReturnStructRegisterCall);
@ -755,7 +748,7 @@ static const NativeLocation& PointerToMemoryResultLocation(
static const NativeLocation& PointerToMemoryResultLocation(
Zone* zone,
const NativeCompoundType& payload_type) {
const auto& pointer_type = *new (zone) NativePrimitiveType(kFfiIntPtr);
const auto& pointer_type = *new (zone) NativePrimitiveType(kAddress);
const auto& pointer_location = *new (zone) NativeStackLocation(
pointer_type, pointer_type, CallingConventions::kStackPointerRegister, 0);
const auto& pointer_return_location = *new (zone) NativeRegistersLocation(

@ -34,8 +34,7 @@ NativeLocation& NativeLocation::FromLocation(Zone* zone,
Representation rep) {
ASSERT(LocationCanBeExpressed(loc, rep));
const NativeType& native_rep =
NativeType::FromUnboxedRepresentation(zone, rep);
const NativeType& native_rep = NativeType::FromRepresentation(zone, rep);
switch (loc.kind()) {
case Location::Kind::kRegister:
@ -65,10 +64,9 @@ NativeLocation& NativeLocation::FromPairLocation(Zone* zone,
intptr_t index) {
ASSERT(pair_loc.IsPairLocation());
ASSERT(index == 0 || index == 1);
const Representation rep =
NativeType::FromUnboxedRepresentation(zone, pair_rep)
.Split(zone, index)
.AsRepresentation();
const Representation rep = NativeType::FromRepresentation(zone, pair_rep)
.Split(zone, index)
.AsRepresentation();
const Location loc = pair_loc.AsPairLocation()->At(index);
return FromLocation(zone, loc, rep);
}

@ -280,10 +280,6 @@ NativeUnionType& NativeUnionType::FromNativeTypes(Zone* zone,
#if !defined(DART_PRECOMPILED_RUNTIME) && !defined(FFI_UNIT_TESTS)
bool NativePrimitiveType::IsExpressibleAsRepresentation() const {
switch (representation_) {
case kInt8:
case kUint8:
case kInt16:
case kUint16:
case kInt24:
case kUint24:
case kInt40:
@ -294,13 +290,16 @@ bool NativePrimitiveType::IsExpressibleAsRepresentation() const {
case kUint56:
case kHalfDouble:
return false;
case kInt8:
case kUint8:
case kInt16:
case kUint16:
case kInt32:
case kUint32:
case kInt64:
case kUint64: // We don't actually have a kUnboxedUint64.
case kFloat:
case kDouble:
return true;
case kVoid:
return true;
default:
@ -311,6 +310,14 @@ bool NativePrimitiveType::IsExpressibleAsRepresentation() const {
Representation NativePrimitiveType::AsRepresentation() const {
ASSERT(IsExpressibleAsRepresentation());
switch (representation_) {
case kInt8:
return kUnboxedInt8;
case kUint8:
return kUnboxedUint8;
case kInt16:
return kUnboxedInt16;
case kUint16:
return kUnboxedUint16;
case kInt32:
return kUnboxedInt32;
case kUint32:
@ -323,7 +330,7 @@ Representation NativePrimitiveType::AsRepresentation() const {
case kDouble:
return kUnboxedDouble;
case kVoid:
return kUnboxedFfiIntPtr;
return kUnboxedIntPtr;
default:
UNREACHABLE_THIS();
}
@ -413,13 +420,10 @@ static PrimitiveType TypeRepresentation(classid_t class_id) {
case kFfiDoubleCid:
return kDouble;
case kPointerCid:
return compiler::target::kWordSize == 4 ? kUint32 : kInt64;
case kFfiHandleCid:
return kAddress;
case kFfiVoidCid:
return kVoid;
case kFfiHandleCid:
// We never expose this pointer as a Dart int, so no need to make it
// unsigned on 32 bit architectures.
return compiler::target::kWordSize == 4 ? kInt32 : kInt64;
default:
UNREACHABLE();
}
@ -613,32 +617,34 @@ static PrimitiveType fundamental_rep(Representation rep) {
return kUint32;
case kUnboxedInt64:
return kInt64;
case kUntagged:
case kTagged:
return TypeRepresentation(kPointerCid);
default:
break;
}
UNREACHABLE();
FATAL("Unhandled representation %u", rep);
}
NativePrimitiveType& NativeType::FromUnboxedRepresentation(Zone* zone,
Representation rep) {
NativePrimitiveType& NativeType::FromRepresentation(Zone* zone,
Representation rep) {
return *new (zone) NativePrimitiveType(fundamental_rep(rep));
}
const NativeFunctionType* NativeFunctionType::FromUnboxedRepresentation(
const NativeFunctionType* NativeFunctionType::FromRepresentations(
Zone* zone,
intptr_t num_arguments,
Representation representation) {
const auto& intptr_type =
compiler::ffi::NativePrimitiveType::FromUnboxedRepresentation(
zone, representation);
auto& argument_representations =
Representation return_representation,
const ZoneGrowableArray<Representation>& argument_representations) {
const auto& return_type =
NativePrimitiveType::FromRepresentation(zone, return_representation);
auto& argument_types =
*new (zone) ZoneGrowableArray<const compiler::ffi::NativeType*>(
zone, num_arguments);
for (intptr_t i = 0; i < num_arguments; i++) {
argument_representations.Add(&intptr_type);
zone, argument_representations.length());
for (intptr_t i = 0; i < argument_representations.length(); i++) {
argument_types.Add(&NativePrimitiveType::FromRepresentation(
zone, argument_representations.At(i)));
}
return new (zone)
compiler::ffi::NativeFunctionType(argument_representations, intptr_type);
return new (zone) NativeFunctionType(argument_types, return_type);
}
#endif // !defined(DART_PRECOMPILED_RUNTIME) && !defined(FFI_UNIT_TESTS)

@ -73,8 +73,8 @@ class NativeType : public ZoneAllocated {
static const NativeType& FromTypedDataClassId(Zone* zone, classid_t class_id);
#if !defined(DART_PRECOMPILED_RUNTIME) && !defined(FFI_UNIT_TESTS)
static NativePrimitiveType& FromUnboxedRepresentation(Zone* zone,
Representation rep);
static NativePrimitiveType& FromRepresentation(Zone* zone,
Representation rep);
#endif // !defined(DART_PRECOMPILED_RUNTIME) && !defined(FFI_UNIT_TESTS)
virtual bool IsPrimitive() const { return false; }
@ -200,6 +200,16 @@ enum PrimitiveType {
// TODO(37470): Add packed data structures.
};
// Used for the type representation of kUntagged (Pointer, where the untagged
// pointer in the data field is extracted by IL) and kTagged (Handle, turned
// into untagged pointers to the stack during the FfiCall) values. Should
// be kept consistent with the Representation kUnboxedAddress.
#if defined(TARGET_ARCH_IS_32_BIT)
constexpr PrimitiveType kAddress = kUint32;
#else
constexpr PrimitiveType kAddress = kInt64;
#endif
PrimitiveType PrimitiveTypeFromSizeInBytes(intptr_t size);
// Represents a primitive native type.
@ -455,10 +465,10 @@ class NativeFunctionType : public ZoneAllocated {
variadic_arguments_index_(variadic_arguments_index) {}
#if !defined(DART_PRECOMPILED_RUNTIME) && !defined(FFI_UNIT_TESTS)
static const NativeFunctionType* FromUnboxedRepresentation(
static const NativeFunctionType* FromRepresentations(
Zone* zone,
intptr_t num_arguments,
Representation representation);
Representation return_representation,
const ZoneGrowableArray<Representation>& argument_representations);
#endif
const NativeTypes& argument_types() const { return argument_types_; }

@ -404,25 +404,19 @@ Fragment BaseFlowGraphBuilder::LoadUntagged(intptr_t offset) {
return Fragment(load);
}
Fragment BaseFlowGraphBuilder::ConvertUntaggedToUnboxed(
Representation to_representation) {
ASSERT(to_representation == kUnboxedIntPtr ||
to_representation == kUnboxedFfiIntPtr);
Fragment BaseFlowGraphBuilder::ConvertUntaggedToUnboxed() {
Value* value = Pop();
auto converted = new (Z)
IntConverterInstr(kUntagged, to_representation, value, DeoptId::kNone);
IntConverterInstr(kUntagged, kUnboxedAddress, value, DeoptId::kNone);
converted->mark_truncating();
Push(converted);
return Fragment(converted);
}
Fragment BaseFlowGraphBuilder::ConvertUnboxedToUntagged(
Representation from_representation) {
ASSERT(from_representation == kUnboxedIntPtr ||
from_representation == kUnboxedFfiIntPtr);
Fragment BaseFlowGraphBuilder::ConvertUnboxedToUntagged() {
Value* value = Pop();
auto converted = new (Z)
IntConverterInstr(from_representation, kUntagged, value, DeoptId::kNone);
IntConverterInstr(kUnboxedAddress, kUntagged, value, DeoptId::kNone);
converted->mark_truncating();
Push(converted);
return Fragment(converted);

@ -190,8 +190,8 @@ class BaseFlowGraphBuilder {
AlignmentType alignment = kAlignedAccess);
Fragment LoadUntagged(intptr_t offset);
Fragment ConvertUntaggedToUnboxed(Representation to);
Fragment ConvertUnboxedToUntagged(Representation from);
Fragment ConvertUntaggedToUnboxed();
Fragment ConvertUnboxedToUntagged();
Fragment FloatToDouble();
Fragment DoubleToFloat();

@ -6132,7 +6132,6 @@ Fragment StreamingFlowGraphBuilder::BuildFfiCall() {
// This can only be Pointer, so the data field points to unmanaged memory.
code += LoadNativeField(Slot::PointerBase_data(),
InnerPointerAccess::kCannotBeInnerPointer);
code += B->ConvertUntaggedToUnboxed(kUnboxedFfiIntPtr);
// Skip (empty) named arguments list.
const intptr_t named_args_len = ReadListLength();
@ -6214,9 +6213,8 @@ Fragment StreamingFlowGraphBuilder::BuildCachableIdempotentCall(
code += BuildArgumentsCachableIdempotentCall(&argument_count);
code += flow_graph_builder_->CachableIdempotentCall(
position, target, argument_count, argument_names,
position, kUnboxedAddress, target, argument_count, argument_names,
/*type_args_len=*/0);
code += flow_graph_builder_->Box(kUnboxedFfiIntPtr);
return code;
}
@ -6328,7 +6326,6 @@ Fragment StreamingFlowGraphBuilder::BuildFfiNativeAddressOf() {
// unoptimized mode because then there is no reordering and we're consuming
// the value directly.
code += flow_graph_builder_->FfiNativeLookupAddress(native_annotation);
code += flow_graph_builder_->ConvertUnboxedToUntagged(kUnboxedFfiIntPtr);
code += flow_graph_builder_->StoreNativeField(
Slot::PointerBase_data(), InnerPointerAccess::kCannotBeInnerPointer,
StoreFieldInstr::Kind::kInitializing);

@ -419,37 +419,25 @@ Fragment FlowGraphBuilder::FfiCall(
return body;
}
Fragment FlowGraphBuilder::CCall(
const compiler::ffi::NativeCallingConvention& native_calling_convention) {
Fragment FlowGraphBuilder::CallRuntimeEntry(
const RuntimeEntry& entry,
Representation return_representation,
const ZoneGrowableArray<Representation>& argument_representations) {
Fragment body;
const intptr_t num_arguments =
native_calling_convention.argument_locations().length() + 1;
InputsArray arguments(num_arguments);
arguments.FillWith(nullptr, 0, num_arguments);
for (intptr_t i = num_arguments - 1; i >= 0; --i) {
arguments[i] = Pop();
}
auto* const call =
new (Z) CCallInstr(native_calling_convention, std::move(arguments));
body += LoadThread();
body += LoadUntagged(compiler::target::Thread::OffsetFromThread(&entry));
const intptr_t num_arguments = argument_representations.length() + 1;
InputsArray arguments = GetArguments(num_arguments);
auto* const call = CCallInstr::Make(
Z, return_representation, argument_representations, std::move(arguments));
Push(call);
body <<= call;
return body;
}
Fragment FlowGraphBuilder::CCall(intptr_t num_arguments,
Representation representation) {
const auto& native_function_type =
*compiler::ffi::NativeFunctionType::FromUnboxedRepresentation(
Z, num_arguments, representation);
const auto& native_calling_convention =
compiler::ffi::NativeCallingConvention::FromSignature(
Z, native_function_type);
return CCall(native_calling_convention);
}
Fragment FlowGraphBuilder::RethrowException(TokenPosition position,
int catch_try_index) {
Fragment instructions;
@ -664,6 +652,7 @@ Fragment FlowGraphBuilder::StaticCall(TokenPosition position,
}
Fragment FlowGraphBuilder::CachableIdempotentCall(TokenPosition position,
Representation representation,
const Function& target,
intptr_t argument_count,
const Array& argument_names,
@ -671,8 +660,8 @@ Fragment FlowGraphBuilder::CachableIdempotentCall(TokenPosition position,
const intptr_t total_count = argument_count + (type_args_count > 0 ? 1 : 0);
InputsArray arguments = GetArguments(total_count);
CachableIdempotentCallInstr* call = new (Z) CachableIdempotentCallInstr(
InstructionSource(position), target, type_args_count, argument_names,
std::move(arguments), GetNextDeoptId());
InstructionSource(position), representation, target, type_args_count,
argument_names, std::move(arguments), GetNextDeoptId());
Push(call);
return Fragment(call);
}
@ -1452,19 +1441,13 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfRecognizedMethod(
parsed_function_->RawParameterVariable(0);
LocalVariable* arg_offset = parsed_function_->RawParameterVariable(1);
body += LoadLocal(arg_offset);
body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
LocalVariable* arg_offset_not_null = MakeTemporary();
body += LoadLocal(arg_typed_data_base);
body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
body += LoadLocal(arg_offset_not_null);
body += UnboxTruncate(kUnboxedFfiIntPtr);
body += LoadLocal(arg_offset);
body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
body += UnboxTruncate(kUnboxedIntPtr);
body += LoadIndexed(typed_data_cid, /*index_scale=*/1,
/*index_unboxed=*/true, alignment);
// Avoid any unnecessary (and potentially deoptimizing) int
// conversions by using the representation returned from LoadIndexed.
body += Box(LoadIndexedInstr::ReturnRepresentation(typed_data_cid));
if (kind == MethodRecognizer::kFfiLoadPointer) {
const auto& pointer_class =
Class::ZoneHandle(Z, IG->object_store()->ffi_pointer_class());
@ -1479,14 +1462,18 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfRecognizedMethod(
LocalVariable* pointer = MakeTemporary();
body += LoadLocal(pointer);
body += LoadLocal(address);
body += UnboxTruncate(kUnboxedFfiIntPtr);
body += ConvertUnboxedToUntagged(kUnboxedFfiIntPtr);
ASSERT_EQUAL(LoadIndexedInstr::ReturnRepresentation(typed_data_cid),
kUnboxedAddress);
body += ConvertUnboxedToUntagged();
body += StoreNativeField(Slot::PointerBase_data(),
InnerPointerAccess::kCannotBeInnerPointer,
StoreFieldInstr::Kind::kInitializing);
body += DropTempsPreserveTop(1); // Drop [address] keep [pointer].
} else {
// Avoid any unnecessary (and potentially deoptimizing) int
// conversions by using the representation returned from LoadIndexed.
body += Box(LoadIndexedInstr::ReturnRepresentation(typed_data_cid));
}
body += DropTempsPreserveTop(1); // Drop [arg_offset].
} break;
case MethodRecognizer::kFfiStoreInt8:
case MethodRecognizer::kFfiStoreInt16:
@ -1515,23 +1502,21 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfRecognizedMethod(
LocalVariable* arg_value = parsed_function_->RawParameterVariable(2);
ASSERT_EQUAL(function.NumParameters(), 3);
body += LoadLocal(arg_offset);
body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
LocalVariable* arg_offset_not_null = MakeTemporary();
body += LoadLocal(arg_value);
body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
LocalVariable* arg_value_not_null = MakeTemporary();
body += LoadLocal(arg_typed_data_base); // Pointer.
body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
body += LoadLocal(arg_offset_not_null);
body += UnboxTruncate(kUnboxedFfiIntPtr);
body += LoadLocal(arg_value_not_null);
body += LoadLocal(arg_offset);
body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
body += UnboxTruncate(kUnboxedIntPtr);
body += LoadLocal(arg_value);
body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
if (kind == MethodRecognizer::kFfiStorePointer) {
// This can only be Pointer, so it is safe to load the data field.
body += LoadNativeField(Slot::PointerBase_data(),
InnerPointerAccess::kCannotBeInnerPointer);
body += ConvertUntaggedToUnboxed(kUnboxedFfiIntPtr);
body += ConvertUntaggedToUnboxed();
ASSERT_EQUAL(StoreIndexedInstr::ValueRepresentation(typed_data_cid),
kUnboxedAddress);
} else {
// Avoid any unnecessary (and potentially deoptimizing) int
// conversions by using the representation consumed by StoreIndexed.
@ -1540,8 +1525,6 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfRecognizedMethod(
}
body += StoreIndexedTypedData(typed_data_cid, /*index_scale=*/1,
/*index_unboxed=*/true, alignment);
body += Drop(); // Drop [arg_value].
body += Drop(); // Drop [arg_offset].
body += NullConstant();
} break;
case MethodRecognizer::kFfiFromAddress: {
@ -1557,8 +1540,11 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfRecognizedMethod(
body += LoadLocal(MakeTemporary()); // Duplicate Pointer.
body += LoadLocal(parsed_function_->RawParameterVariable(0)); // Address.
body += CheckNullOptimized(String::ZoneHandle(Z, function.name()));
body += UnboxTruncate(kUnboxedFfiIntPtr);
body += ConvertUnboxedToUntagged(kUnboxedFfiIntPtr);
// Use the same representation as FfiGetAddress so that the conversions
// in Pointer.fromAddress(address).address cancel out if the temporary
// Pointer allocation is removed.
body += UnboxTruncate(kUnboxedAddress);
body += ConvertUnboxedToUntagged();
body += StoreNativeField(Slot::PointerBase_data(),
InnerPointerAccess::kCannotBeInnerPointer,
StoreFieldInstr::Kind::kInitializing);
@ -1570,8 +1556,8 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfRecognizedMethod(
// This can only be Pointer, so it is safe to load the data field.
body += LoadNativeField(Slot::PointerBase_data(),
InnerPointerAccess::kCannotBeInnerPointer);
body += ConvertUntaggedToUnboxed(kUnboxedFfiIntPtr);
body += Box(kUnboxedFfiIntPtr);
body += ConvertUntaggedToUnboxed();
body += Box(kUnboxedAddress);
} break;
case MethodRecognizer::kHas63BitSmis: {
#if defined(HAS_SMI_63_BITS)
@ -1891,12 +1877,11 @@ Fragment FlowGraphBuilder::BuildTypedDataViewFactoryConstructor(
body += LoadLocal(typed_data);
body += LoadNativeField(Slot::PointerBase_data(),
InnerPointerAccess::kMayBeInnerPointer);
body += ConvertUntaggedToUnboxed(kUnboxedFfiIntPtr);
body += ConvertUntaggedToUnboxed();
body += LoadLocal(offset_in_bytes);
body += UnboxTruncate(kUnboxedFfiIntPtr);
body +=
BinaryIntegerOp(Token::kADD, kUnboxedFfiIntPtr, /*is_truncating=*/true);
body += ConvertUnboxedToUntagged(kUnboxedFfiIntPtr);
body += UnboxTruncate(kUnboxedAddress);
body += BinaryIntegerOp(Token::kADD, kUnboxedAddress, /*is_truncating=*/true);
body += ConvertUnboxedToUntagged();
body += StoreNativeField(Slot::PointerBase_data(),
InnerPointerAccess::kMayBeInnerPointer,
StoreFieldInstr::Kind::kInitializing);
@ -2066,36 +2051,44 @@ Fragment FlowGraphBuilder::BuildTypedDataMemMove(const Function& function,
Fragment call_memmove(is_too_large);
const intptr_t element_size = Instance::ElementSizeFor(cid);
auto* const arg_reps =
new (zone_) ZoneGrowableArray<Representation>(zone_, 3);
// dest: void*
call_memmove += LoadLocal(arg_to);
call_memmove += LoadNativeField(Slot::PointerBase_data(),
InnerPointerAccess::kMayBeInnerPointer);
call_memmove += ConvertUntaggedToUnboxed(kUnboxedFfiIntPtr);
call_memmove += ConvertUntaggedToUnboxed();
call_memmove += LoadLocal(arg_to_start);
call_memmove += IntConstant(element_size);
call_memmove += SmiBinaryOp(Token::kMUL, /*is_truncating=*/true);
call_memmove += UnboxTruncate(kUnboxedFfiIntPtr);
call_memmove += UnboxTruncate(kUnboxedAddress);
call_memmove +=
BinaryIntegerOp(Token::kADD, kUnboxedFfiIntPtr, /*is_truncating=*/true);
BinaryIntegerOp(Token::kADD, kUnboxedAddress, /*is_truncating=*/true);
call_memmove += ConvertUnboxedToUntagged();
arg_reps->Add(kUntagged);
// src: const void*
call_memmove += LoadLocal(arg_from);
call_memmove += LoadNativeField(Slot::PointerBase_data(),
InnerPointerAccess::kMayBeInnerPointer);
call_memmove += ConvertUntaggedToUnboxed(kUnboxedFfiIntPtr);
call_memmove += ConvertUntaggedToUnboxed();
call_memmove += LoadLocal(arg_from_start);
call_memmove += IntConstant(element_size);
call_memmove += SmiBinaryOp(Token::kMUL, /*is_truncating=*/true);
call_memmove += UnboxTruncate(kUnboxedFfiIntPtr);
call_memmove += UnboxTruncate(kUnboxedAddress);
call_memmove +=
BinaryIntegerOp(Token::kADD, kUnboxedFfiIntPtr, /*is_truncating=*/true);
BinaryIntegerOp(Token::kADD, kUnboxedAddress, /*is_truncating=*/true);
call_memmove += ConvertUnboxedToUntagged();
arg_reps->Add(kUntagged);
// n: size_t
call_memmove += LoadLocal(arg_count);
call_memmove += IntConstant(element_size);
call_memmove += SmiBinaryOp(Token::kMUL, /*is_truncating=*/true);
call_memmove += UnboxTruncate(kUnboxedFfiIntPtr);
call_memmove += LoadThread();
call_memmove += LoadUntagged(
compiler::target::Thread::OffsetFromThread(&kMemoryMoveRuntimeEntry));
call_memmove += UnboxTruncate(kUnboxedUword);
arg_reps->Add(kUnboxedUword);
// memmove(dest, src, n)
call_memmove +=
ConvertUntaggedToUnboxed(kUnboxedFfiIntPtr); // function address.
call_memmove += CCall(3);
CallRuntimeEntry(kMemoryMoveRuntimeEntry, kUntagged, *arg_reps);
// The returned address is unused.
call_memmove += Drop();
call_memmove += Goto(done);
@ -4466,61 +4459,6 @@ Fragment FlowGraphBuilder::LoadIndexedTypedDataUnboxed(
return fragment;
}
Fragment FlowGraphBuilder::EnterHandleScope() {
Fragment body;
body += LoadThread();
body += ConvertUntaggedToUnboxed(kUnboxedIntPtr); // argument.
// LoadThread again, we can't store it in a temp because it will end up
// in the environment of the FfiCall as untagged then.
body += LoadThread();
body += LoadUntagged(compiler::target::Thread::OffsetFromThread(
&kEnterHandleScopeRuntimeEntry));
body += ConvertUntaggedToUnboxed(kUnboxedFfiIntPtr); // function address.
body += CCall(/*num_arguments=*/1);
return body;
}
Fragment FlowGraphBuilder::GetTopHandleScope() {
Fragment body;
body += LoadThread();
body += LoadUntagged(compiler::target::Thread::api_top_scope_offset());
body += ConvertUntaggedToUnboxed(kUnboxedIntPtr);
return body;
}
Fragment FlowGraphBuilder::ExitHandleScope() {
Fragment code;
code += LoadThread();
code += ConvertUntaggedToUnboxed(kUnboxedIntPtr); // argument.
code += LoadThread();
code += LoadUntagged(compiler::target::Thread::OffsetFromThread(
&kExitHandleScopeRuntimeEntry));
code += ConvertUntaggedToUnboxed(kUnboxedFfiIntPtr); // function address.
code += CCall(/*num_arguments=*/1);
code += Drop();
return code;
}
Fragment FlowGraphBuilder::AllocateHandle() {
Fragment code;
// Get a reference to the top handle scope.
code += GetTopHandleScope();
code += LoadThread();
code += LoadUntagged(
compiler::target::Thread::OffsetFromThread(&kAllocateHandleRuntimeEntry));
code += ConvertUntaggedToUnboxed(kUnboxedFfiIntPtr); // function address.
code += CCall(/*num_arguments=*/1, kUnboxedIntPtr);
return code;
}
Fragment FlowGraphBuilder::RawLoadField(int32_t offset) {
Fragment code;
@@ -4538,27 +4476,6 @@ Fragment FlowGraphBuilder::RawStoreField(int32_t offset) {
return code;
}
Fragment FlowGraphBuilder::WrapHandle() {
Fragment code;
LocalVariable* object = MakeTemporary();
code += AllocateHandle();
code += LoadLocal(MakeTemporary()); // Duplicate handle pointer.
code += ConvertUnboxedToUntagged(kUnboxedIntPtr);
code += LoadLocal(object);
code += RawStoreField(compiler::target::LocalHandle::ptr_offset());
code += DropTempsPreserveTop(1); // Drop object below handle.
return code;
}
Fragment FlowGraphBuilder::UnwrapHandle() {
Fragment code;
code += ConvertUnboxedToUntagged(kUnboxedIntPtr);
code += RawLoadField(compiler::target::LocalHandle::ptr_offset());
return code;
}
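// The two removed helpers above reduce to a raw store/load at
// LocalHandle::ptr_offset(). A sketch in C++ terms (the field name ptr_ is
// assumed here for illustration):
//
//   struct LocalHandle { ObjectPtr ptr_; };
//   // WrapHandle:   handle->ptr_ = object;  // leaves the handle on the stack
//   // UnwrapHandle: push(handle->ptr_);     // leaves the object on the stack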
Fragment FlowGraphBuilder::UnhandledException() {
const auto class_table = thread_->isolate_group()->class_table();
ASSERT(class_table->HasValidClassAt(kUnhandledExceptionCid));
@@ -4714,43 +4631,6 @@ Fragment FlowGraphBuilder::NativeReturn(
return Fragment(instr).closed();
}
Fragment FlowGraphBuilder::FfiPointerFromAddress() {
LocalVariable* address = MakeTemporary();
LocalVariable* result = parsed_function_->expression_temp_var();
Class& result_class =
Class::ZoneHandle(Z, IG->object_store()->ffi_pointer_class());
// This class might only be instantiated as a return type of ffi calls.
result_class.EnsureIsFinalized(thread_);
TypeArguments& args =
TypeArguments::ZoneHandle(Z, IG->object_store()->type_argument_never());
// A kernel transform for FFI in the front-end ensures that type parameters
// do not appear in the type arguments to any Pointer classes in an FFI
// signature.
ASSERT(args.IsNull() || args.IsInstantiated());
args = args.Canonicalize(thread_);
Fragment code;
code += Constant(args);
code += AllocateObject(TokenPosition::kNoSource, result_class, 1);
LocalVariable* pointer = MakeTemporary();
code += LoadLocal(pointer);
code += LoadLocal(address);
code += UnboxTruncate(kUnboxedFfiIntPtr);
code += ConvertUnboxedToUntagged(kUnboxedFfiIntPtr);
code += StoreNativeField(Slot::PointerBase_data(),
InnerPointerAccess::kCannotBeInnerPointer,
StoreFieldInstr::Kind::kInitializing);
code += StoreLocal(TokenPosition::kNoSource, result);
code += Drop(); // StoreLocal^
code += Drop(); // address
code += LoadLocal(result);
return code;
}
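// The allocation-and-store sequence above survives in inlined form in
// FfiConvertPrimitiveToDart (later in this diff), minus the round trip through
// UnboxTruncate/ConvertUnboxedToUntagged; there the untagged address is stored
// directly:
//
//   body += LoadLocal(address);  // Still untagged.
//   body += StoreNativeField(Slot::PointerBase_data(),
//                            InnerPointerAccess::kCannotBeInnerPointer,
//                            StoreFieldInstr::Kind::kInitializing);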
Fragment FlowGraphBuilder::BitCast(Representation from, Representation to) {
BitCastInstr* instr = new (Z) BitCastInstr(from, to, Pop());
Push(instr);
@@ -4865,8 +4745,8 @@ static classid_t typed_data_cid(intptr_t chunk_size) {
UNREACHABLE();
}
// Only for use within CopyFromTypedDataBaseToUnboxedAddress and
// CopyFromUnboxedAddressToTypedDataBase, where we know the "array" being
// Only for use within FfiCallbackConvertCompoundArgumentToDart and
// FfiCallbackConvertCompoundReturnToNative, where we know the "array" being
// passed is an untagged pointer coming from C.
static classid_t external_typed_data_cid(intptr_t chunk_size) {
switch (chunk_size) {
@@ -4882,80 +4762,6 @@ static classid_t external_typed_data_cid(intptr_t chunk_size) {
UNREACHABLE();
}
Fragment FlowGraphBuilder::CopyFromTypedDataBaseToUnboxedAddress(
intptr_t length_in_bytes) {
Fragment body;
Value* unboxed_address_value = Pop();
LocalVariable* typed_data_base = MakeTemporary("typed_data_base");
Push(unboxed_address_value->definition());
LocalVariable* unboxed_address = MakeTemporary("unboxed_address");
intptr_t offset_in_bytes = 0;
while (offset_in_bytes < length_in_bytes) {
const intptr_t bytes_left = length_in_bytes - offset_in_bytes;
const intptr_t chunk_sizee = chunk_size(bytes_left);
body += LoadLocal(typed_data_base);
body += IntConstant(offset_in_bytes);
body += LoadIndexed(typed_data_cid(chunk_sizee), /*index_scale=*/1,
/*index_unboxed=*/false);
LocalVariable* chunk_value = MakeTemporary("chunk_value");
body += LoadLocal(unboxed_address);
body += ConvertUnboxedToUntagged(kUnboxedFfiIntPtr);
body += IntConstant(offset_in_bytes);
body += LoadLocal(chunk_value);
body += StoreIndexedTypedData(external_typed_data_cid(chunk_sizee),
/*index_scale=*/1,
/*index_unboxed=*/false);
body += DropTemporary(&chunk_value);
offset_in_bytes += chunk_sizee;
}
ASSERT(offset_in_bytes == length_in_bytes);
body += DropTemporary(&unboxed_address);
body += DropTemporary(&typed_data_base);
return body;
}
Fragment FlowGraphBuilder::CopyFromUnboxedAddressToTypedDataBase(
intptr_t length_in_bytes) {
Fragment body;
Value* typed_data_base_value = Pop();
LocalVariable* unboxed_address = MakeTemporary("unboxed_address");
Push(typed_data_base_value->definition());
LocalVariable* typed_data_base = MakeTemporary("typed_data_base");
intptr_t offset_in_bytes = 0;
while (offset_in_bytes < length_in_bytes) {
const intptr_t bytes_left = length_in_bytes - offset_in_bytes;
const intptr_t chunk_sizee = chunk_size(bytes_left);
body += LoadLocal(unboxed_address);
body += ConvertUnboxedToUntagged(kUnboxedFfiIntPtr);
body += IntConstant(offset_in_bytes);
body += LoadIndexed(external_typed_data_cid(chunk_sizee), /*index_scale=*/1,
/*index_unboxed=*/false);
LocalVariable* chunk_value = MakeTemporary("chunk_value");
body += LoadLocal(typed_data_base);
body += IntConstant(offset_in_bytes);
body += LoadLocal(chunk_value);
body += StoreIndexedTypedData(typed_data_cid(chunk_sizee),
/*index_scale=*/1,
/*index_unboxed=*/false);
body += DropTemporary(&chunk_value);
offset_in_bytes += chunk_sizee;
}
ASSERT(offset_in_bytes == length_in_bytes);
body += DropTemporary(&typed_data_base);
body += DropTemporary(&unboxed_address);
return body;
}
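// Both removed helpers implement a chunked copy that is byte-for-byte
// equivalent to memcpy(dest, src, length_in_bytes). A sketch, assuming
// chunk_size(n) returns the widest supported power-of-two chunk <= n:
//
//   for (intptr_t offset = 0; offset < length_in_bytes;) {
//     const intptr_t chunk = chunk_size(length_in_bytes - offset);
//     memcpy(dest + offset, src + offset, chunk);  // One Load/StoreIndexed pair.
//     offset += chunk;
//   }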
Fragment FlowGraphBuilder::LoadTail(LocalVariable* variable,
intptr_t size,
intptr_t offset_in_bytes,
@@ -4983,7 +4789,7 @@ Fragment FlowGraphBuilder::LoadTail(LocalVariable* variable,
if (shift != 0) {
body += IntConstant(shift);
// 64-bit doesn't support kUnboxedInt32 ops.
Representation op_representation = kUnboxedFfiIntPtr;
Representation op_representation = kUnboxedIntPtr;
body += BinaryIntegerOp(Token::kSHL, op_representation,
/*is_truncating*/ true);
body += BinaryIntegerOp(Token::kBIT_OR, op_representation,
@@ -5129,10 +4935,30 @@ Fragment FlowGraphBuilder::FfiCallbackConvertCompoundArgumentToDart(
body +=
AllocateTypedData(TokenPosition::kNoSource, kTypedDataUint8ArrayCid);
LocalVariable* typed_data_base = MakeTemporary("typed_data_base");
body += LoadLocal(address_of_compound);
body += LoadLocal(typed_data_base);
body += CopyFromUnboxedAddressToTypedDataBase(length_in_bytes);
body += DropTempsPreserveTop(1); // address_of_compound.
intptr_t offset_in_bytes = 0;
while (offset_in_bytes < length_in_bytes) {
const intptr_t bytes_left = length_in_bytes - offset_in_bytes;
const intptr_t chunk_sizee = chunk_size(bytes_left);
body += LoadLocal(address_of_compound);
body += IntConstant(offset_in_bytes);
body +=
LoadIndexed(external_typed_data_cid(chunk_sizee), /*index_scale=*/1,
/*index_unboxed=*/false);
LocalVariable* chunk_value = MakeTemporary("chunk_value");
body += LoadLocal(typed_data_base);
body += IntConstant(offset_in_bytes);
body += LoadLocal(chunk_value);
body += StoreIndexedTypedData(typed_data_cid(chunk_sizee),
/*index_scale=*/1,
/*index_unboxed=*/false);
body += DropTemporary(&chunk_value);
offset_in_bytes += chunk_sizee;
}
ASSERT(offset_in_bytes == length_in_bytes);
body += DropTempsPreserveTop(1); // Drop address_of_compound.
}
// Wrap typed data in compound class.
const auto& compound_type =
@@ -5163,12 +4989,31 @@ Fragment FlowGraphBuilder::FfiCallbackConvertCompoundReturnToNative(
new (Z) NativeParameterInstr(marshaller, compiler::ffi::kResultIndex);
Push(pointer_to_return); // Address where return value should be stored.
body <<= pointer_to_return;
body += UnboxTruncate(kUnboxedFfiIntPtr);
LocalVariable* unboxed_address = MakeTemporary("unboxed_address");
body += LoadLocal(typed_data_base);
body += LoadLocal(unboxed_address);
body += CopyFromTypedDataBaseToUnboxedAddress(length_in_bytes);
intptr_t offset_in_bytes = 0;
while (offset_in_bytes < length_in_bytes) {
const intptr_t bytes_left = length_in_bytes - offset_in_bytes;
const intptr_t chunk_sizee = chunk_size(bytes_left);
body += LoadLocal(typed_data_base);
body += IntConstant(offset_in_bytes);
body += LoadIndexed(typed_data_cid(chunk_sizee), /*index_scale=*/1,
/*index_unboxed=*/false);
LocalVariable* chunk_value = MakeTemporary("chunk_value");
body += LoadLocal(unboxed_address);
body += IntConstant(offset_in_bytes);
body += LoadLocal(chunk_value);
body += StoreIndexedTypedData(external_typed_data_cid(chunk_sizee),
/*index_scale=*/1,
/*index_unboxed=*/false);
body += DropTemporary(&chunk_value);
offset_in_bytes += chunk_sizee;
}
ASSERT(offset_in_bytes == length_in_bytes);
body += DropTempsPreserveTop(1); // Keep address, drop typed_data_base.
}
return body;
@@ -5181,11 +5026,39 @@ Fragment FlowGraphBuilder::FfiConvertPrimitiveToDart(
Fragment body;
if (marshaller.IsPointer(arg_index)) {
body += Box(kUnboxedFfiIntPtr);
body += FfiPointerFromAddress();
Class& result_class =
Class::ZoneHandle(Z, IG->object_store()->ffi_pointer_class());
// This class might only be instantiated as a return type of ffi calls.
result_class.EnsureIsFinalized(thread_);
TypeArguments& args =
TypeArguments::ZoneHandle(Z, IG->object_store()->type_argument_never());
// A kernel transform for FFI in the front-end ensures that type parameters
// do not appear in the type arguments to any Pointer classes in an FFI
// signature.
ASSERT(args.IsNull() || args.IsInstantiated());
args = args.Canonicalize(thread_);
LocalVariable* address = MakeTemporary("address");
LocalVariable* result = parsed_function_->expression_temp_var();
body += Constant(args);
body += AllocateObject(TokenPosition::kNoSource, result_class, 1);
body += StoreLocal(TokenPosition::kNoSource, result);
body += LoadLocal(address);
body += StoreNativeField(Slot::PointerBase_data(),
InnerPointerAccess::kCannotBeInnerPointer,
StoreFieldInstr::Kind::kInitializing);
body += DropTemporary(&address); // address
body += LoadLocal(result);
} else if (marshaller.IsHandle(arg_index)) {
body += UnwrapHandle();
// The top of the stack is a Dart_Handle, so retrieve the tagged pointer
// out of it.
body += RawLoadField(compiler::target::LocalHandle::ptr_offset());
} else if (marshaller.IsVoid(arg_index)) {
// Ignore whatever value was being returned and return null.
ASSERT_EQUAL(arg_index, compiler::ffi::kResultIndex);
body += Drop();
body += NullConstant();
} else {
@@ -5214,9 +5087,37 @@ Fragment FlowGraphBuilder::FfiConvertPrimitiveToNative(
// This can only be Pointer, so it is safe to load the data field.
body += LoadNativeField(Slot::PointerBase_data(),
InnerPointerAccess::kCannotBeInnerPointer);
body += ConvertUntaggedToUnboxed(kUnboxedFfiIntPtr);
} else if (marshaller.IsHandle(arg_index)) {
body += WrapHandle();
// FfiCallInstr specifies all handle locations as Stack, and will pass a
// pointer to the stack slot as the native handle argument. Therefore the
// only handles that need wrapping are function results.
ASSERT_EQUAL(arg_index, compiler::ffi::kResultIndex);
LocalVariable* object = MakeTemporary("object");
auto* const arg_reps =
new (zone_) ZoneGrowableArray<Representation>(zone_, 1);
// Get a reference to the top handle scope.
body += LoadThread();
body += LoadUntagged(compiler::target::Thread::api_top_scope_offset());
arg_reps->Add(kUntagged);
// Allocate a new handle in the top handle scope.
body += CallRuntimeEntry(kAllocateHandleRuntimeEntry, kUntagged, *arg_reps);
LocalVariable* handle = MakeTemporary("handle");
// Store the object address into the handle.
body += LoadLocal(handle);
body += LoadLocal(object);
body += RawStoreField(compiler::target::LocalHandle::ptr_offset());
body += DropTempsPreserveTop(1); // Drop object.
} else if (marshaller.IsVoid(arg_index)) {
ASSERT_EQUAL(arg_index, compiler::ffi::kResultIndex);
// Ignore whatever value was being returned and return nullptr.
body += Drop();
body += UnboxedIntConstant(0, kUnboxedIntPtr);
} else {
if (marshaller.IsBool(arg_index)) {
body += BoolToInt();
@@ -5278,17 +5179,17 @@ Fragment FlowGraphBuilder::FfiNativeLookupAddress(
}
const auto& ffi_resolver =
Function::ZoneHandle(Z, IG->object_store()->ffi_resolver_function());
#if !defined(TARGET_ARCH_IA32)
// We have access to the object pool, so use a cacheable static call.
Fragment body;
body += Constant(asset_id);
body += Constant(symbol);
body += Constant(Smi::ZoneHandle(Smi::New(arg_n)));
body += CachableIdempotentCall(TokenPosition::kNoSource, ffi_resolver,
/*argument_count=*/3,
/*argument_names=*/Array::null_array(),
/*type_args_count=*/0);
body +=
CachableIdempotentCall(TokenPosition::kNoSource, kUntagged, ffi_resolver,
/*argument_count=*/3,
/*argument_names=*/Array::null_array(),
/*type_args_count=*/0);
return body;
#else // !defined(TARGET_ARCH_IA32)
// IA32 only has JIT and no pool. This function will only be compiled if
@@ -5303,7 +5204,8 @@ Fragment FlowGraphBuilder::FfiNativeLookupAddress(
#endif
if (error == nullptr) {
Fragment body;
body += UnboxedIntConstant(function_address, kUnboxedFfiIntPtr);
body += UnboxedIntConstant(function_address, kUnboxedAddress);
body += ConvertUnboxedToUntagged();
return body;
} else {
free(error);
@ -5317,30 +5219,25 @@ Fragment FlowGraphBuilder::FfiNativeLookupAddress(
// Non-cacheable call, this is IA32.
body += StaticCall(TokenPosition::kNoSource, ffi_resolver,
/*argument_count=*/3, ICData::kStatic);
body += UnboxTruncate(kUnboxedFfiIntPtr);
body += UnboxTruncate(kUnboxedAddress);
body += ConvertUnboxedToUntagged();
return body;
}
#endif // !defined(TARGET_ARCH_IA32)
}
Fragment FlowGraphBuilder::FfiNativeLookupAddress(const Function& function) {
ASSERT(function.is_ffi_native());
ASSERT(!IsRecognizedMethodForFlowGraph(function));
ASSERT(optimizing_);
auto const& native_instance =
Instance::Handle(function.GetNativeAnnotation());
return FfiNativeLookupAddress(native_instance);
}
Fragment FlowGraphBuilder::FfiNativeFunctionBody(const Function& function) {
ASSERT(function.is_ffi_native());
ASSERT(!IsRecognizedMethodForFlowGraph(function));
ASSERT(optimizing_);
const auto& c_signature =
FunctionType::ZoneHandle(Z, function.FfiCSignature());
auto const& native_instance =
Instance::Handle(function.GetNativeAnnotation());
Fragment body;
body += FfiNativeLookupAddress(function);
body += FfiNativeLookupAddress(native_instance);
body += FfiCallFunctionBody(function, c_signature,
/*first_argument_parameter_offset=*/0);
return body;
@@ -5403,7 +5300,15 @@ Fragment FlowGraphBuilder::FfiCallFunctionBody(
// need it.
// We no longer need the scope for passing in Handle arguments, but the
// native function might, for instance, be relying on this scope for the
// Dart API.
body += EnterHandleScope();
auto* const arg_reps =
new (zone_) ZoneGrowableArray<Representation>(zone_, 1);
body += LoadThread(); // argument.
arg_reps->Add(kUntagged);
body +=
CallRuntimeEntry(kEnterHandleScopeRuntimeEntry, kUntagged, *arg_reps);
}
// Allocate typed data before FfiCall and pass it in to ffi call if needed.
@@ -5460,11 +5365,25 @@ Fragment FlowGraphBuilder::FfiCallFunctionBody(
body += FfiConvertPrimitiveToDart(marshaller, compiler::ffi::kResultIndex);
}
auto exit_handle_scope = [&]() -> Fragment {
Fragment code;
auto* const arg_reps =
new (zone_) ZoneGrowableArray<Representation>(zone_, 1);
code += LoadThread(); // argument.
arg_reps->Add(kUntagged);
code +=
CallRuntimeEntry(kExitHandleScopeRuntimeEntry, kUntagged, *arg_reps);
code += Drop();
return code;
};
if (signature_contains_handles) {
// TODO(dartbug.com/48989): Remove scope for calls where we don't actually
// need it.
body += DropTempsPreserveTop(1); // Drop api_local_scope.
body += ExitHandleScope();
body += exit_handle_scope();
}
body += DropTempsPreserveTop(1); // Drop address.
@@ -5472,9 +5391,6 @@ Fragment FlowGraphBuilder::FfiCallFunctionBody(
if (signature_contains_handles) {
--try_depth_;
}
if (signature_contains_handles) {
++catch_depth_;
Fragment catch_body =
CatchBlockEntry(Array::empty_array(), try_handler_index,
@@ -5484,7 +5400,7 @@ Fragment FlowGraphBuilder::FfiCallFunctionBody(
// need it.
// TODO(41984): If we want to pass in the handle scope, move it out
// of the try catch.
catch_body += ExitHandleScope();
catch_body += exit_handle_scope();
catch_body += LoadLocal(CurrentException());
catch_body += LoadLocal(CurrentStackTrace());
@@ -5591,10 +5507,7 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfSyncFfiCallback(
ICData::kNoRebind);
}
if (marshaller.IsVoid(compiler::ffi::kResultIndex)) {
body += Drop();
body += IntConstant(0);
} else if (!marshaller.IsHandle(compiler::ffi::kResultIndex)) {
if (!marshaller.IsHandle(compiler::ffi::kResultIndex)) {
body += CheckNullOptimized(
String::ZoneHandle(Z, Symbols::New(H.thread(), "return_value")),
CheckNullInstr::kArgumentError);
@@ -5619,18 +5532,19 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfSyncFfiCallback(
/*is_synthesized=*/true);
// Return the "exceptional return" value given in 'fromFunction'.
//
// For pointer and void return types, the exceptional return is always null --
// return 0 instead.
if (marshaller.IsPointer(compiler::ffi::kResultIndex) ||
marshaller.IsVoid(compiler::ffi::kResultIndex)) {
if (marshaller.IsVoid(compiler::ffi::kResultIndex)) {
// The exceptional return is always null -- return nullptr instead.
ASSERT(function.FfiCallbackExceptionalReturn() == Object::null());
catch_body += UnboxedIntConstant(0, kUnboxedFfiIntPtr);
catch_body += UnboxedIntConstant(0, kUnboxedIntPtr);
} else if (marshaller.IsPointer(compiler::ffi::kResultIndex)) {
// The exceptional return is always null -- return nullptr instead.
ASSERT(function.FfiCallbackExceptionalReturn() == Object::null());
catch_body += UnboxedIntConstant(0, kUnboxedAddress);
catch_body += ConvertUnboxedToUntagged();
} else if (marshaller.IsHandle(compiler::ffi::kResultIndex)) {
catch_body += UnhandledException();
catch_body +=
FfiConvertPrimitiveToNative(marshaller, compiler::ffi::kResultIndex);
} else if (marshaller.IsCompound(compiler::ffi::kResultIndex)) {
ASSERT(function.FfiCallbackExceptionalReturn() == Object::null());
// Manufacture empty result.
@@ -5712,9 +5626,7 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfAsyncFfiCallback(
body += Call1ArgStub(TokenPosition::kNoSource,
Call1ArgStubInstr::StubId::kFfiAsyncCallbackSend);
// All async FFI callbacks return void, so just return 0.
body += Drop();
body += UnboxedIntConstant(0, kUnboxedFfiIntPtr);
body += FfiConvertPrimitiveToNative(marshaller, compiler::ffi::kResultIndex);
body += NativeReturn(marshaller);
--try_depth_;
@@ -5726,8 +5638,10 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfAsyncFfiCallback(
/*is_synthesized=*/true);
// This catch indicates there's been some sort of error, but async callbacks
// are fire-and-forget, and we don't guarantee delivery. So just return 0.
catch_body += UnboxedIntConstant(0, kUnboxedFfiIntPtr);
// are fire-and-forget, and we don't guarantee delivery.
catch_body += NullConstant();
catch_body +=
FfiConvertPrimitiveToNative(marshaller, compiler::ffi::kResultIndex);
catch_body += NativeReturn(marshaller);
--catch_depth_;

@@ -141,11 +141,10 @@ class FlowGraphBuilder : public BaseFlowGraphBuilder {
// Resolves the address of a native symbol from the constant data of a
// vm:ffi:native pragma.
// Because it's used in optimized mode (as part of the implementation of
// @Native functions), it pushes the value as an unboxed value. This is safe
// to use in unoptimized mode too as long as the unboxed value is consumed
// @Native functions), it pushes the value as an untagged value. This is safe
// to use in unoptimized mode too as long as the untagged value is consumed
// immediately.
Fragment FfiNativeLookupAddress(const Instance& native);
Fragment FfiNativeLookupAddress(const Function& function);
// Expects target address on stack.
Fragment FfiCallFunctionBody(const Function& function,
const FunctionType& c_signature,
@@ -207,10 +206,10 @@ class FlowGraphBuilder : public BaseFlowGraphBuilder {
Fragment FfiCall(const compiler::ffi::CallMarshaller& marshaller,
bool is_leaf);
Fragment CCall(
const compiler::ffi::NativeCallingConvention& native_calling_convention);
Fragment CCall(intptr_t num_arguments,
Representation representation = kUnboxedFfiIntPtr);
Fragment CallRuntimeEntry(
const RuntimeEntry& entry,
Representation return_representation,
const ZoneGrowableArray<Representation>& argument_representations);
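// Typical use (a sketch mirroring the EnterApiScope call built in
// kernel_to_il.cc earlier in this diff):
//
//   auto* const arg_reps =
//       new (zone_) ZoneGrowableArray<Representation>(zone_, 1);
//   body += LoadThread();  // argument.
//   arg_reps->Add(kUntagged);
//   body +=
//       CallRuntimeEntry(kEnterHandleScopeRuntimeEntry, kUntagged, *arg_reps);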
Fragment RethrowException(TokenPosition position, int catch_try_index);
Fragment LoadLocal(LocalVariable* variable);
@@ -237,6 +236,7 @@ class FlowGraphBuilder : public BaseFlowGraphBuilder {
intptr_t type_args_len = 0,
bool use_unchecked_entry = false);
Fragment CachableIdempotentCall(TokenPosition position,
Representation representation,
const Function& target,
intptr_t argument_count,
const Array& argument_names,
@@ -326,14 +326,6 @@ class FlowGraphBuilder : public BaseFlowGraphBuilder {
// Compares arbitrary integers.
Fragment IntRelationalOp(TokenPosition position, Token::Kind kind);
// Creates an ffi.Pointer holding a given address.
Fragment FfiPointerFromAddress();
// Pushes an (unboxed) bogus value returned when a native -> Dart callback
// throws an exception.
Fragment FfiExceptionalReturnValue(const AbstractType& result_type,
const Representation target);
// Pops a Dart object and push the unboxed native version, according to the
// semantics of FFI argument translation.
//
@@ -397,24 +389,6 @@ class FlowGraphBuilder : public BaseFlowGraphBuilder {
ZoneGrowableArray<LocalVariable*>* definitions,
const GrowableArray<Representation>& representations);
// Copies bytes from a TypedDataBase to the address of a kUnboxedFfiIntPtr.
Fragment CopyFromTypedDataBaseToUnboxedAddress(intptr_t length_in_bytes);
// Copies bytes from the address of a kUnboxedFfiIntPtr to a TypedDataBase.
Fragment CopyFromUnboxedAddressToTypedDataBase(intptr_t length_in_bytes);
// Generates a call to `Thread::EnterApiScope`.
Fragment EnterHandleScope();
// Generates a load of `Thread::api_top_scope`.
Fragment GetTopHandleScope();
// Generates a call to `Thread::ExitApiScope`.
Fragment ExitHandleScope();
// Leaves a `LocalHandle` on the stack.
Fragment AllocateHandle();
// Loads a tagged value from an untagged base + offset from outside the heap.
Fragment RawLoadField(int32_t offset);
@@ -423,12 +397,6 @@ class FlowGraphBuilder : public BaseFlowGraphBuilder {
// The store must be outside of the heap and does not emit a store barrier.
Fragment RawStoreField(int32_t offset);
// Wraps an `Object` from the stack and leaves a `LocalHandle` on the stack.
Fragment WrapHandle();
// Unwraps a `LocalHandle` from the stack and leaves the object on the stack.
Fragment UnwrapHandle();
// Wrap the current exception and stacktrace in an unhandled exception.
Fragment UnhandledException();

@@ -78,9 +78,6 @@ vmspecific_regress_37511_test: SkipByDesign # Symbols are not exposed on purpose
vmspecific_regress_37780_test: SkipByDesign # Symbols are not exposed on purpose and are not linked in Windows Precompiled. dartbug.com/40579
vmspecific_regress_51794_test: SkipByDesign # Symbols are not exposed on purpose and are not linked in Windows Precompiled. dartbug.com/40579
[ $arch == arm || $arch == arm_x64 || $arch == ia32 || $arch == riscv32 || $arch == simarm || $arch == simriscv32 ]
vmspecific_pointer_load_il_test: SkipByDesign # 32 bit archs use uint32 for pointers and have more int conversions.
# These tests trigger and catch an abort (intentionally) and terminate the VM.
# They're incompatible with ASAN because not all memory is freed when aborting and
# with AppJit because they abort the VM before it can generate a snapshot.

@@ -58,30 +58,16 @@ void matchIL$testOffset(FlowGraph graph) {
'pointer',
slot: 'PointerBase.data',
),
'pointer.address int64' <<
match.IntConverter(
'pointer.address untagged',
from: 'untagged',
to: 'int64',
),
...convertUntaggedAddressToInt64('pointer'),
'pointer2.address int64' <<
match.BinaryInt64Op(
'pointer.address int64',
'int 10',
),
// `pointer2` is not allocated.
'pointer2.address untagged' <<
match.IntConverter(
'pointer2.address int64',
from: 'int64',
to: 'untagged',
),
'pointer2.value' <<
match.LoadIndexed(
'pointer2.address untagged',
'int 0',
),
match.Return('pointer2.value'),
...convertInt64AddressToUntagged('pointer2'),
...loadIndexedValueAsInt64('pointer2', 'int 0'),
match.Return('pointer2.value int64'),
]),
]);
}
@@ -119,34 +105,20 @@ void matchIL$testAllocate(FlowGraph graph) {
'pointer',
slot: 'PointerBase.data',
),
'pointer.address int64' <<
match.IntConverter(
'pointer.address untagged',
from: 'untagged',
to: 'int64',
),
...convertUntaggedAddressToInt64('pointer'),
'pointer2.address int64' <<
match.BinaryInt64Op(
'pointer.address int64',
'int 10',
),
'pointer2.address untagged' <<
match.IntConverter(
'pointer2.address int64',
from: 'int64',
to: 'untagged',
),
...convertInt64AddressToUntagged('pointer2'),
// The untagged pointer2.address can live through an allocation
// even though it is marked `InnerPointerAccess::kMayBeInnerPointer`
// because its cid is a Pointer cid.
match.AllocateObject(),
match.StoreStaticField(match.any),
'pointer2.value' <<
match.LoadIndexed(
'pointer2.address untagged',
'int 0',
),
match.Return('pointer2.value'),
...loadIndexedValueAsInt64('pointer2', 'int 0'),
match.Return('pointer2.value int64'),
]),
]);
}
@@ -166,6 +138,7 @@ int testHoist(Pointer<Int8> pointer) {
void matchIL$testHoist(FlowGraph graph) {
graph.dump();
final indexRep = is32BitConfiguration ? 'int32' : 'int64';
graph.match([
match.block('Graph', [
'int 0' << match.UnboxedConstant(value: 0),
@@ -177,7 +150,7 @@ void matchIL$testHoist(FlowGraph graph) {
match.Parameter(
index: 0,
),
'pointer.address' <<
'pointer[i].address untagged' <<
match.LoadField(
'pointer',
slot: 'PointerBase.data',
@@ -186,6 +159,8 @@
]),
'B1' <<
match.block('Join', [
'result int64' << match.Phi('int 0', 'result'),
'i int64' << match.Phi('int 0', 'i'),
match.CheckStackOverflow(),
match.Branch(match.RelationalOp(match.any, match.any, kind: '<'),
ifTrue: 'B2', ifFalse: 'B3'),
@@ -195,17 +170,21 @@
// Do some allocation.
match.AllocateObject(),
match.StoreStaticField(match.any),
if (is32BitConfiguration) ...[
'i $indexRep' <<
match.IntConverter(
'i int64',
from: 'int64',
to: indexRep,
),
],
// Do a load indexed with the untagged pointer.address that is
// hoisted out of the loop.
'pointer[i]' <<
match.LoadIndexed(
'pointer.address',
match.any, // i
),
...loadIndexedValueAsInt64('pointer[i]', 'i $indexRep'),
'result' <<
match.BinaryInt64Op(
match.any,
'pointer[i]',
'pointer[i].value int64',
),
'i' <<
match.BinaryInt64Op(
@@ -220,3 +199,63 @@
]),
]);
}
final addressRep = is32BitConfiguration ? 'uint32' : 'int64';
final valueRep = is32BitConfiguration ? 'int32' : 'int64';
List<Matcher> convertUntaggedAddressToInt64(String name) {
return [
'$name.address $addressRep' <<
match.IntConverter(
'$name.address untagged',
from: 'untagged',
to: addressRep,
),
if (is32BitConfiguration) ...[
'$name.address int64' <<
match.IntConverter(
'$name.address $addressRep',
from: addressRep,
to: 'int64',
),
],
];
}
List<Matcher> convertInt64AddressToUntagged(String name) {
return [
if (is32BitConfiguration) ...[
'$name.address $addressRep' <<
match.IntConverter(
'$name.address int64',
from: 'int64',
to: addressRep,
),
],
// `$name` is not allocated.
'$name.address untagged' <<
match.IntConverter(
'$name.address $addressRep',
from: addressRep,
to: 'untagged',
),
];
}
List<Matcher> loadIndexedValueAsInt64(String name, String index) {
return [
'$name.value $valueRep' <<
match.LoadIndexed(
'$name.address untagged',
index,
),
if (is32BitConfiguration) ...[
'$name.value int64' <<
match.IntConverter(
'$name.value $valueRep',
from: valueRep,
to: 'int64',
),
],
];
}
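// Usage sketch (mirrors matchIL$testOffset above): on 64-bit configurations
// each helper expands to a single IntConverter or LoadIndexed matcher; on
// 32-bit configurations the extra uint32/int32 <-> int64 conversion matchers
// are added.
//
//   ...convertUntaggedAddressToInt64('pointer'),
//   ...convertInt64AddressToUntagged('pointer2'),
//   ...loadIndexedValueAsInt64('pointer2', 'int 0'),
//   match.Return('pointer2.value int64'),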