[vm] Split DartReturnInstr and NativeReturnInstr

Renames `ReturnInstr` to `DartReturnInstr`, and introduces a new
`ReturnBaseInstr` to be the common parent of `DartReturnInstr` and
`NativeReturnInstr`. (Before this CL, `NativeReturnInstr` was a
subtype of `ReturnInstr`.)

In a follow-up CL, `NativeReturnInstr` will get up to two inputs:
https://dart-review.googlesource.com/c/sdk/+/354226
Therefore, `ReturnBaseInstr` does not inherit from `TemplateInstruction`
(which would fix the input count at 1), but inherits directly from `Instruction`.
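
To make the new shape easier to see, here is a minimal, self-contained sketch of the hierarchy this CL establishes. This is an editorial illustration, not code from the CL: `Value`, the input storage, the constructors, and `main` are simplified stand-ins, and only a couple of representative methods are shown.

```cpp
#include <cassert>
#include <cstdint>
#include <utility>
#include <vector>

// Stand-in for the VM's Value class.
struct Value {};

// Stand-in for the VM's Instruction base class: it does not fix how many
// inputs a subclass has or how they are stored.
class Instruction {
 public:
  virtual ~Instruction() = default;
  virtual intptr_t InputCount() const = 0;
  virtual Value* InputAt(intptr_t i) const = 0;
};

// Common parent of DartReturnInstr and NativeReturnInstr. It inherits from
// Instruction rather than from a one-input template so that
// NativeReturnInstr can later grow to two inputs.
class ReturnBaseInstr : public Instruction {
 public:
  bool ComputeCanDeoptimize() const { return false; }
  bool HasUnknownSideEffects() const { return false; }
};

// Return from Dart code to Dart code: always exactly one input.
class DartReturnInstr : public ReturnBaseInstr {
 public:
  explicit DartReturnInstr(Value* value) : inputs_{value} {}
  intptr_t InputCount() const override { return 1; }
  Value* InputAt(intptr_t i) const override {
    assert(i == 0);
    return inputs_[i];
  }

 private:
  Value* inputs_[1];
};

// Return from Dart code into native code (an FFI callback). Today it has a
// single input; the follow-up CL lets the marshaller dictate up to two.
class NativeReturnInstr : public ReturnBaseInstr {
 public:
  explicit NativeReturnInstr(std::vector<Value*> values)
      : inputs_(std::move(values)) {}
  intptr_t InputCount() const override {
    return static_cast<intptr_t>(inputs_.size());
  }
  Value* InputAt(intptr_t i) const override { return inputs_.at(i); }

 private:
  std::vector<Value*> inputs_;
};

int main() {
  Value v;
  DartReturnInstr dart_ret(&v);
  NativeReturnInstr native_ret({&v});
  assert(dart_ret.InputCount() == 1);
  assert(native_ret.InputCount() == 1);
  return 0;
}
```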

TEST=SDK build
TEST=*_il_test.dart

Change-Id: I017eb7802ae6c902b64f1cda20edf4a11408dbe1
Cq-Include-Trybots: luci.dart.try:vm-aot-linux-debug-x64-try
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/358904
Reviewed-by: Tess Strickland <sstrickl@google.com>
Commit-Queue: Daco Harkes <dacoharkes@google.com>
Daco Harkes 2024-03-26 11:12:38 +00:00 committed by Commit Queue
parent 05936f4d0b
commit 77d10f2ed9
50 changed files with 258 additions and 214 deletions

View file

@ -28,7 +28,7 @@ void matchIL$identity(FlowGraph graph) {
from: 'int64', to: 'uint32', is_truncating: true),
'retval' << match.IntConverter('uint32', from: 'uint32', to: 'int64'),
],
match.Return(retval),
match.DartReturn(retval),
]),
]);
}

View file

@ -170,11 +170,11 @@ void matchIL$throwInALoop(FlowGraph graph) {
]),
'return_found' <<
match.block('Target', [
match.Return('i'),
match.DartReturn('i'),
]),
'return_fail' <<
match.block('Target', [
match.Return(match.any),
match.DartReturn(match.any),
]),
'throw' <<
match.block('Target', [

View file

@ -47,11 +47,11 @@ void matchIL$strictCompareValueEqConstant(FlowGraph graph) {
]),
'B1' <<
match.block('Target', [
match.Return('int 0'),
match.DartReturn('int 0'),
]),
'B2' <<
match.block('Target', [
match.Return('int 42'),
match.DartReturn('int 42'),
]),
]);
}
@ -80,11 +80,11 @@ void matchIL$strictCompareConstantEqValue(FlowGraph graph) {
]),
'B1' <<
match.block('Target', [
match.Return('int 0'),
match.DartReturn('int 0'),
]),
'B2' <<
match.block('Target', [
match.Return('int 42'),
match.DartReturn('int 42'),
]),
]);
}
@ -113,11 +113,11 @@ void matchIL$strictCompareValueNeConstant(FlowGraph graph) {
]),
'B1' <<
match.block('Target', [
match.Return('int 42'),
match.DartReturn('int 42'),
]),
'B2' <<
match.block('Target', [
match.Return('int 0'),
match.DartReturn('int 0'),
]),
]);
}
@ -146,11 +146,11 @@ void matchIL$strictCompareConstantNeValue(FlowGraph graph) {
]),
'B1' <<
match.block('Target', [
match.Return('int 42'),
match.DartReturn('int 42'),
]),
'B2' <<
match.block('Target', [
match.Return('int 0'),
match.DartReturn('int 0'),
]),
]);
}
@ -179,11 +179,11 @@ void matchIL$strictCompareBoolEqTrue(FlowGraph graph) {
]),
'B1' <<
match.block('Target', [
match.Return('false'),
match.DartReturn('false'),
]),
'B2' <<
match.block('Target', [
match.Return('true'),
match.DartReturn('true'),
]),
]);
}
@ -212,11 +212,11 @@ void matchIL$strictCompareBoolNeTrue(FlowGraph graph) {
]),
'B1' <<
match.block('Target', [
match.Return('true'),
match.DartReturn('true'),
]),
'B2' <<
match.block('Target', [
match.Return('false'),
match.DartReturn('false'),
]),
]);
}
@ -245,11 +245,11 @@ void matchIL$equalityCompareValueEqConstant(FlowGraph graph) {
]),
'B1' <<
match.block('Target', [
match.Return('int 1'),
match.DartReturn('int 1'),
]),
'B2' <<
match.block('Target', [
match.Return('int 42'),
match.DartReturn('int 42'),
]),
]);
}
@ -282,11 +282,11 @@ void matchIL$foldingOfRepeatedComparison(FlowGraph graph) {
]),
'B1' <<
match.block('Target', [
match.Return('int 1'),
match.DartReturn('int 1'),
]),
'B2' <<
match.block('Target', [
match.Return('int 42'),
match.DartReturn('int 42'),
]),
]);
}

View file

@ -71,7 +71,7 @@ void matchIL$test(FlowGraph graph) {
]),
'LoopExit' <<
match.block('Target', [
match.Return(),
match.DartReturn(),
]),
'LoopBody' <<
match.block('Target', [

View file

@ -51,7 +51,7 @@ void matchIL$bar(FlowGraph graph) {
]),
'B5' <<
match.block('Join', [
match.Return('c_42'),
match.DartReturn('c_42'),
]),
'B6' << match.block('CatchBlock'),
]);

View file

@ -54,7 +54,7 @@ void matchIL$main_foo(FlowGraph graph) {
'B4' <<
match.block('Target', [
'v27' << match.BoxInt64('v5'),
match.Return('v27'),
match.DartReturn('v27'),
]),
]);
}

View file

@ -74,7 +74,7 @@ void matchIL$unboxed1(FlowGraph graph) {
'box_w' << match.Box('w'),
match.StaticCall('box_w'),
'result' << match.BinaryInt64Op('x', 'z'),
match.Return('result'),
match.DartReturn('result'),
]),
]);
}
@ -104,7 +104,7 @@ void matchIL$unboxed2(FlowGraph graph) {
'box_w' << match.Box('w'),
match.StaticCall('box_w'),
'result' << match.BinaryDoubleOp('y', 'w'),
match.Return('result'),
match.DartReturn('result'),
]),
]);
}

View file

@ -43,7 +43,7 @@ void matchIL$copyPointerContents(FlowGraph graph) {
]),
'B4' <<
match.block('Target', [
match.Return('cnull'),
match.DartReturn('cnull'),
]),
'B96' <<
match.block('Target', [
@ -51,7 +51,7 @@ void matchIL$copyPointerContents(FlowGraph graph) {
'src.data' << match.LoadField('src', slot: 'PointerBase.data'),
match.MemoryCopy('src.data', 'dest.data', 'c0', 'c0', 'c25',
element_size: 4),
match.Return('cnull'),
match.DartReturn('cnull'),
]),
]);
}

View file

@ -45,7 +45,7 @@ void matchIL$test(FlowGraph graph) {
match.StaticCall(),
match.MoveArgument('baz_boxed'),
match.StaticCall(),
match.Return(),
match.DartReturn(),
]),
]);
}

View file

@ -138,7 +138,7 @@ void matchIL$testDouble(FlowGraph graph) {
'v7_boxed' << match.Box('v7'),
match.MoveArgument('v7_boxed'),
match.StaticCall(),
match.Return(),
match.DartReturn(),
]),
]);
}
@ -274,7 +274,7 @@ void matchIL$testInt(FlowGraph graph) {
'v7_boxed' << match.BoxInt64('v7'),
match.MoveArgument('v7_boxed'),
match.StaticCall(),
match.Return(),
match.DartReturn(),
]),
]);
}

View file

@ -62,7 +62,7 @@ void matchIL$getRecord1(FlowGraph graph) {
'y' << match.Parameter(index: 1),
'x_boxed' << match.BoxInt64('x'),
'pair' << match.MakePair('x_boxed', 'y'),
match.Return('pair'),
match.DartReturn('pair'),
]),
]);
}
@ -75,7 +75,7 @@ void matchIL$getRecord2(FlowGraph graph) {
'bar' << match.Parameter(index: 1),
'bar_boxed' << match.BoxInt64('bar'),
'pair' << match.MakePair('bar_boxed', 'foo'),
match.Return('pair'),
match.DartReturn('pair'),
]),
]);
}
@ -90,7 +90,7 @@ void matchIL$B$get$record3(FlowGraph graph) {
'x_boxed' << match.BoxInt64('x'),
'y_boxed' << match.Box('y'),
'pair' << match.MakePair('x_boxed', 'y_boxed'),
match.Return('pair'),
match.DartReturn('pair'),
]),
]);
}
@ -105,7 +105,7 @@ void matchIL$B$record4(FlowGraph graph) {
'x_boxed' << match.BoxInt64('x'),
'y_boxed' << match.Box('y'),
'pair' << match.MakePair('x_boxed', 'y_boxed'),
match.Return('pair'),
match.DartReturn('pair'),
]),
]);
}
@ -155,7 +155,7 @@ void matchIL$testSimple(FlowGraph graph) {
'r4_boxed' << match.AllocateSmallRecord('r4_0', 'r4_y'),
match.MoveArgument('r4_boxed'),
match.StaticCall(),
match.Return(),
match.DartReturn(),
]),
]);
}
@ -199,7 +199,7 @@ void matchIL$testUnboxedRecordInTryCatch(FlowGraph graph) {
]),
'B3' <<
match.block('Join', [
match.Return(),
match.DartReturn(),
]),
]);
}

View file

@ -32,7 +32,7 @@ void matchIL$deref(FlowGraph graph) {
// 'unboxed' needs to be converted to int64 before returning.
'address' << match.IntConverter('unboxed', from: 'uint32', to: 'int64'),
],
match.Return(retvalName),
match.DartReturn(retvalName),
]),
]);
}

View file

@ -52,7 +52,7 @@ void matchIL$main_testForIn(FlowGraph graph) {
]),
'B4' <<
match.block('Target', [
match.Return(match.any),
match.DartReturn(match.any),
]),
'B3' <<
match.block('Target', [

View file

@ -48,7 +48,7 @@ void matchIL$sumAll(FlowGraph graph) {
]),
'B4' <<
match.block('Target', [
match.Return('v5'),
match.DartReturn('v5'),
]),
'B3' <<
match.block('Target', [

View file

@ -46,7 +46,7 @@ void matchIL$createAndIterate(FlowGraph graph) {
].withoutWildcards),
'loop_exit' <<
match.block('Target', [
match.Return(match.any),
match.DartReturn(match.any),
]),
'loop_body' <<
match.block(

View file

@ -63,7 +63,7 @@ void matchIL$bug1(FlowGraph graph) {
]),
'B4' <<
match.block('Join', [
match.Return(),
match.DartReturn(),
]),
]);
}
@ -90,7 +90,7 @@ void matchIL$bug2(FlowGraph graph) {
]),
'B4' <<
match.block('Join', [
match.Return(),
match.DartReturn(),
]),
]);
}

View file

@ -106,7 +106,7 @@ void matchIL$testCSE1(FlowGraph graph) {
]),
'B5' <<
match.block('Join', [
match.Return(match.any),
match.DartReturn(match.any),
]),
]);
}
@ -138,7 +138,7 @@ void matchIL$testCSE2(FlowGraph graph) {
]),
'B5' <<
match.block('Join', [
match.Return(match.any),
match.DartReturn(match.any),
]),
]);
}
@ -180,7 +180,7 @@ void matchIL$testCSE3(FlowGraph graph) {
]),
'B5' <<
match.block('Join', [
match.Return(match.any),
match.DartReturn(match.any),
]),
]);
}
@ -212,7 +212,7 @@ void matchIL$testLICM1(FlowGraph graph) {
]),
'B4' <<
match.block('Target', [
match.Return(match.any),
match.DartReturn(match.any),
]),
]);
}
@ -245,7 +245,7 @@ void matchIL$testLICM2(FlowGraph graph) {
]),
'B4' <<
match.block('Target', [
match.Return(match.any),
match.DartReturn(match.any),
]),
]);
}

View file

@ -52,7 +52,7 @@ void matchIL$retrieveFromView(FlowGraph graph) {
] else ...[
'retval' << match.LoadIndexed('data', 'index'),
],
match.Return('retval'),
match.DartReturn('retval'),
]),
]);
}
@ -74,7 +74,7 @@ void matchIL$retrieveFromBase(FlowGraph graph) {
match.GenericCheckBound('unboxed_len', 'n'),
'retval' << match.LoadIndexed('src', 'n'),
],
match.Return('retval'),
match.DartReturn('retval'),
]),
]);
}
@ -100,7 +100,7 @@ void matchIL$retrieveFromExternal(FlowGraph graph) {
] else ...[
'retval' << match.LoadIndexed('data', 'n'),
],
match.Return('retval'),
match.DartReturn('retval'),
]),
]);
}

View file

@ -48,7 +48,7 @@ void matchIL$testUnsignedTruncatingDivision(FlowGraph graph) {
'unboxed_len' << match.UnboxInt64('len'),
'retval' << match.BinaryInt64Op('unboxed_len', 'c32', op_kind: '~/'),
],
match.Return('retval'),
match.DartReturn('retval'),
]),
]);
}

View file

@ -61,10 +61,10 @@ class BlockBuilder : public ValueObject {
const Function& function() const { return flow_graph_->function(); }
ReturnInstr* AddReturn(Value* value) {
DartReturnInstr* AddReturn(Value* value) {
const auto& function = flow_graph_->function();
const auto representation = FlowGraph::ReturnRepresentationOf(function);
ReturnInstr* instr = new ReturnInstr(
DartReturnInstr* instr = new DartReturnInstr(
Source(), value, CompilerState::Current().GetNextDeoptId(),
representation);
AddInstruction(instr);

View file

@ -184,7 +184,7 @@ void CodeStatistics::AppendTo(CombinedCodeStatistics* stat) {
if (bytes > 0) {
stat->entries_[i].bytes += bytes;
if (i != CombinedCodeStatistics::kTagParallelMove &&
i != CombinedCodeStatistics::kTagReturn &&
i != CombinedCodeStatistics::kTagDartReturn &&
i != CombinedCodeStatistics::kTagCheckStackOverflow &&
i != CombinedCodeStatistics::kTagCheckStackOverflowSlowPath) {
returns_constant = false;

View file

@ -191,7 +191,7 @@ void ConstantPropagator::VisitParallelMove(ParallelMoveInstr* instr) {
// Analysis of control instructions. Unconditional successors are
// reachable. Conditional successors are reachable depending on the
// constant value of the condition.
void ConstantPropagator::VisitReturn(ReturnInstr* instr) {
void ConstantPropagator::VisitDartReturn(DartReturnInstr* instr) {
// Nothing to do.
}

View file

@ -89,8 +89,8 @@ ISOLATE_UNIT_TEST_CASE(ConstantPropagation_PhiUnwrappingAndConvergence) {
EXPECT_EQ(2, blocks.length());
EXPECT_PROPERTY(blocks[0], it.IsGraphEntry());
EXPECT_PROPERTY(blocks[1], it.IsFunctionEntry());
EXPECT_PROPERTY(blocks[1]->next(), it.IsReturn());
EXPECT_PROPERTY(blocks[1]->next()->AsReturn(),
EXPECT_PROPERTY(blocks[1]->next(), it.IsDartReturn());
EXPECT_PROPERTY(blocks[1]->next()->AsDartReturn(),
it.value()->definition() == v0);
}
@ -146,7 +146,7 @@ static void ConstantPropagatorUnboxedOpTest(
auto b2 = H.TargetEntry();
auto b3 = H.TargetEntry();
auto b4 = H.JoinEntry();
ReturnInstr* ret;
DartReturnInstr* ret;
{
BlockBuilder builder(H.flow_graph(), b1);
@ -330,7 +330,7 @@ void StrictCompareSentinel(Thread* thread,
ConstantPropagator::Optimize(H.flow_graph());
FlowGraphPrinter::PrintGraph("After ConstantPropagator", H.flow_graph());
ReturnInstr* ret = nullptr;
DartReturnInstr* ret = nullptr;
ILMatcher cursor(H.flow_graph(),
H.flow_graph()->graph_entry()->normal_entry(), true);
@ -338,7 +338,7 @@ void StrictCompareSentinel(Thread* thread,
kMatchAndMoveFunctionEntry,
kMatchAndMoveLoadStaticField,
// The StrictCompare instruction should be removed.
{kMatchReturn, &ret},
{kMatchDartReturn, &ret},
}));
EXPECT_PROPERTY(ret, it.value()->BindsToConstant());

View file

@ -1980,7 +1980,7 @@ void FlowGraph::InsertConversion(Representation from,
} else if ((to == kPairOfTagged) && (from == kTagged)) {
// Insert conversion to an unboxed record, which can be only used
// in Return instruction.
ASSERT(use->instruction()->IsReturn());
ASSERT(use->instruction()->IsDartReturn());
Definition* x = new (Z)
LoadFieldInstr(use->CopyWithType(),
Slot::GetRecordFieldSlot(
@ -2491,7 +2491,7 @@ void FlowGraph::WidenSmiToInt32() {
// We assume that tagging before returning or pushing argument costs
// very little compared to the cost of the return/call itself.
ASSERT(!instr->IsMoveArgument());
if (!instr->IsReturn() &&
if (!instr->IsReturnBase() &&
(use->use_index() >= instr->ArgumentCount())) {
gain--;
if (FLAG_support_il_printer && FLAG_trace_smi_widening) {

View file

@ -116,7 +116,7 @@ static bool DefDominatesUse(Definition* def, Instruction* instruction) {
// Returns true if instruction forces control flow.
static bool IsControlFlow(Instruction* instruction) {
return instruction->IsBranch() || instruction->IsGoto() ||
instruction->IsIndirectGoto() || instruction->IsReturn() ||
instruction->IsIndirectGoto() || instruction->IsReturnBase() ||
instruction->IsThrow() || instruction->IsReThrow() ||
instruction->IsTailCall();
}
@ -461,8 +461,7 @@ void FlowGraphChecker::VisitDefUse(Definition* def,
// same basic block as the definition.
ASSERT2(def->GetBlock() == instruction->GetBlock(), def, instruction);
// Untagged pointers should not be returned from functions or FFI callbacks.
ASSERT2(!instruction->IsReturn() && !instruction->IsNativeReturn(), def,
instruction);
ASSERT2(!instruction->IsReturnBase(), def, instruction);
// Make sure no instruction between the definition and the use (including
// the use) can trigger GC.
for (const auto* current = def->next(); current != instruction->next();

View file

@ -358,7 +358,7 @@ ISOLATE_UNIT_TEST_CASE(FlowGraph_PhiUnboxingHeuristic_Double) {
// After loop
kMatchAndMoveTargetEntry,
kMatchAndMoveBox,
kMatchReturn,
kMatchDartReturn,
}));
}
@ -412,7 +412,7 @@ static void TestPhiUnboxingHeuristicSimd(const char* script) {
// After loop
kMatchAndMoveTargetEntry,
kMatchAndMoveBox,
kMatchReturn,
kMatchDartReturn,
}));
}

View file

@ -1299,7 +1299,7 @@ Instruction* Instruction::RemoveFromGraph(bool return_previous) {
ASSERT(!IsBlockEntry());
ASSERT(!IsBranch());
ASSERT(!IsThrow());
ASSERT(!IsReturn());
ASSERT(!IsReturnBase());
ASSERT(!IsReThrow());
ASSERT(!IsGoto());
ASSERT(previous() != nullptr);
@ -7862,7 +7862,7 @@ void StoreFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
}
const Code& ReturnInstr::GetReturnStub(FlowGraphCompiler* compiler) const {
const Code& DartReturnInstr::GetReturnStub(FlowGraphCompiler* compiler) const {
const Function& function = compiler->parsed_function().function();
ASSERT(function.IsSuspendableFunction());
if (function.IsAsyncFunction()) {

View file

@ -422,7 +422,7 @@ struct InstrAttrs {
M(TailCall, kNoGC) \
M(ParallelMove, kNoGC) \
M(MoveArgument, kNoGC) \
M(Return, kNoGC) \
M(DartReturn, kNoGC) \
M(NativeReturn, kNoGC) \
M(Throw, kNoGC) \
M(ReThrow, kNoGC) \
@ -560,6 +560,7 @@ struct InstrAttrs {
M(CheckBoundBase, _) \
M(Comparison, _) \
M(InstanceCallBase, _) \
M(ReturnBase, _) \
M(ShiftIntegerOp, _) \
M(UnaryIntegerOp, _) \
M(UnboxInteger, _)
@ -1079,7 +1080,7 @@ class Instruction : public ZoneAllocated {
Instruction* next() const { return next_; }
void set_next(Instruction* instr) {
ASSERT(!IsGraphEntry());
ASSERT(!IsReturn());
ASSERT(!IsReturnBase());
ASSERT(!IsBranch() || (instr == nullptr));
ASSERT(!IsPhi());
ASSERT(instr == nullptr || !instr->IsBlockEntry());
@ -2221,8 +2222,7 @@ class NativeEntryInstr : public FunctionEntryInstr {
PRINT_TO_SUPPORT
#define FIELD_LIST(F) \
F(const compiler::ffi::CallbackMarshaller&, marshaller_)
#define FIELD_LIST(F) F(const compiler::ffi::CallbackMarshaller&, marshaller_)
DECLARE_INSTRUCTION_SERIALIZABLE_FIELDS(NativeEntryInstr,
FunctionEntryInstr,
@ -3410,19 +3410,45 @@ inline Definition* Instruction::ArgumentAt(intptr_t index) const {
return ArgumentValueAt(index)->definition();
}
class ReturnInstr : public TemplateInstruction<1, NoThrow> {
class ReturnBaseInstr : public Instruction {
public:
ReturnInstr(const InstructionSource& source,
Value* value,
intptr_t deopt_id,
Representation representation = kTagged)
: TemplateInstruction(source, deopt_id),
explicit ReturnBaseInstr(const InstructionSource& source,
intptr_t deopt_id = DeoptId::kNone)
: Instruction(source, deopt_id) {}
ReturnBaseInstr() : Instruction(DeoptId::kNone) {}
virtual bool ComputeCanDeoptimize() const { return false; }
virtual bool HasUnknownSideEffects() const { return false; }
virtual bool MayThrow() const { return false; }
virtual SpeculativeMode SpeculativeModeOfInput(intptr_t index) const {
return kNotSpeculative;
}
DECLARE_ABSTRACT_INSTRUCTION(ReturnBase)
DECLARE_EMPTY_SERIALIZATION(ReturnBaseInstr, Instruction)
private:
DISALLOW_COPY_AND_ASSIGN(ReturnBaseInstr);
};
class DartReturnInstr : public ReturnBaseInstr {
public:
DartReturnInstr(const InstructionSource& source,
Value* value,
intptr_t deopt_id,
Representation representation = kTagged)
: ReturnBaseInstr(source, deopt_id),
token_pos_(source.token_pos),
representation_(representation) {
SetInputAt(0, value);
}
DECLARE_INSTRUCTION(Return)
DECLARE_INSTRUCTION(DartReturn)
virtual TokenPosition token_pos() const { return token_pos_; }
Value* value() const { return inputs_[0]; }
@ -3433,20 +3459,11 @@ class ReturnInstr : public TemplateInstruction<1, NoThrow> {
return true;
}
virtual bool ComputeCanDeoptimize() const { return false; }
virtual bool HasUnknownSideEffects() const { return false; }
virtual bool AttributesEqual(const Instruction& other) const {
auto const other_return = other.AsReturn();
auto const other_return = other.AsDartReturn();
return token_pos() == other_return->token_pos();
}
virtual SpeculativeMode SpeculativeModeOfInput(intptr_t index) const {
ASSERT(index == 0);
return kNotSpeculative;
}
virtual intptr_t DeoptimizationTarget() const { return DeoptId::kNone; }
virtual Representation representation() const { return representation_; }
@ -3456,29 +3473,38 @@ class ReturnInstr : public TemplateInstruction<1, NoThrow> {
return representation_;
}
virtual intptr_t InputCount() const { return 1; }
virtual Value* InputAt(intptr_t i) const { return inputs_[i]; }
#define FIELD_LIST(F) \
F(const TokenPosition, token_pos_) \
F(const Representation, representation_)
DECLARE_INSTRUCTION_SERIALIZABLE_FIELDS(ReturnInstr,
TemplateInstruction,
DECLARE_INSTRUCTION_SERIALIZABLE_FIELDS(DartReturnInstr,
ReturnBaseInstr,
FIELD_LIST)
#undef FIELD_LIST
protected:
EmbeddedArray<Value*, 1> inputs_;
private:
const Code& GetReturnStub(FlowGraphCompiler* compiler) const;
DISALLOW_COPY_AND_ASSIGN(ReturnInstr);
virtual void RawSetInputAt(intptr_t i, Value* value) { inputs_[i] = value; }
DISALLOW_COPY_AND_ASSIGN(DartReturnInstr);
};
// Represents a return from a Dart function into native code.
class NativeReturnInstr : public ReturnInstr {
class NativeReturnInstr : public ReturnBaseInstr {
public:
NativeReturnInstr(const InstructionSource& source,
Value* value,
const compiler::ffi::CallbackMarshaller& marshaller,
intptr_t deopt_id)
: ReturnInstr(source, value, deopt_id), marshaller_(marshaller) {}
NativeReturnInstr(Value* value,
const compiler::ffi::CallbackMarshaller& marshaller)
: ReturnBaseInstr(), marshaller_(marshaller) {
SetInputAt(0, value);
}
DECLARE_INSTRUCTION(NativeReturn)
@ -3490,21 +3516,37 @@ class NativeReturnInstr : public ReturnInstr {
}
virtual bool CanBecomeDeoptimizationTarget() const {
// Unlike ReturnInstr, NativeReturnInstr cannot be inlined (because it's
// Unlike DartReturnInstr, NativeReturnInstr cannot be inlined (because it's
// returning into native code).
return false;
}
virtual intptr_t InputCount() const {
return marshaller_.NumReturnDefinitions();
}
virtual bool AttributesEqual(const Instruction& other) const {
auto const other_return = other.AsNativeReturn();
return token_pos() == other_return->token_pos();
}
virtual Value* InputAt(intptr_t i) const { return inputs_[i]; }
#define FIELD_LIST(F) F(const compiler::ffi::CallbackMarshaller&, marshaller_)
DECLARE_INSTRUCTION_SERIALIZABLE_FIELDS(NativeReturnInstr,
ReturnInstr,
ReturnBaseInstr,
FIELD_LIST)
#undef FIELD_LIST
protected:
EmbeddedArray<Value*, 1> inputs_;
private:
void EmitReturnMoves(FlowGraphCompiler* compiler);
virtual void RawSetInputAt(intptr_t i, Value* value) { inputs_[i] = value; }
DISALLOW_COPY_AND_ASSIGN(NativeReturnInstr);
};

View file

@ -695,7 +695,8 @@ void MoveArgumentInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
pusher.Flush(compiler);
}
LocationSummary* ReturnInstr::MakeLocationSummary(Zone* zone, bool opt) const {
LocationSummary* DartReturnInstr::MakeLocationSummary(Zone* zone,
bool opt) const {
const intptr_t kNumInputs = 1;
const intptr_t kNumTemps = 0;
LocationSummary* locs = new (zone)
@ -727,7 +728,7 @@ LocationSummary* ReturnInstr::MakeLocationSummary(Zone* zone, bool opt) const {
// Attempt optimized compilation at return instruction instead of at the entry.
// The entry needs to be patchable, no inlined objects are allowed in the area
// that will be overwritten by the patch instructions: a branch macro sequence.
void ReturnInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
void DartReturnInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
if (locs()->in(0).IsRegister()) {
const Register result = locs()->in(0).reg();
ASSERT(result == CallingConventions::kReturnReg);
@ -771,7 +772,7 @@ void ReturnInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
#endif
ASSERT(__ constant_pool_allowed());
__ LeaveDartFrameAndReturn(); // Disallows constant pool use.
// This ReturnInstr may be emitted out of order by the optimizer. The next
// This DartReturnInstr may be emitted out of order by the optimizer. The next
// block may be a target expecting a properly set constant pool pointer.
__ set_constant_pool_allowed(true);
}

View file

@ -542,7 +542,8 @@ void MoveArgumentInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
pusher.Flush(compiler);
}
LocationSummary* ReturnInstr::MakeLocationSummary(Zone* zone, bool opt) const {
LocationSummary* DartReturnInstr::MakeLocationSummary(Zone* zone,
bool opt) const {
const intptr_t kNumInputs = 1;
const intptr_t kNumTemps = 0;
LocationSummary* locs = new (zone)
@ -574,7 +575,7 @@ LocationSummary* ReturnInstr::MakeLocationSummary(Zone* zone, bool opt) const {
// Attempt optimized compilation at return instruction instead of at the entry.
// The entry needs to be patchable, no inlined objects are allowed in the area
// that will be overwritten by the patch instructions: a branch macro sequence.
void ReturnInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
void DartReturnInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
if (locs()->in(0).IsRegister()) {
const Register result = locs()->in(0).reg();
ASSERT(result == CallingConventions::kReturnReg);
@ -619,7 +620,7 @@ void ReturnInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
ASSERT(__ constant_pool_allowed());
__ LeaveDartFrame(); // Disallows constant pool use.
__ ret();
// This ReturnInstr may be emitted out of order by the optimizer. The next
// This DartReturnInstr may be emitted out of order by the optimizer. The next
// block may be a target expecting a properly set constant pool pointer.
__ set_constant_pool_allowed(true);
}

View file

@ -345,7 +345,8 @@ void MoveArgumentInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
}
LocationSummary* ReturnInstr::MakeLocationSummary(Zone* zone, bool opt) const {
LocationSummary* DartReturnInstr::MakeLocationSummary(Zone* zone,
bool opt) const {
const intptr_t kNumInputs = 1;
const intptr_t kNumTemps = 0;
LocationSummary* locs = new (zone)
@ -358,7 +359,7 @@ LocationSummary* ReturnInstr::MakeLocationSummary(Zone* zone, bool opt) const {
// Attempt optimized compilation at return instruction instead of at the entry.
// The entry needs to be patchable, no inlined objects are allowed in the area
// that will be overwritten by the patch instruction: a jump).
void ReturnInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
void DartReturnInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
Register result = locs()->in(0).reg();
ASSERT(result == EAX);

View file

@ -1423,7 +1423,7 @@ void CCallInstr::PrintOperandsTo(BaseTextBuffer* f) const {
}
void NativeReturnInstr::PrintOperandsTo(BaseTextBuffer* f) const {
value()->PrintTo(f);
InputAt(0)->PrintTo(f);
f->AddString(" (@");
marshaller_.Location(compiler::ffi::kResultIndex).PrintTo(f);
f->AddString(")");

View file

@ -594,7 +594,8 @@ void MoveArgumentInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
}
LocationSummary* ReturnInstr::MakeLocationSummary(Zone* zone, bool opt) const {
LocationSummary* DartReturnInstr::MakeLocationSummary(Zone* zone,
bool opt) const {
const intptr_t kNumInputs = 1;
const intptr_t kNumTemps = 0;
LocationSummary* locs = new (zone)
@ -637,7 +638,7 @@ LocationSummary* ReturnInstr::MakeLocationSummary(Zone* zone, bool opt) const {
// Attempt optimized compilation at return instruction instead of at the entry.
// The entry needs to be patchable, no inlined objects are allowed in the area
// that will be overwritten by the patch instructions: a branch macro sequence.
void ReturnInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
void DartReturnInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
if (locs()->in(0).IsRegister()) {
const Register result = locs()->in(0).reg();
ASSERT(result == CallingConventions::kReturnReg);
@ -673,7 +674,7 @@ void ReturnInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
ASSERT(__ constant_pool_allowed());
__ LeaveDartFrame(fp_sp_dist); // Disallows constant pool use.
__ ret();
// This ReturnInstr may be emitted out of order by the optimizer. The next
// This DartReturnInstr may be emitted out of order by the optimizer. The next
// block may be a target expecting a properly set constant pool pointer.
__ set_constant_pool_allowed(true);
}

View file

@ -209,7 +209,7 @@ bool TestIntConverterCanonicalizationRule(Thread* thread,
auto normal_entry = H.flow_graph()->graph_entry()->normal_entry();
Definition* v0;
ReturnInstr* ret;
DartReturnInstr* ret;
{
BlockBuilder builder(H.flow_graph(), normal_entry);
@ -280,7 +280,7 @@ ISOLATE_UNIT_TEST_CASE(IL_PhiCanonicalization) {
auto b4 = H.TargetEntry();
Definition* v0;
ReturnInstr* ret;
DartReturnInstr* ret;
PhiInstr* phi;
{
@ -676,7 +676,7 @@ ISOLATE_UNIT_TEST_CASE(IRTest_DoubleEqualsSmi) {
kMoveGlob,
kMatchAndMoveBinaryDoubleOp,
kMatchAndMoveEqualityCompare,
kMatchReturn,
kMatchDartReturn,
}));
}
@ -902,13 +902,13 @@ ISOLATE_UNIT_TEST_CASE(IRTest_LoadThread) {
CompilerPass::kComputeSSA,
});
ReturnInstr* return_instr = nullptr;
DartReturnInstr* return_instr = nullptr;
{
ILMatcher cursor(flow_graph, flow_graph->graph_entry()->normal_entry());
EXPECT(cursor.TryMatch({
kMoveGlob,
{kMatchReturn, &return_instr},
{kMatchDartReturn, &return_instr},
}));
}
@ -936,7 +936,7 @@ ISOLATE_UNIT_TEST_CASE(IRTest_LoadThread) {
kMatchAndMoveLoadThread,
kMatchAndMoveIntConverter,
kMatchAndMoveBox,
kMatchReturn,
kMatchDartReturn,
}));
}
@ -1010,7 +1010,7 @@ ISOLATE_UNIT_TEST_CASE(IRTest_CachableIdempotentCall) {
kMoveGlob,
{kMatchAndMoveStaticCall, &static_call},
kMoveGlob,
kMatchReturn,
kMatchDartReturn,
}));
}
@ -1034,7 +1034,7 @@ ISOLATE_UNIT_TEST_CASE(IRTest_CachableIdempotentCall) {
// adds boxing.
kMatchBox,
kMoveGlob,
kMatchReturn,
kMatchDartReturn,
}));
}
@ -1306,12 +1306,12 @@ static void TestConstantFoldToSmi(const Library& root_library,
auto entry = flow_graph->graph_entry()->normal_entry();
EXPECT(entry != nullptr);
ReturnInstr* ret = nullptr;
DartReturnInstr* ret = nullptr;
ILMatcher cursor(flow_graph, entry, true, ParallelMovesHandling::kSkip);
RELEASE_ASSERT(cursor.TryMatch({
kMoveGlob,
{kMatchReturn, &ret},
{kMatchDartReturn, &ret},
}));
ConstantInstr* constant = ret->value()->definition()->AsConstant();
@ -1486,7 +1486,7 @@ static void TestCanonicalizationOfTypedDataViewFieldLoads(
Definition* array;
Definition* load;
ReturnInstr* ret;
DartReturnInstr* ret;
{
BlockBuilder builder(H.flow_graph(), b1);
@ -1549,7 +1549,7 @@ ISOLATE_UNIT_TEST_CASE(IL_Canonicalize_InstanceCallWithNoICDataInAOT) {
auto b1 = H.flow_graph()->graph_entry()->normal_entry();
InstanceCallInstr* length_call;
ReturnInstr* ret;
DartReturnInstr* ret;
{
BlockBuilder builder(H.flow_graph(), b1);
@ -1586,7 +1586,7 @@ static void TestTestRangeCanonicalize(const AbstractType& type,
auto normal_entry = H.flow_graph()->graph_entry()->normal_entry();
ReturnInstr* ret;
DartReturnInstr* ret;
{
BlockBuilder builder(H.flow_graph(), normal_entry);
Definition* param = builder.AddParameter(0, kTagged);
@ -1634,7 +1634,7 @@ void TestStaticFieldForwarding(Thread* thread,
const auto constant_42 = H.IntConstant(42);
const auto constant_24 = H.IntConstant(24);
Definition* load;
ReturnInstr* ret;
DartReturnInstr* ret;
{
BlockBuilder builder(H.flow_graph(), b1);

View file

@ -447,7 +447,8 @@ void MoveArgumentInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
}
LocationSummary* ReturnInstr::MakeLocationSummary(Zone* zone, bool opt) const {
LocationSummary* DartReturnInstr::MakeLocationSummary(Zone* zone,
bool opt) const {
const intptr_t kNumInputs = 1;
const intptr_t kNumTemps = 0;
LocationSummary* locs = new (zone)
@ -479,7 +480,7 @@ LocationSummary* ReturnInstr::MakeLocationSummary(Zone* zone, bool opt) const {
// Attempt optimized compilation at return instruction instead of at the entry.
// The entry needs to be patchable, no inlined objects are allowed in the area
// that will be overwritten by the patch instruction: a jump).
void ReturnInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
void DartReturnInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
if (locs()->in(0).IsRegister()) {
const Register result = locs()->in(0).reg();
ASSERT(result == CallingConventions::kReturnReg);
@ -525,7 +526,7 @@ void ReturnInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
ASSERT(__ constant_pool_allowed());
__ LeaveDartFrame(); // Disallows constant pool use.
__ ret();
// This ReturnInstr may be emitted out of order by the optimizer. The next
// This DartReturnInstr may be emitted out of order by the optimizer. The next
// block may be a target expecting a properly set constant pool pointer.
__ set_constant_pool_allowed(true);
}

View file

@ -673,7 +673,8 @@ static bool IsSmallLeafOrReduction(int inlining_depth,
BlockEntryInstr* entry = block_it.Current();
for (ForwardInstructionIterator it(entry); !it.Done(); it.Advance()) {
Instruction* current = it.Current();
if (current->IsReturn()) continue;
if (current->IsDartReturn()) continue;
ASSERT(!current->IsNativeReturn());
++instruction_count;
if (current->IsInstanceCall() || current->IsPolymorphicInstanceCall() ||
current->IsClosureCall()) {
@ -1985,9 +1986,9 @@ PolymorphicInliner::PolymorphicInliner(CallSiteInliner* owner,
num_variants_(call->NumberOfChecks()),
variants_(call->targets_),
inlined_variants_(zone()),
non_inlined_variants_(new (zone()) CallTargets(zone())),
non_inlined_variants_(new(zone()) CallTargets(zone())),
inlined_entries_(num_variants_),
exit_collector_(new (Z) InlineExitCollector(owner->caller_graph(), call)),
exit_collector_(new(Z) InlineExitCollector(owner->caller_graph(), call)),
caller_function_(caller_function) {}
IsolateGroup* PolymorphicInliner::isolate_group() const {
@ -2294,7 +2295,7 @@ TargetEntryInstr* PolymorphicInliner::BuildDecisionGraph() {
owner_->caller_graph()->AllocateSSAIndex(fallback_call);
fallback_call->InheritDeoptTarget(zone(), call_);
fallback_call->set_total_call_count(call_->CallCount());
ReturnInstr* fallback_return = new ReturnInstr(
DartReturnInstr* fallback_return = new DartReturnInstr(
call_->source(), new Value(fallback_call), DeoptId::kNone);
fallback_return->InheritDeoptTargetAfter(owner_->caller_graph(), call_,
fallback_call);

View file

@ -243,7 +243,7 @@ ISOLATE_UNIT_TEST_CASE(Inliner_List_generate) {
// After loop
kMatchAndMoveTargetEntry,
kMatchReturn,
kMatchDartReturn,
}));
}
@ -310,7 +310,7 @@ ISOLATE_UNIT_TEST_CASE(Inliner_always_consider_inlining) {
kMoveGlob,
{kMatchAndMoveStaticCall, &call_print3},
kMoveGlob,
kMatchReturn,
kMatchDartReturn,
}));
EXPECT(strcmp(call_print1->function().UserVisibleNameCString(), "print") ==
0);
@ -358,7 +358,7 @@ ISOLATE_UNIT_TEST_CASE(Inliner_List_of_inlined) {
kMoveGlob,
{kMatchAndMoveStaticCall, &call_print},
kMoveGlob,
kMatchReturn,
kMatchDartReturn,
}));
EXPECT(strcmp(call_print->function().UserVisibleNameCString(), "print") == 0);
// Length is fully forwarded and string interpolation is constant folded.

View file

@ -307,13 +307,13 @@ static void RunMemoryCopyInstrTest(intptr_t src_start,
auto* const param_length = initial_defs->At(4)->AsParameter();
EXPECT(param_length != nullptr);
ReturnInstr* return_instr;
DartReturnInstr* return_instr;
{
ILMatcher cursor(flow_graph, entry_instr);
EXPECT(cursor.TryMatch({
kMoveGlob,
{kMatchReturn, &return_instr},
{kMatchDartReturn, &return_instr},
}));
}
@ -362,13 +362,13 @@ static void RunMemoryCopyInstrTest(intptr_t src_start,
kMatchAndMoveUnbox,
kMatchAndMoveUnbox,
kMatchAndMoveMemoryCopy,
kMatchReturn,
kMatchDartReturn,
}));
} else {
EXPECT(cursor.TryMatch({
kMoveGlob,
kMatchAndMoveMemoryCopy,
kMatchReturn,
kMatchDartReturn,
}));
}
}

View file

@ -683,7 +683,7 @@ ISOLATE_UNIT_TEST_CASE(RangeAnalysis_ShiftUint32Op) {
kMoveGlob,
kMatchAndMoveBinaryUint32Op,
kMoveGlob,
kMatchReturn,
kMatchDartReturn,
}));
EXPECT(shift->shift_range() != nullptr);
@ -724,7 +724,7 @@ ISOLATE_UNIT_TEST_CASE(RangeAnalysis_LoadClassId) {
kMoveGlob,
kMatchAndMoveTestRange,
kMoveGlob,
kMatchReturn,
kMatchDartReturn,
}));
EXPECT(load_cid->range() != nullptr);

View file

@ -3124,7 +3124,7 @@ class StoreOptimizer : public LivenessAnalysis {
continue;
}
if (instr->IsThrow() || instr->IsReThrow() || instr->IsReturn()) {
if (instr->IsThrow() || instr->IsReThrow() || instr->IsReturnBase()) {
// Initialize live-out for exit blocks since it won't be computed
// otherwise during the fixed point iteration.
live_out->CopyFrom(all_places);
@ -3132,24 +3132,20 @@ class StoreOptimizer : public LivenessAnalysis {
// Handle side effects, deoptimization and function return.
if (CompilerState::Current().is_aot()) {
// Instructions that return from the function, instructions with
// side effects are considered as loads from all places.
if (instr->HasUnknownSideEffects() || instr->IsReturn() ||
instr->MayThrow()) {
if (instr->HasUnknownSideEffects() || instr->IsReturn()) {
// Instructions that may throw and has unknown side effects
// still load from all places.
live_in->CopyFrom(all_places);
if (instr->HasUnknownSideEffects() || instr->IsReturnBase()) {
// An instruction that returns or has unknown side effects
// is treated as if it loads from all places.
live_in->CopyFrom(all_places);
continue;
} else if (instr->MayThrow()) {
if (block->try_index() == kInvalidTryIndex) {
// Outside of a try-catch block, an instruction that may throw
// is only treated as if it loads from escaping places.
live_in->AddAll(all_aliased_places);
} else {
// If we are outside of try-catch block, instructions that "may
// throw" only "load from escaping places".
// If we are inside of try-catch block, instructions that "may
// throw" also "load from all places".
if (block->try_index() == kInvalidTryIndex) {
live_in->AddAll(all_aliased_places);
} else {
live_in->CopyFrom(all_places);
}
// Inside of a try-catch block, an instruction that may throw
// is treated as if it loads from all places.
live_in->CopyFrom(all_places);
}
continue;
}
@ -3163,7 +3159,7 @@ class StoreOptimizer : public LivenessAnalysis {
// variables include also non-escaping(not aliased) ones, so
// how to deal with that needs to be figured out.
if (instr->HasUnknownSideEffects() || instr->CanDeoptimize() ||
instr->MayThrow() || instr->IsReturn()) {
instr->MayThrow() || instr->IsReturnBase()) {
// Instructions that return from the function, instructions with
// side effects and instructions that can deoptimize are considered
// as loads from all places.

View file

@ -271,7 +271,7 @@ static void TestAliasingViaRedefinition(
LoadFieldInstr* v1;
StaticCallInstr* call;
LoadFieldInstr* v4;
ReturnInstr* ret;
DartReturnInstr* ret;
{
BlockBuilder builder(H.flow_graph(), b1);
@ -291,7 +291,7 @@ static void TestAliasingViaRedefinition(
std::move(args), S.GetNextDeoptId(), 0, ICData::RebindRule::kStatic));
v4 = builder.AddDefinition(
new LoadFieldInstr(new Value(v2), slot, InstructionSource()));
ret = builder.AddInstruction(new ReturnInstr(
ret = builder.AddInstruction(new DartReturnInstr(
InstructionSource(), new Value(v4), S.GetNextDeoptId()));
}
H.FinishGraph();
@ -445,7 +445,7 @@ static void TestAliasingViaStore(
LoadFieldInstr* v1;
StaticCallInstr* call;
LoadFieldInstr* v4;
ReturnInstr* ret;
DartReturnInstr* ret;
{
BlockBuilder builder(H.flow_graph(), b1);
@ -479,7 +479,7 @@ static void TestAliasingViaStore(
std::move(args), S.GetNextDeoptId(), 0, ICData::RebindRule::kStatic));
v4 = builder.AddDefinition(
new LoadFieldInstr(new Value(v0), slot, InstructionSource()));
ret = builder.AddInstruction(new ReturnInstr(
ret = builder.AddInstruction(new DartReturnInstr(
InstructionSource(), new Value(v4), S.GetNextDeoptId()));
}
H.FinishGraph();
@ -608,7 +608,7 @@ ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaTypedDataAndUntaggedTypedData) {
LoadFieldInstr* v2;
StoreIndexedInstr* store;
LoadIndexedInstr* v3;
ReturnInstr* ret;
DartReturnInstr* ret;
{
BlockBuilder builder(H.flow_graph(), b1);
@ -644,7 +644,7 @@ ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaTypedDataAndUntaggedTypedData) {
InstructionSource()));
// return v3
ret = builder.AddInstruction(new ReturnInstr(
ret = builder.AddInstruction(new DartReturnInstr(
InstructionSource(), new Value(v3), S.GetNextDeoptId()));
}
H.FinishGraph();
@ -665,7 +665,7 @@ ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaTypedDataAndUntaggedTypedData) {
{kMatchAndMoveLoadField, &lf},
{kMatchAndMoveStoreIndexed, &s},
{kMatchAndMoveLoadIndexed, &li2},
{kMatchReturn, &r},
{kMatchDartReturn, &r},
}));
EXPECT(array == sc);
EXPECT(v1 == li);
@ -718,7 +718,7 @@ ISOLATE_UNIT_TEST_CASE(LoadOptimizer_LoadDataFieldOfNewTypedData) {
AllocateObjectInstr* view;
LoadFieldInstr* v1;
StoreFieldInstr* store;
ReturnInstr* ret;
DartReturnInstr* ret;
{
BlockBuilder builder(H.flow_graph(), b1);
@ -745,7 +745,7 @@ ISOLATE_UNIT_TEST_CASE(LoadOptimizer_LoadDataFieldOfNewTypedData) {
InstructionSource(), StoreFieldInstr::Kind::kInitializing));
// return view
ret = builder.AddInstruction(new ReturnInstr(
ret = builder.AddInstruction(new DartReturnInstr(
InstructionSource(), new Value(view), S.GetNextDeoptId()));
}
H.FinishGraph();
@ -764,7 +764,7 @@ ISOLATE_UNIT_TEST_CASE(LoadOptimizer_LoadDataFieldOfNewTypedData) {
{kMatchAndMoveAllocateObject, &alloc_view},
{kMatchAndMoveLoadField, &lf},
{kMatchAndMoveStoreField, &sf},
{kMatchReturn, &r},
{kMatchDartReturn, &r},
}));
EXPECT(array == alloc_array);
EXPECT(view == alloc_view);
@ -806,7 +806,7 @@ ISOLATE_UNIT_TEST_CASE(LoadOptimizer_TypedArrayViewAliasing) {
auto b1 = H.flow_graph()->graph_entry()->normal_entry();
Definition* load;
ReturnInstr* ret;
DartReturnInstr* ret;
{
BlockBuilder builder(H.flow_graph(), b1);
@ -960,7 +960,7 @@ ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedundantStaticFieldInitialization) {
kMoveParallelMoves,
kMatchAndMoveBinarySmiOp,
kMoveParallelMoves,
kMatchReturn,
kMatchDartReturn,
}));
}
@ -1007,7 +1007,7 @@ ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedundantInitializerCallAfterIf) {
kMoveParallelMoves,
{kMatchAndMoveLoadStaticField, &load_static_after_if},
kMoveGlob,
kMatchReturn,
kMatchDartReturn,
}));
EXPECT(!load_static_after_if->calls_initializer());
}
@ -1153,7 +1153,7 @@ Vec3Mut main() {
{kMatchAndMoveStoreField, &store1},
{kMatchAndMoveStoreField, &store2},
{kMatchAndMoveStoreField, &store3},
kMatchReturn,
kMatchDartReturn,
}));
EXPECT(store1->instance()->definition() == allocate);
@ -1197,7 +1197,7 @@ main() {
kMoveGlob,
{kMatchAndMoveAllocateObject, &allocate},
{kMatchAndMoveStoreField, &store1}, // initializing store
kMatchReturn,
kMatchDartReturn,
}));
EXPECT(store1->instance()->definition() == allocate);
@ -1367,7 +1367,7 @@ main() {
kMatchAndMoveStoreIndexed,
kMatchAndMoveMoveArgument,
{kMatchAndMoveStaticCall, &string_interpolate},
kMatchReturn,
kMatchDartReturn,
}));
EXPECT(string_interpolate->ArgumentAt(0) == create_array);
@ -1464,7 +1464,7 @@ main() {
kMatchAndMoveStoreIndexed,
kMatchAndMoveMoveArgument,
kMatchAndMoveStaticCall,
kMatchReturn,
kMatchDartReturn,
}));
Compiler::CompileOptimizedFunction(thread, function);
@ -1636,7 +1636,7 @@ ISOLATE_UNIT_TEST_CASE(CSE_Redefinitions) {
LoadFieldInstr* load1;
LoadFieldInstr* load2;
StaticCallInstr* call;
ReturnInstr* ret;
DartReturnInstr* ret;
{
BlockBuilder builder(H.flow_graph(), b1);

View file

@ -79,13 +79,13 @@ ISOLATE_UNIT_TEST_CASE(TypePropagator_RedefinitionAfterStrictCompareWithNull) {
// In B2 v0 should not have any additional type information so reaching
// type should be still nullable int.
auto b2_value = b2->last_instruction()->AsReturn()->value();
auto b2_value = b2->last_instruction()->AsDartReturn()->value();
EXPECT(b2_value->Type()->IsNullableInt());
// In B3 v0 is constrained by comparison with null - it should be non-nullable
// integer. There should be a Redefinition inserted to prevent LICM past
// the branch.
auto b3_value = b3->last_instruction()->AsReturn()->value();
auto b3_value = b3->last_instruction()->AsDartReturn()->value();
EXPECT(b3_value->Type()->IsInt());
EXPECT(b3_value->definition()->IsRedefinition());
EXPECT(b3_value->definition()->GetBlock() == b3);
@ -144,13 +144,13 @@ ISOLATE_UNIT_TEST_CASE(
// There should be no information available about the incoming type of
// the parameter either on entry or in B3.
EXPECT_PROPERTY(v0->Type()->ToAbstractType(), it.IsDynamicType());
auto b3_value = b3->last_instruction()->AsReturn()->value();
auto b3_value = b3->last_instruction()->AsDartReturn()->value();
EXPECT(b3_value->Type() == v0->Type());
// In B3 v0 is constrained by comparison of its cid with kDoubleCid - it
// should be non-nullable double. There should be a Redefinition inserted to
// prevent LICM past the branch.
auto b2_value = b2->last_instruction()->AsReturn()->value();
auto b2_value = b2->last_instruction()->AsDartReturn()->value();
EXPECT_PROPERTY(b2_value->Type(), it.IsDouble());
EXPECT_PROPERTY(b2_value->definition(), it.IsRedefinition());
EXPECT_PROPERTY(b2_value->definition()->GetBlock(), &it == b2);
@ -350,8 +350,8 @@ ISOLATE_UNIT_TEST_CASE(TypePropagator_Regress36156) {
BlockBuilder builder(H.flow_graph(), b7);
v5 = H.Phi(b7, {{b5, v3}, {b6, H.DoubleConstant(1.0)}});
builder.AddPhi(v5);
builder.AddInstruction(new ReturnInstr(InstructionSource(), new Value(v5),
S.GetNextDeoptId()));
builder.AddInstruction(new DartReturnInstr(
InstructionSource(), new Value(v5), S.GetNextDeoptId()));
}
H.FinishGraph();
@ -571,7 +571,7 @@ ISOLATE_UNIT_TEST_CASE(TypePropagator_NonNullableLoadStaticField) {
kMatchAndMoveBoxInt64,
kMatchAndMoveMoveArgument,
kMatchAndMoveStaticCall,
kMatchReturn,
kMatchDartReturn,
}));
EXPECT_PROPERTY(load->AsLoadStaticField()->Type(), !it.is_nullable());
@ -741,7 +741,7 @@ ISOLATE_UNIT_TEST_CASE(TypePropagator_RecordFieldAccess) {
{kMatchAndMoveLoadField, &load2},
kMatchAndMoveMoveArgument,
kMatchAndMoveStaticCall,
kMatchReturn,
kMatchDartReturn,
}));
EXPECT_PROPERTY(load1->Type()->ToAbstractType(), it.IsIntType());

View file

@ -58,7 +58,7 @@ ISOLATE_UNIT_TEST_CASE(IRTest_TypedDataAOT_Inlining) {
kMoveParallelMoves,
{kMatchAndMoveLoadIndexed, &load_indexed},
kMoveGlob,
kMatchReturn,
kMatchDartReturn,
}));
} else {
RELEASE_ASSERT(cursor.TryMatch({
@ -73,7 +73,7 @@ ISOLATE_UNIT_TEST_CASE(IRTest_TypedDataAOT_Inlining) {
kMoveParallelMoves,
{kMatchAndMoveLoadIndexed, &load_indexed},
kMoveGlob,
kMatchReturn,
kMatchDartReturn,
}));
}
@ -168,7 +168,7 @@ ISOLATE_UNIT_TEST_CASE(IRTest_TypedDataAOT_FunctionalGetSet) {
// Exit the loop.
kMatchAndMoveBranchFalse,
kMoveGlob,
kMatchReturn,
kMatchDartReturn,
}));
} else {
EXPECT(cursor.TryMatch({
@ -212,7 +212,7 @@ ISOLATE_UNIT_TEST_CASE(IRTest_TypedDataAOT_FunctionalGetSet) {
// Exit the loop.
kMatchAndMoveBranchFalse,
kMoveGlob,
kMatchReturn,
kMatchDartReturn,
}));
}
};
@ -290,7 +290,7 @@ ISOLATE_UNIT_TEST_CASE(IRTest_TypedDataAOT_FunctionalIndexError) {
// Return
kMoveGlob,
kMatchReturn,
kMatchDartReturn,
}));
} else {
EXPECT(cursor.TryMatch({
@ -324,7 +324,7 @@ ISOLATE_UNIT_TEST_CASE(IRTest_TypedDataAOT_FunctionalIndexError) {
// Return
kMoveGlob,
kMatchReturn,
kMatchDartReturn,
}));
}

View file

@ -214,7 +214,7 @@ Fragment BaseFlowGraphBuilder::Return(TokenPosition position) {
const Function& function = parsed_function_->function();
const Representation representation =
FlowGraph::ReturnRepresentationOf(function);
ReturnInstr* return_instr = new (Z) ReturnInstr(
DartReturnInstr* return_instr = new (Z) DartReturnInstr(
InstructionSource(position), value, GetNextDeoptId(), representation);
if (exit_collector_ != nullptr) exit_collector_->AddExit(return_instr);

View file

@ -91,7 +91,7 @@ void InlineExitCollector::PrepareGraphs(FlowGraph* callee_graph) {
RemoveUnreachableExits(callee_graph);
}
void InlineExitCollector::AddExit(ReturnInstr* exit) {
void InlineExitCollector::AddExit(DartReturnInstr* exit) {
Data data = {nullptr, exit};
exits_.Add(data);
}

View file

@ -26,7 +26,7 @@ class InlineExitCollector : public ZoneAllocated {
InlineExitCollector(FlowGraph* caller_graph, Definition* call)
: caller_graph_(caller_graph), call_(call), exits_(4) {}
void AddExit(ReturnInstr* exit);
void AddExit(DartReturnInstr* exit);
void Union(const InlineExitCollector* other);
@ -49,7 +49,7 @@ class InlineExitCollector : public ZoneAllocated {
private:
struct Data {
BlockEntryInstr* exit_block;
ReturnInstr* exit_return;
DartReturnInstr* exit_return;
};
BlockEntryInstr* ExitBlockAt(intptr_t i) const {
@ -63,7 +63,7 @@ class InlineExitCollector : public ZoneAllocated {
Value* ValueAt(intptr_t i) const { return ReturnAt(i)->value(); }
ReturnInstr* ReturnAt(intptr_t i) const { return exits_[i].exit_return; }
DartReturnInstr* ReturnAt(intptr_t i) const { return exits_[i].exit_return; }
static int LowestBlockIdFirst(const Data* a, const Data* b);
void SortExits();

View file

@ -36,7 +36,7 @@ ISOLATE_UNIT_TEST_CASE(StreamingFlowGraphBuilder_ConstFoldStringConcats) {
auto entry = flow_graph->graph_entry()->normal_entry();
EXPECT(entry != nullptr);
ReturnInstr* ret = nullptr;
DartReturnInstr* ret = nullptr;
ILMatcher cursor(flow_graph, entry);
// clang-format off
@ -44,7 +44,7 @@ ISOLATE_UNIT_TEST_CASE(StreamingFlowGraphBuilder_ConstFoldStringConcats) {
kMatchAndMoveFunctionEntry,
kMatchAndMoveCheckStackOverflow,
kMoveDebugStepChecks,
{kMatchReturn, &ret},
{kMatchDartReturn, &ret},
}));
// clang-format on
@ -90,7 +90,7 @@ ISOLATE_UNIT_TEST_CASE(StreamingFlowGraphBuilder_FlattenNestedStringInterp) {
{kMatchAndMoveStoreIndexed, &store2},
kMatchAndMoveStaticCall,
kMoveDebugStepChecks,
kMatchReturn,
kMatchDartReturn,
}));
// clang-format on
@ -149,7 +149,7 @@ ISOLATE_UNIT_TEST_CASE(StreamingFlowGraphBuilder_DropEmptyStringInterp) {
{kMatchAndMoveStoreIndexed, &store3},
kMatchAndMoveStaticCall,
kMoveDebugStepChecks,
kMatchReturn,
kMatchDartReturn,
}));
// clang-format on
@ -217,7 +217,7 @@ ISOLATE_UNIT_TEST_CASE(StreamingFlowGraphBuilder_ConcatStringLits) {
{kMatchAndMoveStoreIndexed, &store3},
kMatchAndMoveStaticCall,
kMoveDebugStepChecks,
kMatchReturn,
kMatchDartReturn,
}));
// clang-format on
@ -279,7 +279,7 @@ ISOLATE_UNIT_TEST_CASE(StreamingFlowGraphBuilder_InvariantFlagInListLiterals) {
kMatchAndMoveStaticCall,
{kMatchAndMoveInstanceCall, &call_add},
kMoveDebugStepChecks,
kMatchReturn,
kMatchDartReturn,
}));
// clang-format on
@ -338,7 +338,7 @@ ISOLATE_UNIT_TEST_CASE(StreamingFlowGraphBuilder_TypedClosureCall) {
#endif
kMatchAndMoveClosureCall,
kMoveDebugStepChecks,
kMatchReturn,
kMatchDartReturn,
};
RELEASE_ASSERT(cursor.TryMatch(expected));
// clang-format on
@ -366,14 +366,14 @@ ISOLATE_UNIT_TEST_CASE(
auto entry = flow_graph->graph_entry()->normal_entry();
EXPECT(entry != nullptr);
ReturnInstr* return_instr = nullptr;
DartReturnInstr* return_instr = nullptr;
ILMatcher cursor(flow_graph, entry);
RELEASE_ASSERT(cursor.TryMatch({
kMatchAndMoveFunctionEntry,
kMatchAndMoveCheckStackOverflow,
kMoveDebugStepChecks,
{kMatchReturn, &return_instr},
{kMatchDartReturn, &return_instr},
}));
EXPECT(return_instr != nullptr);

View file

@ -4695,8 +4695,9 @@ Fragment FlowGraphBuilder::IntRelationalOp(TokenPosition position,
Fragment FlowGraphBuilder::NativeReturn(
const compiler::ffi::CallbackMarshaller& marshaller) {
auto* instr = new (Z)
NativeReturnInstr(InstructionSource(), Pop(), marshaller, DeoptId::kNone);
const intptr_t num_return_defs = marshaller.NumReturnDefinitions();
ASSERT_EQUAL(num_return_defs, 1);
auto* instr = new (Z) NativeReturnInstr(Pop(), marshaller);
return Fragment(instr).closed();
}

View file

@ -257,7 +257,7 @@ void IRRegExpMacroAssembler::GenerateSuccessBlock() {
PRINT(PushLocal(result_));
// Return true on success.
AppendInstruction(new (Z) ReturnInstr(
AppendInstruction(new (Z) DartReturnInstr(
InstructionSource(), Bind(LoadLocal(result_)), GetNextDeoptId()));
}
@ -266,7 +266,7 @@ void IRRegExpMacroAssembler::GenerateExitBlock() {
TAG();
// Return false on failure.
AppendInstruction(new (Z) ReturnInstr(
AppendInstruction(new (Z) DartReturnInstr(
InstructionSource(), Bind(LoadLocal(result_)), GetNextDeoptId()));
}

View file

@ -67,7 +67,7 @@ void matchIL$testOffset(FlowGraph graph) {
// `pointer2` is not allocated.
...convertInt64AddressToUntagged('pointer2'),
...loadIndexedValueAsInt64('pointer2', 'int 0'),
match.Return('pointer2.value int64'),
match.DartReturn('pointer2.value int64'),
]),
]);
}
@ -118,7 +118,7 @@ void matchIL$testAllocate(FlowGraph graph) {
match.AllocateObject(),
match.StoreStaticField(match.any),
...loadIndexedValueAsInt64('pointer2', 'int 0'),
match.Return('pointer2.value int64'),
match.DartReturn('pointer2.value int64'),
]),
]);
}
@ -195,7 +195,7 @@ void matchIL$testHoist(FlowGraph graph) {
]),
'B3' <<
match.block('Target', [
match.Return(match.any),
match.DartReturn(match.any),
]),
]);
}