[VM] Introduce function and osr entrypoints to the VM's IR

Similar to how we treat catch entry instructions, this CL adds new
function and OSR entry instructions. [FunctionEntry] and
[OsrEntry] - just like [CatchBlockEntry] - now have their own initial
definitions. [GraphEntry] has initial definitions only for
constants.
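
For illustration, a minimal sketch (not code from this CL; flow_graph
stands for a hypothetical FlowGraph*) of how a pass now visits all
initial definitions:

  auto graph_entry = flow_graph->graph_entry();
  // The graph entry now holds only constants.
  for (auto defn : *graph_entry->initial_definitions()) {
    ASSERT(defn->IsConstant());
  }
  // Parameter / special parameter instructions live in the
  // FunctionEntry / OsrEntry / CatchBlockEntry successors.
  for (intptr_t i = 0; i < graph_entry->SuccessorCount(); ++i) {
    auto successor = graph_entry->SuccessorAt(i);
    if (auto entry = successor->AsBlockEntryWithInitialDefs()) {
      for (auto defn : *entry->initial_definitions()) {
        // ... visit defn ...
      }
    }
  }

(FlowGraph::SelectRepresentations below does essentially this walk.)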

Explicit phis are inserted for parameter / special parameter
instructions where necessary.
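
For example, for a function with a checked and an unchecked entry, the
IL dump might now look roughly like this (hypothetical output based on
the printer formats added in this CL; block and value ids are made up):

  B0[graph]:0 {
      v0 <- Constant(#null)
  }
  B1[function entry]:2 {
      v2 <- Parameter(0)
  }
  B2[function entry]:4 {
      v3 <- Parameter(0)
  }
  B3[join]:6 pred(B1, B2) {
      v4 <- phi(v2, v3)
  }

Both entries define their own Parameter instructions, and the join
where they merge carries the explicit phi for the parameter.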

Future work:

  a) Minimize parallel moves due to the phis on parameters
  b) Clean up frame setup: move it entirely into FunctionEntry/CatchEntry
    (instead of the split version we have now)

Fixes https://github.com/dart-lang/sdk/issues/34435
Fixes https://github.com/dart-lang/sdk/issues/34287

Change-Id: Iefa0280a709716f748d6fb0523b8d0f4d8de1fec
Reviewed-on: https://dart-review.googlesource.com/c/74782
Commit-Queue: Martin Kustermann <kustermann@google.com>
Reviewed-by: Vyacheslav Egorov <vegorov@google.com>
Author: Martin Kustermann
Date: 2018-10-15 13:02:51 +00:00
Committed-by: commit-bot@chromium.org
Parent: 22b11717ac
Commit: 06f9a9e354
38 changed files with 1061 additions and 539 deletions


@ -1,7 +1,6 @@
// Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
// VMOptions=--no-sync-async
import 'dart:developer';
import 'package:observatory/service_io.dart';
@ -12,10 +11,10 @@ import 'test_helper.dart';
const alwaysInline = "AlwaysInline";
const noInline = "NeverInline";
int LINE_A = 35;
int LINE_B = 40;
int LINE_C = 43;
int LINE_D = 47;
int LINE_A = 34;
int LINE_B = 39;
int LINE_C = 42;
int LINE_D = 46;
int global = 0;
@ -60,20 +59,20 @@ var tests = <IsolateTest>[
} on ServerRpcException catch (e) {
caughtException = true;
expect(e.code, equals(ServerRpcException.kCannotResume));
expect(e.message, 'Frame must be in bounds [1..9]: saw 0');
expect(e.message, 'Frame must be in bounds [1..12]: saw 0');
}
expect(caughtException, isTrue);
},
(Isolate isolate) async {
// We are not able to rewind frame 10.
// We are not able to rewind frame 13.
bool caughtException;
try {
await isolate.rewind(10);
await isolate.rewind(13);
expect(false, isTrue, reason: 'Unreachable');
} on ServerRpcException catch (e) {
caughtException = true;
expect(e.code, equals(ServerRpcException.kCannotResume));
expect(e.message, 'Frame must be in bounds [1..9]: saw 10');
expect(e.message, 'Frame must be in bounds [1..12]: saw 13');
}
expect(caughtException, isTrue);
},


@ -104,7 +104,7 @@ step_through_getter_test: RuntimeError # Debugging StringConcatenation doesn't w
*: SkipByDesign
[ $compiler == dartk || $compiler == dartkp ]
rewind_test: Pass, RuntimeError, Slow # Issue 34287
rewind_test: Pass, Slow
# Skip all service tests because random reloads interfere.
[ $hot_reload || $hot_reload_rollback ]


@ -51,6 +51,9 @@ dart/simd128float32_test: Skip # compilers not aware of Simd128
dart/truncating_ints_test: SkipByDesign # The test requires int64.
dart/wrap_around_in_range_analysis_test: SkipByDesign # The test requires int64.
[ $compiler != dartk || ($arch != x64 && $arch != simarm && $arch != arm) || $hot_reload || $hot_reload_rollback ]
dart/entrypoints/*: Skip # Only supported in Dart 2 JIT (hot-reload -> issue 34199).
[ ($compiler == dartk || $compiler == dartkb) ]
cc/DartAPI_New: Fail # Issue #33041
dart/redirection_type_shuffling_test/00: RuntimeError, Pass


@ -71,10 +71,15 @@ void BlockScheduler::AssignEdgeWeights() const {
Array& edge_counters = Array::Handle();
edge_counters ^= ic_data_array.At(0);
intptr_t entry_count = GetEdgeCount(
edge_counters,
flow_graph()->graph_entry()->normal_entry()->preorder_number());
flow_graph()->graph_entry()->set_entry_count(entry_count);
auto graph_entry = flow_graph()->graph_entry();
BlockEntryInstr* entry = graph_entry->normal_entry();
if (entry == nullptr) {
entry = graph_entry->osr_entry();
ASSERT(entry != nullptr);
}
const intptr_t entry_count =
GetEdgeCount(edge_counters, entry->preorder_number());
graph_entry->set_entry_count(entry_count);
for (BlockIterator it = flow_graph()->reverse_postorder_iterator();
!it.Done(); it.Advance()) {


@ -61,7 +61,7 @@ bool BranchSimplifier::Match(JoinEntryInstr* block) {
}
JoinEntryInstr* BranchSimplifier::ToJoinEntry(Zone* zone,
TargetEntryInstr* target) {
BlockEntryInstr* target) {
// Convert a target block into a join block. Branches will be duplicated
// so the former true and false targets become joins of the control flows
// from all the duplicated branches.
@ -74,6 +74,17 @@ JoinEntryInstr* BranchSimplifier::ToJoinEntry(Zone* zone,
return join;
}
TargetEntryInstr* BranchSimplifier::ToTargetEntry(Zone* zone,
BlockEntryInstr* target) {
auto replacement = new (zone)
TargetEntryInstr(target->block_id(), target->try_index(), DeoptId::kNone);
replacement->InheritDeoptTarget(zone, target);
replacement->LinkTo(target->next());
replacement->set_last_instruction(target->last_instruction());
target->UnuseAllInputs();
return replacement;
}
BranchInstr* BranchSimplifier::CloneBranch(Zone* zone,
BranchInstr* branch,
Value* new_left,


@ -9,7 +9,9 @@
namespace dart {
class BlockEntryInstr;
class FlowGraph;
class FunctionEntryInstr;
class JoinEntryInstr;
class Zone;
class TargetEntryInstr;
@ -23,10 +25,15 @@ class BranchSimplifier : public AllStatic {
public:
static void Simplify(FlowGraph* flow_graph);
// Replace a target entry instruction with a join entry instruction. Does
// Replace a block entry instruction with a join entry instruction. Does
// not update the original target's predecessors to point to the new block
// and does not replace the target in already computed block order lists.
static JoinEntryInstr* ToJoinEntry(Zone* zone, TargetEntryInstr* target);
static JoinEntryInstr* ToJoinEntry(Zone* zone, BlockEntryInstr* target);
// Replace a block entry instruction with a target entry instruction. Does
// not update the original target's predecessors to point to the new block and
// does not replace the target in already computed block order lists.
static TargetEntryInstr* ToTargetEntry(Zone* zone, BlockEntryInstr* target);
private:
// Match an instance of the pattern to rewrite. See the implementation


@ -121,9 +121,8 @@ void ConstantPropagator::Join(Object* left, const Object& right) {
// Analysis of blocks. Called at most once per block. The block is already
// marked as reachable. All instructions in the block are analyzed.
void ConstantPropagator::VisitGraphEntry(GraphEntryInstr* block) {
const GrowableArray<Definition*>& defs = *block->initial_definitions();
for (intptr_t i = 0; i < defs.length(); ++i) {
defs[i]->Accept(this);
for (auto def : *block->initial_definitions()) {
def->Accept(this);
}
ASSERT(ForwardInstructionIterator(block).Done());
@ -134,6 +133,33 @@ void ConstantPropagator::VisitGraphEntry(GraphEntryInstr* block) {
}
}
void ConstantPropagator::VisitFunctionEntry(FunctionEntryInstr* block) {
for (auto def : *block->initial_definitions()) {
def->Accept(this);
}
for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) {
it.Current()->Accept(this);
}
}
void ConstantPropagator::VisitOsrEntry(OsrEntryInstr* block) {
for (auto def : *block->initial_definitions()) {
def->Accept(this);
}
for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) {
it.Current()->Accept(this);
}
}
void ConstantPropagator::VisitCatchBlockEntry(CatchBlockEntryInstr* block) {
for (auto def : *block->initial_definitions()) {
def->Accept(this);
}
for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) {
it.Current()->Accept(this);
}
}
void ConstantPropagator::VisitJoinEntry(JoinEntryInstr* block) {
// Phis are visited when visiting Goto at a predecessor. See VisitGoto.
for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) {
@ -153,16 +179,6 @@ void ConstantPropagator::VisitIndirectEntry(IndirectEntryInstr* block) {
}
}
void ConstantPropagator::VisitCatchBlockEntry(CatchBlockEntryInstr* block) {
const GrowableArray<Definition*>& defs = *block->initial_definitions();
for (intptr_t i = 0; i < defs.length(); ++i) {
defs[i]->Accept(this);
}
for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) {
it.Current()->Accept(this);
}
}
void ConstantPropagator::VisitParallelMove(ParallelMoveInstr* instr) {
// Parallel moves have not yet been inserted in the graph.
UNREACHABLE();


@ -29,6 +29,9 @@ DEFINE_FLAG(bool, trace_smi_widening, false, "Trace Smi->Int32 widening pass.");
DEFINE_FLAG(bool, prune_dead_locals, true, "optimize dead locals away");
DECLARE_FLAG(bool, verify_compiler);
// Quick access to the current zone.
#define Z (zone())
FlowGraph::FlowGraph(const ParsedFunction& parsed_function,
GraphEntryInstr* graph_entry,
intptr_t max_block_id,
@ -136,20 +139,23 @@ ConstantInstr* FlowGraph::GetConstant(const Object& object) {
constant =
new (zone()) ConstantInstr(Object::ZoneHandle(zone(), object.raw()));
constant->set_ssa_temp_index(alloc_ssa_temp_index());
AddToInitialDefinitions(constant);
AddToGraphInitialDefinitions(constant);
constant_instr_pool_.Insert(constant);
}
return constant;
}
void FlowGraph::AddToInitialDefinitions(Definition* defn) {
// TODO(zerny): Set previous to the graph entry so it is accessible by
// GetBlock. Remove this once there is a direct pointer to the block.
void FlowGraph::AddToGraphInitialDefinitions(Definition* defn) {
defn->set_previous(graph_entry_);
graph_entry_->initial_definitions()->Add(defn);
}
void FlowGraph::AddToInitialDefinitions(BlockEntryWithInitialDefs* entry,
Definition* defn) {
defn->set_previous(entry);
entry->initial_definitions()->Add(defn);
}
void FlowGraph::InsertBefore(Instruction* next,
Instruction* instr,
Environment* env,
@ -836,6 +842,27 @@ void VariableLivenessAnalysis::ComputeInitialSets() {
continue;
}
}
// For blocks with parameter or special parameter instructions we add them
// to the kill set.
const bool is_function_entry = block->IsFunctionEntry();
const bool is_osr_entry = block->IsOsrEntry();
if (is_function_entry || is_osr_entry || block->IsCatchBlockEntry()) {
const intptr_t parameter_count =
is_osr_entry ? flow_graph_->variable_count()
: flow_graph_->num_direct_parameters();
for (intptr_t i = 0; i < parameter_count; ++i) {
live_in->Remove(i);
kill->Add(i);
}
}
if (is_function_entry) {
if (flow_graph_->parsed_function().has_arg_desc_var()) {
const auto index = flow_graph_->ArgumentDescriptorEnvIndex();
live_in->Remove(index);
kill->Add(index);
}
}
}
}
@ -845,6 +872,8 @@ void FlowGraph::ComputeSSA(
ASSERT((next_virtual_register_number == 0) || (inlining_parameters != NULL));
current_ssa_temp_index_ = next_virtual_register_number;
GrowableArray<BitVector*> dominance_frontier;
GrowableArray<intptr_t> idom;
ComputeDominators(&dominance_frontier);
VariableLivenessAnalysis variable_liveness(this);
@ -1048,6 +1077,39 @@ void FlowGraph::Rename(GrowableArray<PhiInstr*>* live_phis,
constant_null_ = GetConstant(Object::ZoneHandle());
constant_dead_ = GetConstant(Symbols::OptimizedOut());
const intptr_t parameter_count =
IsCompiledForOsr() ? variable_count() : num_direct_parameters_;
// Initial renaming environment.
GrowableArray<Definition*> env(parameter_count + num_stack_locals());
env.FillWith(constant_dead(), 0, parameter_count);
if (!IsCompiledForOsr()) {
env.FillWith(constant_null(), parameter_count, num_stack_locals());
}
if (entry->catch_entries().length() > 0) {
// Functions with try-catch have a fixed area of stack slots reserved
// so that all local variables are stored at a known location on
// entry to the catch.
entry->set_fixed_slot_count(num_stack_locals());
} else {
ASSERT(entry->unchecked_entry() != nullptr ? entry->SuccessorCount() == 2
: entry->SuccessorCount() == 1);
}
RenameRecursive(entry, &env, live_phis, variable_liveness,
inlining_parameters);
}
void FlowGraph::PopulateEnvironmentFromFunctionEntry(
FunctionEntryInstr* function_entry,
GrowableArray<Definition*>* env,
GrowableArray<PhiInstr*>* live_phis,
VariableLivenessAnalysis* variable_liveness,
ZoneGrowableArray<Definition*>* inlining_parameters) {
ASSERT(!IsCompiledForOsr());
const intptr_t parameter_count = num_direct_parameters_;
// Check if inlining_parameters include a type argument vector parameter.
const intptr_t inlined_type_args_param =
(FLAG_reify_generic_functions && (inlining_parameters != NULL) &&
@ -1055,25 +1117,11 @@ void FlowGraph::Rename(GrowableArray<PhiInstr*>* live_phis,
? 1
: 0;
// Initial renaming environment.
GrowableArray<Definition*> env(variable_count());
{
const intptr_t parameter_count =
IsCompiledForOsr() ? variable_count() : num_direct_parameters_;
for (intptr_t i = 0; i < parameter_count; i++) {
ParameterInstr* param = new (zone()) ParameterInstr(i, entry);
param->set_ssa_temp_index(alloc_ssa_temp_index());
AddToInitialDefinitions(param);
env.Add(param);
}
ASSERT(env.length() == parameter_count);
// Fill in all local variables with `null` (for osr the stack locals have
// already been been handled above).
if (!IsCompiledForOsr()) {
ASSERT(env.length() == num_direct_parameters_);
env.FillWith(constant_null(), num_direct_parameters_, num_stack_locals());
}
for (intptr_t i = 0; i < parameter_count; i++) {
ParameterInstr* param = new (zone()) ParameterInstr(i, function_entry);
param->set_ssa_temp_index(alloc_ssa_temp_index());
AddToInitialDefinitions(function_entry, param);
(*env)[i] = param;
}
// Override the entries in the renaming environment which are special (i.e.
@ -1084,55 +1132,90 @@ void FlowGraph::Rename(GrowableArray<PhiInstr*>* live_phis,
for (intptr_t i = 0; i < function().NumParameters(); ++i) {
Definition* defn = (*inlining_parameters)[inlined_type_args_param + i];
AllocateSSAIndexes(defn);
AddToInitialDefinitions(defn);
AddToInitialDefinitions(function_entry, defn);
intptr_t index = EnvIndex(parsed_function_.RawParameterVariable(i));
env[index] = defn;
(*env)[index] = defn;
}
}
if (!IsCompiledForOsr()) {
const bool reify_generic_argument =
function().IsGeneric() && FLAG_reify_generic_functions;
// Replace the type arguments slot with a special parameter.
const bool reify_generic_argument =
function().IsGeneric() && FLAG_reify_generic_functions;
if (reify_generic_argument) {
ASSERT(parsed_function().function_type_arguments() != NULL);
// Replace the type arguments slot with a special parameter.
if (reify_generic_argument) {
ASSERT(parsed_function().function_type_arguments() != NULL);
Definition* defn;
if (inlining_parameters == NULL) {
// Note: If we are not inlining, then the prologue builder will
// take care of checking that we got the correct reified type
// arguments. This includes checking the argument descriptor in order
// to even find out if the parameter was passed or not.
defn = constant_dead();
} else {
defn = (*inlining_parameters)[0];
}
AllocateSSAIndexes(defn);
AddToInitialDefinitions(defn);
env[RawTypeArgumentEnvIndex()] = defn;
Definition* defn;
if (inlining_parameters == NULL) {
// Note: If we are not inlining, then the prologue builder will
// take care of checking that we got the correct reified type
// arguments. This includes checking the argument descriptor in order
// to even find out if the parameter was passed or not.
defn = constant_dead();
} else {
defn = (*inlining_parameters)[0];
}
AllocateSSAIndexes(defn);
AddToInitialDefinitions(function_entry, defn);
(*env)[RawTypeArgumentEnvIndex()] = defn;
}
// Replace the argument descriptor slot with a special parameter.
if (parsed_function().has_arg_desc_var()) {
Definition* defn =
new SpecialParameterInstr(SpecialParameterInstr::kArgDescriptor,
DeoptId::kNone, graph_entry_);
AllocateSSAIndexes(defn);
AddToInitialDefinitions(defn);
env[ArgumentDescriptorEnvIndex()] = defn;
}
// Replace the argument descriptor slot with a special parameter.
if (parsed_function().has_arg_desc_var()) {
Definition* defn =
new (Z) SpecialParameterInstr(SpecialParameterInstr::kArgDescriptor,
DeoptId::kNone, function_entry);
AllocateSSAIndexes(defn);
AddToInitialDefinitions(function_entry, defn);
(*env)[ArgumentDescriptorEnvIndex()] = defn;
}
}
}
if (entry->SuccessorCount() > 1) {
// Functions with try-catch have a fixed area of stack slots reserved
// so that all local variables are stored at a known location when
// on entry to the catch.
entry->set_fixed_slot_count(num_stack_locals());
void FlowGraph::PopulateEnvironmentFromOsrEntry(
OsrEntryInstr* osr_entry,
GrowableArray<Definition*>* env) {
ASSERT(IsCompiledForOsr());
const intptr_t parameter_count = variable_count();
for (intptr_t i = 0; i < parameter_count; i++) {
ParameterInstr* param = new (zone()) ParameterInstr(i, osr_entry);
param->set_ssa_temp_index(alloc_ssa_temp_index());
AddToInitialDefinitions(osr_entry, param);
(*env)[i] = param;
}
}
void FlowGraph::PopulateEnvironmentFromCatchEntry(
CatchBlockEntryInstr* catch_entry,
GrowableArray<Definition*>* env) {
const intptr_t raw_exception_var_envindex =
catch_entry->raw_exception_var() != nullptr
? EnvIndex(catch_entry->raw_exception_var())
: -1;
const intptr_t raw_stacktrace_var_envindex =
catch_entry->raw_stacktrace_var() != nullptr
? EnvIndex(catch_entry->raw_stacktrace_var())
: -1;
// Add real definitions for all locals and parameters.
for (intptr_t i = 0; i < variable_count(); ++i) {
// Replace usages of the raw exception/stacktrace variables with
// [SpecialParameterInstr]s.
Definition* param = nullptr;
if (raw_exception_var_envindex == i) {
param = new (Z) SpecialParameterInstr(SpecialParameterInstr::kException,
DeoptId::kNone, catch_entry);
} else if (raw_stacktrace_var_envindex == i) {
param = new (Z) SpecialParameterInstr(SpecialParameterInstr::kStackTrace,
DeoptId::kNone, catch_entry);
} else {
param = new (Z) ParameterInstr(i, catch_entry);
}
param->set_ssa_temp_index(alloc_ssa_temp_index()); // New SSA temp.
(*env)[i] = param;
catch_entry->initial_definitions()->Add(param);
}
RenameRecursive(entry, &env, live_phis, variable_liveness);
}
void FlowGraph::AttachEnvironment(Instruction* instr,
@ -1153,13 +1236,14 @@ void FlowGraph::AttachEnvironment(Instruction* instr,
}
}
void FlowGraph::RenameRecursive(BlockEntryInstr* block_entry,
GrowableArray<Definition*>* env,
GrowableArray<PhiInstr*>* live_phis,
VariableLivenessAnalysis* variable_liveness) {
void FlowGraph::RenameRecursive(
BlockEntryInstr* block_entry,
GrowableArray<Definition*>* env,
GrowableArray<PhiInstr*>* live_phis,
VariableLivenessAnalysis* variable_liveness,
ZoneGrowableArray<Definition*>* inlining_parameters) {
// 1. Process phis first.
if (block_entry->IsJoinEntry()) {
JoinEntryInstr* join = block_entry->AsJoinEntry();
if (auto join = block_entry->AsJoinEntry()) {
if (join->phis() != NULL) {
for (intptr_t i = 0; i < join->phis()->length(); ++i) {
PhiInstr* phi = (*join->phis())[i];
@ -1178,49 +1262,31 @@ void FlowGraph::RenameRecursive(BlockEntryInstr* block_entry,
}
}
}
} else if (CatchBlockEntryInstr* catch_entry =
block_entry->AsCatchBlockEntry()) {
const intptr_t raw_exception_var_envindex =
catch_entry->raw_exception_var() != nullptr
? EnvIndex(catch_entry->raw_exception_var())
: -1;
const intptr_t raw_stacktrace_var_envindex =
catch_entry->raw_stacktrace_var() != nullptr
? EnvIndex(catch_entry->raw_stacktrace_var())
: -1;
} else if (auto osr_entry = block_entry->AsOsrEntry()) {
PopulateEnvironmentFromOsrEntry(osr_entry, env);
} else if (auto function_entry = block_entry->AsFunctionEntry()) {
ASSERT(!IsCompiledForOsr());
PopulateEnvironmentFromFunctionEntry(
function_entry, env, live_phis, variable_liveness, inlining_parameters);
} else if (auto catch_entry = block_entry->AsCatchBlockEntry()) {
PopulateEnvironmentFromCatchEntry(catch_entry, env);
}
// Add real definitions for all locals and parameters.
for (intptr_t i = 0; i < env->length(); ++i) {
// Replace usages of the raw exception/stacktrace variables with
// [SpecialParameterInstr]s.
Definition* param = nullptr;
if (raw_exception_var_envindex == i) {
param = new SpecialParameterInstr(SpecialParameterInstr::kException,
DeoptId::kNone, catch_entry);
} else if (raw_stacktrace_var_envindex == i) {
param = new SpecialParameterInstr(SpecialParameterInstr::kStackTrace,
DeoptId::kNone, catch_entry);
} else {
param = new (zone()) ParameterInstr(i, block_entry);
if (!block_entry->IsGraphEntry() &&
!block_entry->IsBlockEntryWithInitialDefs()) {
// Prune non-live variables at block entry by replacing their environment
// slots with null.
BitVector* live_in = variable_liveness->GetLiveInSet(block_entry);
for (intptr_t i = 0; i < variable_count(); i++) {
// TODO(fschneider): Make sure that live_in always contains the
// CurrentContext variable to avoid the special case here.
if (FLAG_prune_dead_locals && !live_in->Contains(i) &&
(i != CurrentContextEnvIndex())) {
(*env)[i] = constant_dead();
}
param->set_ssa_temp_index(alloc_ssa_temp_index()); // New SSA temp.
(*env)[i] = param;
block_entry->AsCatchBlockEntry()->initial_definitions()->Add(param);
}
}
// Prune non-live variables at block entry by replacing their environment
// slots with null.
BitVector* live_in = variable_liveness->GetLiveInSet(block_entry);
for (intptr_t i = 0; i < variable_count(); i++) {
// TODO(fschneider): Make sure that live_in always contains the
// CurrentContext variable to avoid the special case here.
if (FLAG_prune_dead_locals && !live_in->Contains(i) &&
(i != CurrentContextEnvIndex())) {
(*env)[i] = constant_dead();
}
}
// Attach environment to the block entry.
AttachEnvironment(block_entry, env);
@ -1406,7 +1472,8 @@ void FlowGraph::RenameRecursive(BlockEntryInstr* block_entry,
BlockEntryInstr* block = block_entry->dominated_blocks()[i];
GrowableArray<Definition*> new_env(env->length());
new_env.AddArray(*env);
RenameRecursive(block, &new_env, live_phis, variable_liveness);
RenameRecursive(block, &new_env, live_phis, variable_liveness,
inlining_parameters);
}
// 4. Process successor block. We have edge-split form, so that only blocks
@ -1613,9 +1680,6 @@ intptr_t FlowGraph::InstructionCount() const {
return size;
}
// Quick access to the current zone.
#define Z (zone())
void FlowGraph::ConvertUse(Value* use, Representation from_rep) {
const Representation to_rep =
use->instruction()->RequiredInputRepresentation(use->use_index());
@ -1652,8 +1716,9 @@ void FlowGraph::InsertConversion(Representation from,
if (phi != NULL) {
ASSERT(phi->is_alive());
// For phis conversions have to be inserted in the predecessor.
insert_before =
phi->block()->PredecessorAt(use->use_index())->last_instruction();
auto predecessor = phi->block()->PredecessorAt(use->use_index());
insert_before = predecessor->last_instruction();
ASSERT(insert_before->GetBlock() == predecessor);
deopt_target = NULL;
} else {
deopt_target = insert_before = use->instruction();
@ -1803,8 +1868,8 @@ static void UnboxPhi(PhiInstr* phi) {
}
void FlowGraph::SelectRepresentations() {
// Conservatively unbox all phis that were proven to be of Double,
// Float32x4, or Int32x4 type.
// First we decide for each phi if it is beneficial to unbox it. If so, we
// change its `phi->representation()`.
for (BlockIterator block_it = reverse_postorder_iterator(); !block_it.Done();
block_it.Advance()) {
JoinEntryInstr* join_entry = block_it.Current()->AsJoinEntry();
@ -1816,18 +1881,28 @@ void FlowGraph::SelectRepresentations() {
}
}
// Process all instructions and insert conversions where needed.
// Visit incoming parameters and constants.
// Process all initial definitions and insert conversions when needed (depends
// on phi unboxing decision above).
for (intptr_t i = 0; i < graph_entry()->initial_definitions()->length();
i++) {
InsertConversionsFor((*graph_entry()->initial_definitions())[i]);
}
for (intptr_t i = 0; i < graph_entry()->SuccessorCount(); ++i) {
auto successor = graph_entry()->SuccessorAt(i);
if (auto entry = successor->AsBlockEntryWithInitialDefs()) {
auto& initial_definitions = *entry->initial_definitions();
for (intptr_t j = 0; j < initial_definitions.length(); j++) {
InsertConversionsFor(initial_definitions[j]);
}
}
}
// Process all normal definitions and insert conversions when needed (depends
// on phi unboxing decision above).
for (BlockIterator block_it = reverse_postorder_iterator(); !block_it.Done();
block_it.Advance()) {
BlockEntryInstr* entry = block_it.Current();
JoinEntryInstr* join_entry = entry->AsJoinEntry();
if (join_entry != NULL) {
if (JoinEntryInstr* join_entry = entry->AsJoinEntry()) {
for (PhiIterator it(join_entry); !it.Done(); it.Advance()) {
PhiInstr* phi = it.Current();
ASSERT(phi != NULL);
@ -1835,13 +1910,6 @@ void FlowGraph::SelectRepresentations() {
InsertConversionsFor(phi);
}
}
CatchBlockEntryInstr* catch_entry = entry->AsCatchBlockEntry();
if (catch_entry != NULL) {
for (intptr_t i = 0; i < catch_entry->initial_definitions()->length();
i++) {
InsertConversionsFor((*catch_entry->initial_definitions())[i]);
}
}
for (ForwardInstructionIterator it(entry); !it.Done(); it.Advance()) {
Definition* def = it.Current()->AsDefinition();
if (def != NULL) {


@ -150,10 +150,6 @@ class FlowGraph : public ZoneAllocated {
return num_direct_parameters_ - variable->index().value();
}
bool IsEntryPoint(BlockEntryInstr* target) const {
return graph_entry()->IsEntryPoint(target);
}
// Flow graph orders.
const GrowableArray<BlockEntryInstr*>& preorder() const { return preorder_; }
const GrowableArray<BlockEntryInstr*>& postorder() const {
@ -230,7 +226,9 @@ class FlowGraph : public ZoneAllocated {
intptr_t InstructionCount() const;
ConstantInstr* GetConstant(const Object& object);
void AddToInitialDefinitions(Definition* defn);
void AddToGraphInitialDefinitions(Definition* defn);
void AddToInitialDefinitions(BlockEntryWithInitialDefs* entry,
Definition* defn);
enum UseKind { kEffect, kValue };
@ -428,7 +426,21 @@ class FlowGraph : public ZoneAllocated {
void RenameRecursive(BlockEntryInstr* block_entry,
GrowableArray<Definition*>* env,
GrowableArray<PhiInstr*>* live_phis,
VariableLivenessAnalysis* variable_liveness);
VariableLivenessAnalysis* variable_liveness,
ZoneGrowableArray<Definition*>* inlining_parameters);
void PopulateEnvironmentFromFunctionEntry(
FunctionEntryInstr* function_entry,
GrowableArray<Definition*>* env,
GrowableArray<PhiInstr*>* live_phis,
VariableLivenessAnalysis* variable_liveness,
ZoneGrowableArray<Definition*>* inlining_parameters);
void PopulateEnvironmentFromOsrEntry(OsrEntryInstr* osr_entry,
GrowableArray<Definition*>* env);
void PopulateEnvironmentFromCatchEntry(CatchBlockEntryInstr* catch_entry,
GrowableArray<Definition*>* env);
void AttachEnvironment(Instruction* instr, GrowableArray<Definition*>* env);


@ -274,7 +274,8 @@ bool FlowGraphCompiler::IsEmptyBlock(BlockEntryInstr* block) const {
return !block->IsCatchBlockEntry() && !block->HasNonRedundantParallelMove() &&
block->next()->IsGoto() &&
!block->next()->AsGoto()->HasNonRedundantParallelMove() &&
!block->IsIndirectEntry() && !flow_graph().IsEntryPoint(block);
!block->IsIndirectEntry() && !block->IsFunctionEntry() &&
!block->IsOsrEntry();
}
void FlowGraphCompiler::CompactBlock(BlockEntryInstr* block) {
@ -324,10 +325,14 @@ void FlowGraphCompiler::CompactBlocks() {
}
intptr_t FlowGraphCompiler::UncheckedEntryOffset() const {
TargetEntryInstr* entry = flow_graph().graph_entry()->unchecked_entry();
BlockEntryInstr* entry = flow_graph().graph_entry()->unchecked_entry();
if (entry == nullptr) {
entry = flow_graph().graph_entry()->normal_entry();
}
if (entry == nullptr) {
entry = flow_graph().graph_entry()->osr_entry();
}
ASSERT(entry != nullptr);
Label* target = GetJumpLabel(entry);
if (target->IsBound()) {
@ -1331,11 +1336,11 @@ void FlowGraphCompiler::EmitComment(Instruction* instr) {
#if !defined(TARGET_ARCH_DBC)
// TODO(vegorov) enable edge-counters on DBC if we consider them beneficial.
bool FlowGraphCompiler::NeedsEdgeCounter(TargetEntryInstr* block) {
bool FlowGraphCompiler::NeedsEdgeCounter(BlockEntryInstr* block) {
// Only emit an edge counter if there is no goto at the end of the block,
// except for the entry block.
return FLAG_reorder_basic_blocks && (!block->last_instruction()->IsGoto() ||
flow_graph().IsEntryPoint(block));
return FLAG_reorder_basic_blocks &&
(!block->last_instruction()->IsGoto() || block->IsFunctionEntry());
}
// Allocate a register that is not explicitly blocked.


@ -589,7 +589,7 @@ class FlowGraphCompiler : public ValueObject {
TokenPosition token_pos,
intptr_t deopt_id);
bool NeedsEdgeCounter(TargetEntryInstr* block);
bool NeedsEdgeCounter(BlockEntryInstr* block);
void EmitEdgeCounter(intptr_t edge_id);
#endif // !defined(TARGET_ARCH_DBC)


@ -1044,25 +1044,22 @@ const Object& Value::BoundConstant() const {
}
GraphEntryInstr::GraphEntryInstr(const ParsedFunction& parsed_function,
TargetEntryInstr* normal_entry,
intptr_t osr_id)
: BlockEntryInstr(0,
kInvalidTryIndex,
CompilerState::Current().GetNextDeoptId()),
: BlockEntryWithInitialDefs(0,
kInvalidTryIndex,
CompilerState::Current().GetNextDeoptId()),
parsed_function_(parsed_function),
normal_entry_(normal_entry),
catch_entries_(),
indirect_entries_(),
initial_definitions_(),
osr_id_(osr_id),
entry_count_(0),
spill_slot_count_(0),
fixed_slot_count_(0) {}
ConstantInstr* GraphEntryInstr::constant_null() {
ASSERT(initial_definitions_.length() > 0);
for (intptr_t i = 0; i < initial_definitions_.length(); ++i) {
ConstantInstr* defn = initial_definitions_[i]->AsConstant();
ASSERT(initial_definitions()->length() > 0);
for (intptr_t i = 0; i < initial_definitions()->length(); ++i) {
ConstantInstr* defn = (*initial_definitions())[i]->AsConstant();
if (defn != NULL && defn->value().IsNull()) return defn;
}
UNREACHABLE();
@ -1550,10 +1547,21 @@ bool BlockEntryInstr::FindOsrEntryAndRelink(GraphEntryInstr* graph_entry,
// we can simply jump to the beginning of the block.
ASSERT(instr->previous() == this);
GotoInstr* goto_join = new GotoInstr(
AsJoinEntry(), CompilerState::Current().GetNextDeoptId());
auto normal_entry = graph_entry->normal_entry();
auto osr_entry = new OsrEntryInstr(graph_entry, normal_entry->block_id(),
normal_entry->try_index(),
normal_entry->deopt_id());
auto goto_join = new GotoInstr(AsJoinEntry(),
CompilerState::Current().GetNextDeoptId());
goto_join->CopyDeoptIdFrom(*parent);
graph_entry->normal_entry()->LinkTo(goto_join);
osr_entry->LinkTo(goto_join);
// Remove normal function entries & add osr entry.
graph_entry->set_normal_entry(nullptr);
graph_entry->set_unchecked_entry(nullptr);
graph_entry->set_osr_entry(osr_entry);
return true;
}
}
@ -1734,16 +1742,25 @@ BlockEntryInstr* Instruction::SuccessorAt(intptr_t index) const {
}
intptr_t GraphEntryInstr::SuccessorCount() const {
return 1 + (unchecked_entry() == nullptr ? 0 : 1) + catch_entries_.length();
return (normal_entry() == nullptr ? 0 : 1) +
(unchecked_entry() == nullptr ? 0 : 1) +
(osr_entry() == nullptr ? 0 : 1) + catch_entries_.length();
}
BlockEntryInstr* GraphEntryInstr::SuccessorAt(intptr_t index) const {
if (index == 0) return normal_entry_;
if (unchecked_entry() != nullptr) {
if (index == 1) return unchecked_entry();
return catch_entries_[index - 2];
if (normal_entry() != nullptr) {
if (index == 0) return normal_entry_;
index--;
}
return catch_entries_[index - 1];
if (unchecked_entry() != nullptr) {
if (index == 0) return unchecked_entry();
index--;
}
if (osr_entry() != nullptr) {
if (index == 0) return osr_entry();
index--;
}
return catch_entries_[index];
}
intptr_t BranchInstr::SuccessorCount() const {
@ -3643,24 +3660,57 @@ LocationSummary* TargetEntryInstr::MakeLocationSummary(Zone* zone,
void TargetEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ Bind(compiler->GetJumpLabel(this));
// TODO(kustermann): Remove duplicate between
// {TargetEntryInstr,FunctionEntryInstr}::EmitNativeCode.
if (!compiler->is_optimizing()) {
#if !defined(TARGET_ARCH_DBC)
// TODO(vegorov) re-enable edge counters on DBC if we consider them
// beneficial for the quality of the optimized bytecode.
if (compiler->NeedsEdgeCounter(this)) {
compiler->EmitEdgeCounter(preorder_number());
}
#endif
// The deoptimization descriptor points after the edge counter code for
// uniformity with ARM, where we can reuse pattern matching code that
// matches backwards from the end of the pattern.
compiler->AddCurrentDescriptor(RawPcDescriptors::kDeopt, GetDeoptId(),
TokenPosition::kNoSource);
}
if (HasParallelMove()) {
if (Assembler::EmittingComments()) {
compiler->EmitComment(parallel_move());
}
compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
}
}
LocationSummary* FunctionEntryInstr::MakeLocationSummary(
Zone* zone,
bool optimizing) const {
UNREACHABLE();
return NULL;
}
void FunctionEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ Bind(compiler->GetJumpLabel(this));
// In the AOT compiler we want to reduce code size, so we generate no
// fall-through code in [FlowGraphCompiler::CompileGraph()].
// (As opposed to here where we don't check for the return value of
// [Intrinsify]).
if (!FLAG_precompiled_mode) {
#if defined(TARGET_ARCH_X64) || defined(TARGET_ARCH_ARM)
if (compiler->flow_graph().IsEntryPoint(this)) {
// NOTE: Because in JIT X64/ARM mode the graph can have multiple
// entrypoints, so we generate several times the same intrinsification &
// frame setup. That's why we cannot rely on the constant pool being
// `false` when we come in here.
__ set_constant_pool_allowed(false);
// TODO(#34162): Don't emit more code if 'TryIntrinsify' returns 'true'
// (meaning the function was fully intrinsified).
compiler->TryIntrinsify();
compiler->EmitPrologue();
ASSERT(__ constant_pool_allowed());
}
// NOTE: In JIT X64/ARM mode the graph can have multiple entrypoints,
// so we generate the same intrinsification & frame setup several times.
// That's why we cannot rely on the constant pool being `false` when we
// come in here.
__ set_constant_pool_allowed(false);
// TODO(#34162): Don't emit more code if 'TryIntrinsify' returns 'true'
// (meaning the function was fully intrinsified).
compiler->TryIntrinsify();
compiler->EmitPrologue();
ASSERT(__ constant_pool_allowed());
#endif
}
@ -3687,6 +3737,35 @@ void TargetEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
}
LocationSummary* OsrEntryInstr::MakeLocationSummary(Zone* zone,
bool optimizing) const {
UNREACHABLE();
return NULL;
}
void OsrEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
ASSERT(!FLAG_precompiled_mode);
ASSERT(compiler->is_optimizing());
__ Bind(compiler->GetJumpLabel(this));
#if defined(TARGET_ARCH_X64) || defined(TARGET_ARCH_ARM)
// NOTE: In JIT X64/ARM mode the graph can have multiple entrypoints,
// so we generate the same intrinsification & frame setup several times.
// That's why we cannot rely on the constant pool being `false` when we
// come in here.
__ set_constant_pool_allowed(false);
compiler->EmitPrologue();
ASSERT(__ constant_pool_allowed());
#endif
if (HasParallelMove()) {
if (Assembler::EmittingComments()) {
compiler->EmitComment(parallel_move());
}
compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
}
}
void IndirectGotoInstr::ComputeOffsetTable() {
if (GetBlock()->offset() < 0) {
// Don't generate a table when contained in an unreachable block.

View file

@ -21,6 +21,7 @@ namespace dart {
class BitVector;
class BlockEntryInstr;
class BlockEntryWithInitialDefs;
class BoxIntegerInstr;
class BufferFormatter;
class CallTargets;
@ -484,6 +485,8 @@ struct InstrAttrs {
M(GraphEntry, kNoGC) \
M(JoinEntry, kNoGC) \
M(TargetEntry, kNoGC) \
M(FunctionEntry, kNoGC) \
M(OsrEntry, kNoGC) \
M(IndirectEntry, kNoGC) \
M(CatchBlockEntry, kNoGC) \
M(Phi, kNoGC) \
@ -903,6 +906,8 @@ class Instruction : public ZoneAllocated {
DECLARE_INSTRUCTION_TYPE_CHECK(Name, Name##Instr)
DECLARE_INSTRUCTION_TYPE_CHECK(Definition, Definition)
DECLARE_INSTRUCTION_TYPE_CHECK(BlockEntryWithInitialDefs,
BlockEntryWithInitialDefs)
FOR_EACH_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
@ -1283,6 +1288,7 @@ class BlockEntryInstr : public Instruction {
}
void AddDominatedBlock(BlockEntryInstr* block) {
ASSERT(!block->IsFunctionEntry() || this->IsGraphEntry());
block->set_dominator(this);
dominated_blocks_.Add(block);
}
@ -1476,10 +1482,37 @@ class BackwardInstructionIterator : public ValueObject {
Instruction* current_;
};
class GraphEntryInstr : public BlockEntryInstr {
// Base class shared by all block entries which define initial definitions.
//
// The initial definitions define parameters, special parameters and constants.
class BlockEntryWithInitialDefs : public BlockEntryInstr {
public:
BlockEntryWithInitialDefs(intptr_t block_id,
intptr_t try_index,
intptr_t deopt_id)
: BlockEntryInstr(block_id, try_index, deopt_id) {}
GrowableArray<Definition*>* initial_definitions() {
return &initial_definitions_;
}
virtual bool IsBlockEntryWithInitialDefs() { return true; }
virtual BlockEntryWithInitialDefs* AsBlockEntryWithInitialDefs() {
return this;
}
protected:
void PrintInitialDefinitionsTo(BufferFormatter* f) const;
private:
GrowableArray<Definition*> initial_definitions_;
DISALLOW_COPY_AND_ASSIGN(BlockEntryWithInitialDefs);
};
class GraphEntryInstr : public BlockEntryWithInitialDefs {
public:
GraphEntryInstr(const ParsedFunction& parsed_function,
TargetEntryInstr* normal_entry,
intptr_t osr_id);
DECLARE_INSTRUCTION(GraphEntry)
@ -1500,9 +1533,6 @@ class GraphEntryInstr : public BlockEntryInstr {
indirect_entries_.Add(entry);
}
GrowableArray<Definition*>* initial_definitions() {
return &initial_definitions_;
}
ConstantInstr* constant_null();
void RelinkToOsrEntry(Zone* zone, intptr_t max_block_id);
@ -1526,12 +1556,14 @@ class GraphEntryInstr : public BlockEntryInstr {
ASSERT(count >= 0);
fixed_slot_count_ = count;
}
TargetEntryInstr* normal_entry() const { return normal_entry_; }
TargetEntryInstr* unchecked_entry() const { return unchecked_entry_; }
void set_normal_entry(TargetEntryInstr* entry) { normal_entry_ = entry; }
void set_unchecked_entry(TargetEntryInstr* target) {
FunctionEntryInstr* normal_entry() const { return normal_entry_; }
FunctionEntryInstr* unchecked_entry() const { return unchecked_entry_; }
void set_normal_entry(FunctionEntryInstr* entry) { normal_entry_ = entry; }
void set_unchecked_entry(FunctionEntryInstr* target) {
unchecked_entry_ = target;
}
OsrEntryInstr* osr_entry() const { return osr_entry_; }
void set_osr_entry(OsrEntryInstr* entry) { osr_entry_ = entry; }
const ParsedFunction& parsed_function() const { return parsed_function_; }
@ -1543,13 +1575,6 @@ class GraphEntryInstr : public BlockEntryInstr {
return indirect_entries_;
}
bool IsEntryPoint(BlockEntryInstr* entry) const {
if (TargetEntryInstr* target = entry->AsTargetEntry()) {
return target == normal_entry_ || target == unchecked_entry_;
}
return false;
}
bool HasSingleEntryPoint() const {
return catch_entries().is_empty() && unchecked_entry() == nullptr;
}
@ -1561,12 +1586,12 @@ class GraphEntryInstr : public BlockEntryInstr {
virtual void AddPredecessor(BlockEntryInstr* predecessor) { UNREACHABLE(); }
const ParsedFunction& parsed_function_;
TargetEntryInstr* normal_entry_;
TargetEntryInstr* unchecked_entry_ = nullptr;
FunctionEntryInstr* normal_entry_ = nullptr;
FunctionEntryInstr* unchecked_entry_ = nullptr;
OsrEntryInstr* osr_entry_ = nullptr;
GrowableArray<CatchBlockEntryInstr*> catch_entries_;
// Indirect targets are blocks reachable only through indirect gotos.
GrowableArray<IndirectEntryInstr*> indirect_entries_;
GrowableArray<Definition*> initial_definitions_;
const intptr_t osr_id_;
intptr_t entry_count_;
intptr_t spill_slot_count_;
@ -1680,6 +1705,88 @@ class TargetEntryInstr : public BlockEntryInstr {
DISALLOW_COPY_AND_ASSIGN(TargetEntryInstr);
};
// Represents an entrypoint to a function which callers can invoke (i.e. not
// used for OSR entries).
//
// The flow graph builder might decide to create multiple entrypoints
// (e.g. checked/unchecked entrypoints) and will attach those to the
// [GraphEntryInstr].
//
// Every entrypoint has its own initial definitions. The SSA renaming
// will insert phis for parameter instructions if necessary.
class FunctionEntryInstr : public BlockEntryWithInitialDefs {
public:
FunctionEntryInstr(GraphEntryInstr* graph_entry,
intptr_t block_id,
intptr_t try_index,
intptr_t deopt_id)
: BlockEntryWithInitialDefs(block_id, try_index, deopt_id),
graph_entry_(graph_entry) {}
DECLARE_INSTRUCTION(FunctionEntry)
virtual intptr_t PredecessorCount() const {
return (graph_entry_ == nullptr) ? 0 : 1;
}
virtual BlockEntryInstr* PredecessorAt(intptr_t index) const {
ASSERT(index == 0 && graph_entry_ != nullptr);
return graph_entry_;
}
GraphEntryInstr* graph_entry() const { return graph_entry_; }
PRINT_TO_SUPPORT
private:
virtual void ClearPredecessors() { graph_entry_ = nullptr; }
virtual void AddPredecessor(BlockEntryInstr* predecessor) {
ASSERT(graph_entry_ == nullptr && predecessor->IsGraphEntry());
graph_entry_ = predecessor->AsGraphEntry();
}
GraphEntryInstr* graph_entry_;
DISALLOW_COPY_AND_ASSIGN(FunctionEntryInstr);
};
// Represents an OSR entrypoint to a function.
//
// The OSR entry has its own initial definitions.
class OsrEntryInstr : public BlockEntryWithInitialDefs {
public:
OsrEntryInstr(GraphEntryInstr* graph_entry,
intptr_t block_id,
intptr_t try_index,
intptr_t deopt_id)
: BlockEntryWithInitialDefs(block_id, try_index, deopt_id),
graph_entry_(graph_entry) {}
DECLARE_INSTRUCTION(OsrEntry)
virtual intptr_t PredecessorCount() const {
return (graph_entry_ == nullptr) ? 0 : 1;
}
virtual BlockEntryInstr* PredecessorAt(intptr_t index) const {
ASSERT(index == 0 && graph_entry_ != nullptr);
return graph_entry_;
}
GraphEntryInstr* graph_entry() const { return graph_entry_; }
PRINT_TO_SUPPORT
private:
virtual void ClearPredecessors() { graph_entry_ = nullptr; }
virtual void AddPredecessor(BlockEntryInstr* predecessor) {
ASSERT(graph_entry_ == nullptr && predecessor->IsGraphEntry());
graph_entry_ = predecessor->AsGraphEntry();
}
GraphEntryInstr* graph_entry_;
DISALLOW_COPY_AND_ASSIGN(OsrEntryInstr);
};
class IndirectEntryInstr : public JoinEntryInstr {
public:
IndirectEntryInstr(intptr_t block_id,
@ -1699,7 +1806,7 @@ class IndirectEntryInstr : public JoinEntryInstr {
const intptr_t indirect_id_;
};
class CatchBlockEntryInstr : public BlockEntryInstr {
class CatchBlockEntryInstr : public BlockEntryWithInitialDefs {
public:
CatchBlockEntryInstr(TokenPosition handler_token_pos,
bool is_generated,
@ -1714,7 +1821,7 @@ class CatchBlockEntryInstr : public BlockEntryInstr {
const LocalVariable* stacktrace_var,
const LocalVariable* raw_exception_var,
const LocalVariable* raw_stacktrace_var)
: BlockEntryInstr(block_id, try_index, deopt_id),
: BlockEntryWithInitialDefs(block_id, try_index, deopt_id),
graph_entry_(graph_entry),
predecessor_(NULL),
catch_handler_types_(Array::ZoneHandle(handler_types.raw())),
@ -1755,9 +1862,6 @@ class CatchBlockEntryInstr : public BlockEntryInstr {
// Returns try index for the try block to which this catch handler
// corresponds.
intptr_t catch_try_index() const { return catch_try_index_; }
GrowableArray<Definition*>* initial_definitions() {
return &initial_definitions_;
}
PRINT_TO_SUPPORT
@ -2281,7 +2385,7 @@ class LoadIndexedUnsafeInstr : public TemplateDefinition<1, NoThrow> {
DISALLOW_COPY_AND_ASSIGN(LoadIndexedUnsafeInstr);
};
// Unwinds the current frame and taill calls a target.
// Unwinds the current frame and tail calls a target.
//
// The return address saved by the original caller of this frame will be in its
// usual location (stack or LR). The arguments descriptor supplied by the


@ -6559,8 +6559,11 @@ void StopInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
if (!compiler->CanFallThroughTo(normal_entry())) {
__ b(compiler->GetJumpLabel(normal_entry()));
BlockEntryInstr* entry = normal_entry();
if (entry == nullptr) entry = osr_entry();
if (!compiler->CanFallThroughTo(entry)) {
__ b(compiler->GetJumpLabel(entry));
}
}


@ -5822,8 +5822,11 @@ void StopInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
if (!compiler->CanFallThroughTo(normal_entry())) {
__ b(compiler->GetJumpLabel(normal_entry()));
BlockEntryInstr* entry = normal_entry();
if (entry == nullptr) entry = osr_entry();
if (!compiler->CanFallThroughTo(entry)) {
__ b(compiler->GetJumpLabel(entry));
}
}


@ -1270,8 +1270,11 @@ void DebugStepCheckInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
if (!compiler->CanFallThroughTo(normal_entry())) {
__ Jump(compiler->GetJumpLabel(normal_entry()));
BlockEntryInstr* entry = normal_entry();
if (entry == nullptr) entry = osr_entry();
if (!compiler->CanFallThroughTo(entry)) {
__ Jump(compiler->GetJumpLabel(entry));
}
}


@ -5926,8 +5926,11 @@ void StopInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
if (!compiler->CanFallThroughTo(normal_entry())) {
__ jmp(compiler->GetJumpLabel(normal_entry()));
BlockEntryInstr* entry = normal_entry();
if (entry == nullptr) entry = osr_entry();
if (!compiler->CanFallThroughTo(entry)) {
__ jmp(compiler->GetJumpLabel(entry));
}
}


@ -848,9 +848,9 @@ void InvokeMathCFunctionInstr::PrintOperandsTo(BufferFormatter* f) const {
Definition::PrintOperandsTo(f);
}
void GraphEntryInstr::PrintTo(BufferFormatter* f) const {
void BlockEntryWithInitialDefs::PrintInitialDefinitionsTo(
BufferFormatter* f) const {
const GrowableArray<Definition*>& defns = initial_definitions_;
f->Print("B%" Pd "[graph]:%" Pd, block_id(), GetDeoptId());
if (defns.length() > 0) {
f->Print(" {");
for (intptr_t i = 0; i < defns.length(); ++i) {
@ -862,6 +862,11 @@ void GraphEntryInstr::PrintTo(BufferFormatter* f) const {
}
}
void GraphEntryInstr::PrintTo(BufferFormatter* f) const {
f->Print("B%" Pd "[graph]:%" Pd, block_id(), GetDeoptId());
BlockEntryWithInitialDefs::PrintInitialDefinitionsTo(f);
}
void JoinEntryInstr::PrintTo(BufferFormatter* f) const {
if (try_index() != kInvalidTryIndex) {
f->Print("B%" Pd "[join try_idx %" Pd "]:%" Pd " pred(", block_id(),
@ -912,7 +917,7 @@ void IndirectEntryInstr::PrintTo(BufferFormatter* f) const {
}
}
static const char* RepresentationToCString(Representation rep) {
const char* RepresentationToCString(Representation rep) {
switch (rep) {
case kTagged:
return "tagged";
@ -1019,6 +1024,24 @@ void TargetEntryInstr::PrintTo(BufferFormatter* f) const {
}
}
void OsrEntryInstr::PrintTo(BufferFormatter* f) const {
f->Print("B%" Pd "[osr entry]:%" Pd, block_id(), GetDeoptId());
if (HasParallelMove()) {
f->Print("\n");
parallel_move()->PrintTo(f);
}
BlockEntryWithInitialDefs::PrintInitialDefinitionsTo(f);
}
void FunctionEntryInstr::PrintTo(BufferFormatter* f) const {
f->Print("B%" Pd "[function entry]:%" Pd, block_id(), GetDeoptId());
if (HasParallelMove()) {
f->Print("\n");
parallel_move()->PrintTo(f);
}
BlockEntryWithInitialDefs::PrintInitialDefinitionsTo(f);
}
void CatchBlockEntryInstr::PrintTo(BufferFormatter* f) const {
f->Print("B%" Pd "[target catch try_idx %" Pd " catch_try_idx %" Pd "]",
block_id(), try_index(), catch_try_index());
@ -1027,16 +1050,7 @@ void CatchBlockEntryInstr::PrintTo(BufferFormatter* f) const {
parallel_move()->PrintTo(f);
}
const GrowableArray<Definition*>& defns = initial_definitions_;
if (defns.length() > 0) {
f->Print(" {");
for (intptr_t i = 0; i < defns.length(); ++i) {
Definition* def = defns[i];
f->Print("\n ");
def->PrintTo(f);
}
f->Print("\n}");
}
BlockEntryWithInitialDefs::PrintInitialDefinitionsTo(f);
}
void LoadIndexedUnsafeInstr::PrintOperandsTo(BufferFormatter* f) const {
@ -1063,7 +1077,9 @@ void TailCallInstr::PrintOperandsTo(BufferFormatter* f) const {
.ToFullyQualifiedCString();
}
}
f->Print("%s", name);
f->Print("%s(", name);
InputAt(0)->PrintTo(f);
f->Print(")");
}
void PushArgumentInstr::PrintOperandsTo(BufferFormatter* f) const {

View file

@ -13,6 +13,8 @@ namespace dart {
class ParsedFunction;
const char* RepresentationToCString(Representation rep);
// Graph printing.
class FlowGraphPrinter : public ValueObject {
public:


@ -6130,8 +6130,11 @@ void StopInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
if (!compiler->CanFallThroughTo(normal_entry())) {
__ jmp(compiler->GetJumpLabel(normal_entry()));
BlockEntryInstr* entry = normal_entry();
if (entry == nullptr) entry = osr_entry();
if (!compiler->CanFallThroughTo(entry)) {
__ jmp(compiler->GetJumpLabel(entry));
}
}


@ -561,7 +561,7 @@ static void ReplaceParameterStubs(Zone* zone,
const bool is_polymorphic = call_data->call->IsPolymorphicInstanceCall();
ASSERT(is_polymorphic == (target_info != NULL));
FlowGraph* callee_graph = call_data->callee_graph;
TargetEntryInstr* callee_entry = callee_graph->graph_entry()->normal_entry();
auto callee_entry = callee_graph->graph_entry()->normal_entry();
// Replace each stub with the actual argument or the caller's constant.
// Nulls denote optional parameters for which no actual was given.
@ -602,15 +602,23 @@ static void ReplaceParameterStubs(Zone* zone,
// Replace remaining constants with uses by constants in the caller's
// initial definitions.
GrowableArray<Definition*>* defns =
callee_graph->graph_entry()->initial_definitions();
auto defns = callee_graph->graph_entry()->initial_definitions();
for (intptr_t i = 0; i < defns->length(); ++i) {
ConstantInstr* constant = (*defns)[i]->AsConstant();
if ((constant != NULL) && constant->HasUses()) {
if (constant != NULL && constant->HasUses()) {
constant->ReplaceUsesWith(caller_graph->GetConstant(constant->value()));
}
}
defns = callee_graph->graph_entry()->normal_entry()->initial_definitions();
for (intptr_t i = 0; i < defns->length(); ++i) {
ConstantInstr* constant = (*defns)[i]->AsConstant();
if (constant != NULL && constant->HasUses()) {
constant->ReplaceUsesWith(caller_graph->GetConstant(constant->value()));
}
SpecialParameterInstr* param = (*defns)[i]->AsSpecialParameter();
if ((param != NULL) && param->HasUses()) {
if (param != NULL && param->HasUses()) {
switch (param->kind()) {
case SpecialParameterInstr::kContext: {
ASSERT(!is_polymorphic);
@ -1307,12 +1315,12 @@ class CallSiteInliner : public ValueObject {
void InlineCall(InlinedCallData* call_data) {
FlowGraph* callee_graph = call_data->callee_graph;
TargetEntryInstr* callee_entry =
callee_graph->graph_entry()->normal_entry();
auto callee_function_entry = callee_graph->graph_entry()->normal_entry();
// Plug result in the caller graph.
InlineExitCollector* exit_collector = call_data->exit_collector;
exit_collector->PrepareGraphs(callee_graph);
exit_collector->ReplaceCall(callee_entry);
exit_collector->ReplaceCall(callee_function_entry);
ReplaceParameterStubs(zone(), caller_graph_, call_data, NULL);
@ -1631,8 +1639,9 @@ bool PolymorphicInliner::CheckInlinedDuplicate(const Function& target) {
// variant and the shared join for all later variants.
if (inlined_entries_[i]->IsGraphEntry()) {
// Convert the old target entry to a new join entry.
TargetEntryInstr* old_target =
inlined_entries_[i]->AsGraphEntry()->normal_entry();
auto old_entry = inlined_entries_[i]->AsGraphEntry()->normal_entry();
BlockEntryInstr* old_target = old_entry;
// Unuse all inputs in the old graph entry since it is not part of
// the graph anymore. A new target will be created instead.
inlined_entries_[i]->AsGraphEntry()->UnuseAllInputs();
@ -1725,7 +1734,11 @@ static Instruction* AppendInstruction(Instruction* first, Instruction* second) {
bool PolymorphicInliner::TryInlineRecognizedMethod(intptr_t receiver_cid,
const Function& target) {
TargetEntryInstr* entry = nullptr;
auto temp_parsed_function = new (Z) ParsedFunction(Thread::Current(), target);
auto graph_entry =
new (Z) GraphEntryInstr(*temp_parsed_function, Compiler::kNoOSRDeoptId);
FunctionEntryInstr* entry = nullptr;
Instruction* last = nullptr;
// Replace the receiver argument with a redefinition to prevent code from
// the inlined body from being hoisted above the inlined entry.
@ -1738,8 +1751,9 @@ bool PolymorphicInliner::TryInlineRecognizedMethod(intptr_t receiver_cid,
if (FlowGraphInliner::TryInlineRecognizedMethod(
owner_->caller_graph(), receiver_cid, target, call_, redefinition,
call_->instance_call()->token_pos(),
call_->instance_call()->ic_data(), &entry, &last,
call_->instance_call()->ic_data(), graph_entry, &entry, &last,
owner_->inliner_->speculative_policy())) {
graph_entry->set_normal_entry(entry);
ASSERT(last->IsDefinition());
// Create a graph fragment.
redefinition->InsertAfter(entry);
@ -1754,10 +1768,7 @@ bool PolymorphicInliner::TryInlineRecognizedMethod(intptr_t receiver_cid,
FlowGraph::kEffect);
entry->set_last_instruction(result);
exit_collector->AddExit(result);
ParsedFunction* temp_parsed_function =
new ParsedFunction(Thread::Current(), target);
GraphEntryInstr* graph_entry = new (Z)
GraphEntryInstr(*temp_parsed_function, entry, Compiler::kNoOSRDeoptId);
// Update polymorphic inliner state.
inlined_entries_.Add(graph_entry);
exit_collector_->Union(exit_collector);
@ -1823,7 +1834,7 @@ TargetEntryInstr* PolymorphicInliner::BuildDecisionGraph() {
if (callee_entry->IsGraphEntry()) {
// Unshared. Graft the normal entry on after the check class
// instruction.
TargetEntryInstr* target = callee_entry->AsGraphEntry()->normal_entry();
auto target = callee_entry->AsGraphEntry()->normal_entry();
cursor->LinkTo(target->next());
target->ReplaceAsPredecessorWith(current_block);
// Unuse all inputs of the graph entry and the normal entry. They are
@ -1912,10 +1923,21 @@ TargetEntryInstr* PolymorphicInliner::BuildDecisionGraph() {
TargetEntryInstr* true_target = NULL;
if (callee_entry->IsGraphEntry()) {
// Unshared.
true_target = callee_entry->AsGraphEntry()->normal_entry();
auto graph_entry = callee_entry->AsGraphEntry();
auto function_entry = graph_entry->normal_entry();
true_target = BranchSimplifier::ToTargetEntry(zone(), function_entry);
function_entry->ReplaceAsPredecessorWith(true_target);
for (intptr_t j = 0; j < function_entry->dominated_blocks().length();
++j) {
BlockEntryInstr* block = function_entry->dominated_blocks()[j];
true_target->AddDominatedBlock(block);
}
// Unuse all inputs of the graph entry. It is not in the graph anymore.
callee_entry->UnuseAllInputs();
graph_entry->UnuseAllInputs();
} else if (callee_entry->IsTargetEntry()) {
ASSERT(!callee_entry->IsFunctionEntry());
// Shared inlined body and this is the first entry. We have already
// constructed a join and this target jumps to it.
true_target = callee_entry->AsTargetEntry();
@ -2330,7 +2352,8 @@ static bool InlineGetIndexed(FlowGraph* flow_graph,
MethodRecognizer::Kind kind,
Instruction* call,
Definition* receiver,
TargetEntryInstr** entry,
GraphEntryInstr* graph_entry,
FunctionEntryInstr** entry,
Instruction** last,
bool can_speculate) {
intptr_t array_cid = MethodRecognizer::MethodKindToReceiverCid(kind);
@ -2338,8 +2361,8 @@ static bool InlineGetIndexed(FlowGraph* flow_graph,
Definition* array = receiver;
Definition* index = call->ArgumentAt(1);
*entry =
new (Z) TargetEntryInstr(flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
(*entry)->InheritDeoptTarget(Z, call);
Instruction* cursor = *entry;
@ -2380,7 +2403,8 @@ static bool InlineSetIndexed(FlowGraph* flow_graph,
TokenPosition token_pos,
const Cids* value_check,
FlowGraphInliner::ExactnessInfo* exactness,
TargetEntryInstr** entry,
GraphEntryInstr* graph_entry,
FunctionEntryInstr** entry,
Instruction** last) {
intptr_t array_cid = MethodRecognizer::MethodKindToReceiverCid(kind);
@ -2389,8 +2413,8 @@ static bool InlineSetIndexed(FlowGraph* flow_graph,
Definition* stored_value = call->ArgumentAt(2);
*entry =
new (Z) TargetEntryInstr(flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
(*entry)->InheritDeoptTarget(Z, call);
Instruction* cursor = *entry;
if (flow_graph->isolate()->argument_type_checks() &&
@ -2523,7 +2547,8 @@ static bool InlineDoubleOp(FlowGraph* flow_graph,
Token::Kind op_kind,
Instruction* call,
Definition* receiver,
TargetEntryInstr** entry,
GraphEntryInstr* graph_entry,
FunctionEntryInstr** entry,
Instruction** last) {
if (!CanUnboxDouble()) {
return false;
@ -2532,8 +2557,8 @@ static bool InlineDoubleOp(FlowGraph* flow_graph,
Definition* right = call->ArgumentAt(1);
*entry =
new (Z) TargetEntryInstr(flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
(*entry)->InheritDeoptTarget(Z, call);
// Arguments are checked. No need for class check.
BinaryDoubleOpInstr* double_bin_op = new (Z)
@ -2549,15 +2574,16 @@ static bool InlineDoubleTestOp(FlowGraph* flow_graph,
Instruction* call,
Definition* receiver,
MethodRecognizer::Kind kind,
TargetEntryInstr** entry,
GraphEntryInstr* graph_entry,
FunctionEntryInstr** entry,
Instruction** last) {
if (!CanUnboxDouble()) {
return false;
}
*entry =
new (Z) TargetEntryInstr(flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
(*entry)->InheritDeoptTarget(Z, call);
// Arguments are checked. No need for class check.
@ -2572,14 +2598,15 @@ static bool InlineDoubleTestOp(FlowGraph* flow_graph,
static bool InlineSmiBitAndFromSmi(FlowGraph* flow_graph,
Instruction* call,
Definition* receiver,
TargetEntryInstr** entry,
GraphEntryInstr* graph_entry,
FunctionEntryInstr** entry,
Instruction** last) {
Definition* left = receiver;
Definition* right = call->ArgumentAt(1);
*entry =
new (Z) TargetEntryInstr(flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
(*entry)->InheritDeoptTarget(Z, call);
  // Right argument is known to be a smi: other._bitAndFromSmi(this);
BinarySmiOpInstr* smi_op =
@ -2596,14 +2623,15 @@ static bool InlineGrowableArraySetter(FlowGraph* flow_graph,
StoreBarrierType store_barrier_type,
Instruction* call,
Definition* receiver,
TargetEntryInstr** entry,
GraphEntryInstr* graph_entry,
FunctionEntryInstr** entry,
Instruction** last) {
Definition* array = receiver;
Definition* value = call->ArgumentAt(1);
*entry =
new (Z) TargetEntryInstr(flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
(*entry)->InheritDeoptTarget(Z, call);
// This is an internal method, no need to check argument types.
@ -2743,7 +2771,8 @@ static bool InlineByteArrayBaseLoad(FlowGraph* flow_graph,
Definition* receiver,
intptr_t array_cid,
intptr_t view_cid,
TargetEntryInstr** entry,
GraphEntryInstr* graph_entry,
FunctionEntryInstr** entry,
Instruction** last) {
ASSERT(array_cid != kIllegalCid);
@ -2761,8 +2790,8 @@ static bool InlineByteArrayBaseLoad(FlowGraph* flow_graph,
Definition* array = receiver;
Definition* index = call->ArgumentAt(1);
*entry =
new (Z) TargetEntryInstr(flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
(*entry)->InheritDeoptTarget(Z, call);
Instruction* cursor = *entry;
@ -2841,7 +2870,8 @@ static bool InlineByteArrayBaseStore(FlowGraph* flow_graph,
Definition* receiver,
intptr_t array_cid,
intptr_t view_cid,
TargetEntryInstr** entry,
GraphEntryInstr* graph_entry,
FunctionEntryInstr** entry,
Instruction** last) {
ASSERT(array_cid != kIllegalCid);
@ -2859,8 +2889,8 @@ static bool InlineByteArrayBaseStore(FlowGraph* flow_graph,
Definition* array = receiver;
Definition* index = call->ArgumentAt(1);
*entry =
new (Z) TargetEntryInstr(flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
(*entry)->InheritDeoptTarget(Z, call);
Instruction* cursor = *entry;
@ -3073,7 +3103,8 @@ static bool InlineStringBaseCharAt(FlowGraph* flow_graph,
Instruction* call,
Definition* receiver,
intptr_t cid,
TargetEntryInstr** entry,
GraphEntryInstr* graph_entry,
FunctionEntryInstr** entry,
Instruction** last) {
if ((cid != kOneByteStringCid) && (cid != kExternalOneByteStringCid)) {
return false;
@ -3082,8 +3113,8 @@ static bool InlineStringBaseCharAt(FlowGraph* flow_graph,
Definition* index = call->ArgumentAt(1);
*entry =
new (Z) TargetEntryInstr(flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
(*entry)->InheritDeoptTarget(Z, call);
*last = PrepareInlineStringIndexOp(flow_graph, call, cid, str, index, *entry);
@ -3101,7 +3132,8 @@ static bool InlineStringCodeUnitAt(FlowGraph* flow_graph,
Instruction* call,
Definition* receiver,
intptr_t cid,
TargetEntryInstr** entry,
GraphEntryInstr* graph_entry,
FunctionEntryInstr** entry,
Instruction** last) {
if (cid == kDynamicCid) {
ASSERT(call->IsStaticCall());
@ -3114,8 +3146,8 @@ static bool InlineStringCodeUnitAt(FlowGraph* flow_graph,
Definition* index = call->ArgumentAt(1);
*entry =
new (Z) TargetEntryInstr(flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
(*entry)->InheritDeoptTarget(Z, call);
*last = PrepareInlineStringIndexOp(flow_graph, call, cid, str, index, *entry);
@ -3133,14 +3165,14 @@ bool FlowGraphInliner::TryReplaceInstanceCallWithInline(
GrowableArray<intptr_t> class_ids;
call->ic_data()->GetCheckAt(0, &class_ids, &target);
const intptr_t receiver_cid = class_ids[0];
TargetEntryInstr* entry = nullptr;
FunctionEntryInstr* entry = nullptr;
Instruction* last = nullptr;
auto exactness = call->ic_data()->GetExactnessAt(0);
ExactnessInfo exactness_info{exactness.IsExact(), false};
if (FlowGraphInliner::TryInlineRecognizedMethod(
flow_graph, receiver_cid, target, call,
call->Receiver()->definition(), call->token_pos(), call->ic_data(),
&entry, &last, policy, &exactness_info)) {
/*graph_entry=*/nullptr, &entry, &last, policy, &exactness_info)) {
// Determine if inlining instance methods needs a check.
FlowGraph::ToCheck check = FlowGraph::ToCheck::kNoCheck;
if (MethodRecognizer::PolymorphicTarget(target)) {
@ -3212,7 +3244,7 @@ bool FlowGraphInliner::TryReplaceStaticCallWithInline(
ForwardInstructionIterator* iterator,
StaticCallInstr* call,
SpeculativeInliningPolicy* policy) {
TargetEntryInstr* entry = nullptr;
FunctionEntryInstr* entry = nullptr;
Instruction* last = nullptr;
Definition* receiver = nullptr;
intptr_t receiver_cid = kIllegalCid;
@ -3222,7 +3254,8 @@ bool FlowGraphInliner::TryReplaceStaticCallWithInline(
}
if (FlowGraphInliner::TryInlineRecognizedMethod(
flow_graph, receiver_cid, call->function(), call, receiver,
call->token_pos(), call->ic_data(), &entry, &last, policy)) {
call->token_pos(), call->ic_data(), /*graph_entry=*/nullptr, &entry,
&last, policy)) {
// Remove the original push arguments.
for (intptr_t i = 0; i < call->ArgumentCount(); ++i) {
PushArgumentInstr* push = call->PushArgumentAt(i);
@ -3292,14 +3325,15 @@ static bool InlineSimdOp(FlowGraph* flow_graph,
Instruction* call,
Definition* receiver,
MethodRecognizer::Kind kind,
TargetEntryInstr** entry,
GraphEntryInstr* graph_entry,
FunctionEntryInstr** entry,
Instruction** last) {
if (!ShouldInlineSimd()) {
return false;
}
*entry =
new (Z) TargetEntryInstr(flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
(*entry)->InheritDeoptTarget(Z, call);
Instruction* cursor = *entry;
switch (kind) {
@ -3361,14 +3395,15 @@ static bool InlineSimdOp(FlowGraph* flow_graph,
static bool InlineMathCFunction(FlowGraph* flow_graph,
Instruction* call,
MethodRecognizer::Kind kind,
TargetEntryInstr** entry,
GraphEntryInstr* graph_entry,
FunctionEntryInstr** entry,
Instruction** last) {
if (!CanUnboxDouble()) {
return false;
}
*entry =
new (Z) TargetEntryInstr(flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
(*entry)->InheritDeoptTarget(Z, call);
Instruction* cursor = *entry;
@ -3408,7 +3443,8 @@ static Instruction* InlineMul(FlowGraph* flow_graph,
static bool InlineMathIntPow(FlowGraph* flow_graph,
Instruction* call,
TargetEntryInstr** entry,
GraphEntryInstr* graph_entry,
FunctionEntryInstr** entry,
Instruction** last) {
// Invoking the _intPow(x, y) implies that both:
// (1) x, y are int
@ -3430,8 +3466,8 @@ static bool InlineMathIntPow(FlowGraph* flow_graph,
} else if (1 < val && val <= small_exponent) {
// Lazily construct entry only in this case.
*entry = new (Z)
TargetEntryInstr(flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
(*entry)->InheritDeoptTarget(Z, call);
Definition* x_def = x->definition();
Definition* square =
@ -3471,7 +3507,8 @@ bool FlowGraphInliner::TryInlineRecognizedMethod(
Definition* receiver,
TokenPosition token_pos,
const ICData* ic_data,
TargetEntryInstr** entry,
GraphEntryInstr* graph_entry,
FunctionEntryInstr** entry,
Instruction** last,
SpeculativeInliningPolicy* policy,
FlowGraphInliner::ExactnessInfo* exactness) {
@ -3491,36 +3528,36 @@ bool FlowGraphInliner::TryInlineRecognizedMethod(
case MethodRecognizer::kExternalUint8ClampedArrayGetIndexed:
case MethodRecognizer::kInt16ArrayGetIndexed:
case MethodRecognizer::kUint16ArrayGetIndexed:
return InlineGetIndexed(flow_graph, kind, call, receiver, entry, last,
can_speculate);
return InlineGetIndexed(flow_graph, kind, call, receiver, graph_entry,
entry, last, can_speculate);
case MethodRecognizer::kFloat32ArrayGetIndexed:
case MethodRecognizer::kFloat64ArrayGetIndexed:
if (!CanUnboxDouble()) {
return false;
}
return InlineGetIndexed(flow_graph, kind, call, receiver, entry, last,
can_speculate);
return InlineGetIndexed(flow_graph, kind, call, receiver, graph_entry,
entry, last, can_speculate);
case MethodRecognizer::kFloat32x4ArrayGetIndexed:
case MethodRecognizer::kFloat64x2ArrayGetIndexed:
if (!ShouldInlineSimd()) {
return false;
}
return InlineGetIndexed(flow_graph, kind, call, receiver, entry, last,
can_speculate);
return InlineGetIndexed(flow_graph, kind, call, receiver, graph_entry,
entry, last, can_speculate);
case MethodRecognizer::kInt32ArrayGetIndexed:
case MethodRecognizer::kUint32ArrayGetIndexed:
if (!CanUnboxInt32()) {
return false;
}
return InlineGetIndexed(flow_graph, kind, call, receiver, entry, last,
can_speculate);
return InlineGetIndexed(flow_graph, kind, call, receiver, graph_entry,
entry, last, can_speculate);
case MethodRecognizer::kInt64ArrayGetIndexed:
case MethodRecognizer::kUint64ArrayGetIndexed:
if (!ShouldInlineInt64ArrayOps()) {
return false;
}
return InlineGetIndexed(flow_graph, kind, call, receiver, entry, last,
can_speculate);
return InlineGetIndexed(flow_graph, kind, call, receiver, graph_entry,
entry, last, can_speculate);
default:
break;
}
@ -3538,7 +3575,7 @@ bool FlowGraphInliner::TryInlineRecognizedMethod(
case MethodRecognizer::kGrowableArraySetIndexedUnchecked:
return InlineSetIndexed(flow_graph, kind, target, call, receiver,
token_pos, /* value_check = */ NULL, exactness,
entry, last);
graph_entry, entry, last);
case MethodRecognizer::kInt8ArraySetIndexed:
case MethodRecognizer::kUint8ArraySetIndexed:
case MethodRecognizer::kUint8ClampedArraySetIndexed:
@ -3553,7 +3590,8 @@ bool FlowGraphInliner::TryInlineRecognizedMethod(
}
Cids* value_check = Cids::CreateMonomorphic(Z, kSmiCid);
return InlineSetIndexed(flow_graph, kind, target, call, receiver,
token_pos, value_check, exactness, entry, last);
token_pos, value_check, exactness, graph_entry,
entry, last);
}
case MethodRecognizer::kInt32ArraySetIndexed:
case MethodRecognizer::kUint32ArraySetIndexed: {
@ -3561,7 +3599,7 @@ bool FlowGraphInliner::TryInlineRecognizedMethod(
// implicitly contain unboxing instructions which check for right type.
return InlineSetIndexed(flow_graph, kind, target, call, receiver,
token_pos, /* value_check = */ NULL, exactness,
entry, last);
graph_entry, entry, last);
}
case MethodRecognizer::kInt64ArraySetIndexed:
case MethodRecognizer::kUint64ArraySetIndexed:
@ -3570,7 +3608,7 @@ bool FlowGraphInliner::TryInlineRecognizedMethod(
}
return InlineSetIndexed(flow_graph, kind, target, call, receiver,
token_pos, /* value_check = */ NULL, exactness,
entry, last);
graph_entry, entry, last);
case MethodRecognizer::kFloat32ArraySetIndexed:
case MethodRecognizer::kFloat64ArraySetIndexed: {
if (!CanUnboxDouble()) {
@ -3578,7 +3616,8 @@ bool FlowGraphInliner::TryInlineRecognizedMethod(
}
Cids* value_check = Cids::CreateMonomorphic(Z, kDoubleCid);
return InlineSetIndexed(flow_graph, kind, target, call, receiver,
token_pos, value_check, exactness, entry, last);
token_pos, value_check, exactness, graph_entry,
entry, last);
}
case MethodRecognizer::kFloat32x4ArraySetIndexed: {
if (!ShouldInlineSimd()) {
@ -3586,7 +3625,8 @@ bool FlowGraphInliner::TryInlineRecognizedMethod(
}
Cids* value_check = Cids::CreateMonomorphic(Z, kFloat32x4Cid);
return InlineSetIndexed(flow_graph, kind, target, call, receiver,
token_pos, value_check, exactness, entry, last);
token_pos, value_check, exactness, graph_entry,
entry, last);
}
case MethodRecognizer::kFloat64x2ArraySetIndexed: {
if (!ShouldInlineSimd()) {
@ -3594,158 +3634,172 @@ bool FlowGraphInliner::TryInlineRecognizedMethod(
}
Cids* value_check = Cids::CreateMonomorphic(Z, kFloat64x2Cid);
return InlineSetIndexed(flow_graph, kind, target, call, receiver,
token_pos, value_check, exactness, entry, last);
token_pos, value_check, exactness, graph_entry,
entry, last);
}
case MethodRecognizer::kByteArrayBaseGetInt8:
return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid,
kTypedDataInt8ArrayCid, entry, last);
kTypedDataInt8ArrayCid, graph_entry, entry,
last);
case MethodRecognizer::kByteArrayBaseGetUint8:
return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid,
kTypedDataUint8ArrayCid, entry, last);
kTypedDataUint8ArrayCid, graph_entry,
entry, last);
case MethodRecognizer::kByteArrayBaseGetInt16:
return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid,
kTypedDataInt16ArrayCid, entry, last);
kTypedDataInt16ArrayCid, graph_entry,
entry, last);
case MethodRecognizer::kByteArrayBaseGetUint16:
return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid,
kTypedDataUint16ArrayCid, entry, last);
kTypedDataUint16ArrayCid, graph_entry,
entry, last);
case MethodRecognizer::kByteArrayBaseGetInt32:
if (!CanUnboxInt32()) {
return false;
}
return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid,
kTypedDataInt32ArrayCid, entry, last);
kTypedDataInt32ArrayCid, graph_entry,
entry, last);
case MethodRecognizer::kByteArrayBaseGetUint32:
if (!CanUnboxInt32()) {
return false;
}
return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid,
kTypedDataUint32ArrayCid, entry, last);
kTypedDataUint32ArrayCid, graph_entry,
entry, last);
case MethodRecognizer::kByteArrayBaseGetInt64:
if (!ShouldInlineInt64ArrayOps()) {
return false;
}
return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid,
kTypedDataInt64ArrayCid, entry, last);
kTypedDataInt64ArrayCid, graph_entry,
entry, last);
case MethodRecognizer::kByteArrayBaseGetUint64:
if (!ShouldInlineInt64ArrayOps()) {
return false;
}
return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid,
kTypedDataUint64ArrayCid, entry, last);
kTypedDataUint64ArrayCid, graph_entry,
entry, last);
case MethodRecognizer::kByteArrayBaseGetFloat32:
if (!CanUnboxDouble()) {
return false;
}
return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid,
kTypedDataFloat32ArrayCid, entry, last);
kTypedDataFloat32ArrayCid, graph_entry,
entry, last);
case MethodRecognizer::kByteArrayBaseGetFloat64:
if (!CanUnboxDouble()) {
return false;
}
return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid,
kTypedDataFloat64ArrayCid, entry, last);
kTypedDataFloat64ArrayCid, graph_entry,
entry, last);
case MethodRecognizer::kByteArrayBaseGetFloat32x4:
if (!ShouldInlineSimd()) {
return false;
}
return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid,
kTypedDataFloat32x4ArrayCid, entry, last);
kTypedDataFloat32x4ArrayCid, graph_entry,
entry, last);
case MethodRecognizer::kByteArrayBaseGetInt32x4:
if (!ShouldInlineSimd()) {
return false;
}
return InlineByteArrayBaseLoad(flow_graph, call, receiver, receiver_cid,
kTypedDataInt32x4ArrayCid, entry, last);
kTypedDataInt32x4ArrayCid, graph_entry,
entry, last);
case MethodRecognizer::kByteArrayBaseSetInt8:
return InlineByteArrayBaseStore(flow_graph, target, call, receiver,
receiver_cid, kTypedDataInt8ArrayCid,
entry, last);
graph_entry, entry, last);
case MethodRecognizer::kByteArrayBaseSetUint8:
return InlineByteArrayBaseStore(flow_graph, target, call, receiver,
receiver_cid, kTypedDataUint8ArrayCid,
entry, last);
graph_entry, entry, last);
case MethodRecognizer::kByteArrayBaseSetInt16:
return InlineByteArrayBaseStore(flow_graph, target, call, receiver,
receiver_cid, kTypedDataInt16ArrayCid,
entry, last);
graph_entry, entry, last);
case MethodRecognizer::kByteArrayBaseSetUint16:
return InlineByteArrayBaseStore(flow_graph, target, call, receiver,
receiver_cid, kTypedDataUint16ArrayCid,
entry, last);
graph_entry, entry, last);
case MethodRecognizer::kByteArrayBaseSetInt32:
return InlineByteArrayBaseStore(flow_graph, target, call, receiver,
receiver_cid, kTypedDataInt32ArrayCid,
entry, last);
graph_entry, entry, last);
case MethodRecognizer::kByteArrayBaseSetUint32:
return InlineByteArrayBaseStore(flow_graph, target, call, receiver,
receiver_cid, kTypedDataUint32ArrayCid,
entry, last);
graph_entry, entry, last);
case MethodRecognizer::kByteArrayBaseSetInt64:
if (!ShouldInlineInt64ArrayOps()) {
return false;
}
return InlineByteArrayBaseStore(flow_graph, target, call, receiver,
receiver_cid, kTypedDataInt64ArrayCid,
entry, last);
graph_entry, entry, last);
case MethodRecognizer::kByteArrayBaseSetUint64:
if (!ShouldInlineInt64ArrayOps()) {
return false;
}
return InlineByteArrayBaseStore(flow_graph, target, call, receiver,
receiver_cid, kTypedDataUint64ArrayCid,
entry, last);
graph_entry, entry, last);
case MethodRecognizer::kByteArrayBaseSetFloat32:
if (!CanUnboxDouble()) {
return false;
}
return InlineByteArrayBaseStore(flow_graph, target, call, receiver,
receiver_cid, kTypedDataFloat32ArrayCid,
entry, last);
graph_entry, entry, last);
case MethodRecognizer::kByteArrayBaseSetFloat64:
if (!CanUnboxDouble()) {
return false;
}
return InlineByteArrayBaseStore(flow_graph, target, call, receiver,
receiver_cid, kTypedDataFloat64ArrayCid,
entry, last);
graph_entry, entry, last);
case MethodRecognizer::kByteArrayBaseSetFloat32x4:
if (!ShouldInlineSimd()) {
return false;
}
return InlineByteArrayBaseStore(flow_graph, target, call, receiver,
receiver_cid, kTypedDataFloat32x4ArrayCid,
entry, last);
graph_entry, entry, last);
case MethodRecognizer::kByteArrayBaseSetInt32x4:
if (!ShouldInlineSimd()) {
return false;
}
return InlineByteArrayBaseStore(flow_graph, target, call, receiver,
receiver_cid, kTypedDataInt32x4ArrayCid,
entry, last);
graph_entry, entry, last);
case MethodRecognizer::kOneByteStringCodeUnitAt:
case MethodRecognizer::kTwoByteStringCodeUnitAt:
case MethodRecognizer::kExternalOneByteStringCodeUnitAt:
case MethodRecognizer::kExternalTwoByteStringCodeUnitAt:
return InlineStringCodeUnitAt(flow_graph, call, receiver, receiver_cid,
entry, last);
graph_entry, entry, last);
case MethodRecognizer::kStringBaseCharAt:
return InlineStringBaseCharAt(flow_graph, call, receiver, receiver_cid,
entry, last);
graph_entry, entry, last);
case MethodRecognizer::kDoubleAdd:
return InlineDoubleOp(flow_graph, Token::kADD, call, receiver, entry,
last);
return InlineDoubleOp(flow_graph, Token::kADD, call, receiver,
graph_entry, entry, last);
case MethodRecognizer::kDoubleSub:
return InlineDoubleOp(flow_graph, Token::kSUB, call, receiver, entry,
last);
return InlineDoubleOp(flow_graph, Token::kSUB, call, receiver,
graph_entry, entry, last);
case MethodRecognizer::kDoubleMul:
return InlineDoubleOp(flow_graph, Token::kMUL, call, receiver, entry,
last);
return InlineDoubleOp(flow_graph, Token::kMUL, call, receiver,
graph_entry, entry, last);
case MethodRecognizer::kDoubleDiv:
return InlineDoubleOp(flow_graph, Token::kDIV, call, receiver, entry,
last);
return InlineDoubleOp(flow_graph, Token::kDIV, call, receiver,
graph_entry, entry, last);
case MethodRecognizer::kDouble_getIsNaN:
case MethodRecognizer::kDouble_getIsInfinite:
return InlineDoubleTestOp(flow_graph, call, receiver, kind, entry, last);
return InlineDoubleTestOp(flow_graph, call, receiver, kind, graph_entry,
entry, last);
case MethodRecognizer::kGrowableArraySetData:
ASSERT((receiver_cid == kGrowableObjectArrayCid) ||
((receiver_cid == kDynamicCid) && call->IsStaticCall()));
@ -3753,7 +3807,7 @@ bool FlowGraphInliner::TryInlineRecognizedMethod(
(ic_data == NULL || ic_data->NumberOfChecksIs(1)));
return InlineGrowableArraySetter(
flow_graph, GrowableObjectArray::data_offset(), kEmitStoreBarrier,
call, receiver, entry, last);
call, receiver, graph_entry, entry, last);
case MethodRecognizer::kGrowableArraySetLength:
ASSERT((receiver_cid == kGrowableObjectArrayCid) ||
((receiver_cid == kDynamicCid) && call->IsStaticCall()));
@ -3761,9 +3815,10 @@ bool FlowGraphInliner::TryInlineRecognizedMethod(
(ic_data == NULL || ic_data->NumberOfChecksIs(1)));
return InlineGrowableArraySetter(
flow_graph, GrowableObjectArray::length_offset(), kNoStoreBarrier,
call, receiver, entry, last);
call, receiver, graph_entry, entry, last);
case MethodRecognizer::kSmi_bitAndFromSmi:
return InlineSmiBitAndFromSmi(flow_graph, call, receiver, entry, last);
return InlineSmiBitAndFromSmi(flow_graph, call, receiver, graph_entry,
entry, last);
case MethodRecognizer::kFloat32x4Abs:
case MethodRecognizer::kFloat32x4Clamp:
@ -3826,7 +3881,8 @@ bool FlowGraphInliner::TryInlineRecognizedMethod(
case MethodRecognizer::kInt32x4ShuffleMix:
case MethodRecognizer::kFloat32x4Shuffle:
case MethodRecognizer::kInt32x4Shuffle:
return InlineSimdOp(flow_graph, call, receiver, kind, entry, last);
return InlineSimdOp(flow_graph, call, receiver, kind, graph_entry, entry,
last);
case MethodRecognizer::kMathSqrt:
case MethodRecognizer::kMathDoublePow:
@ -3837,15 +3893,16 @@ bool FlowGraphInliner::TryInlineRecognizedMethod(
case MethodRecognizer::kMathAcos:
case MethodRecognizer::kMathAtan:
case MethodRecognizer::kMathAtan2:
return InlineMathCFunction(flow_graph, call, kind, entry, last);
return InlineMathCFunction(flow_graph, call, kind, graph_entry, entry,
last);
case MethodRecognizer::kMathIntPow:
return InlineMathIntPow(flow_graph, call, entry, last);
return InlineMathIntPow(flow_graph, call, graph_entry, entry, last);
case MethodRecognizer::kObjectConstructor: {
*entry = new (Z)
TargetEntryInstr(flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
(*entry)->InheritDeoptTarget(Z, call);
ASSERT(!call->HasUses());
*last = NULL; // Empty body.
@ -3862,8 +3919,8 @@ bool FlowGraphInliner::TryInlineRecognizedMethod(
const auto type = new (Z) Value(call->ArgumentAt(0));
const auto num_elements = new (Z) Value(call->ArgumentAt(1));
*entry = new (Z)
TargetEntryInstr(flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
(*entry)->InheritDeoptTarget(Z, call);
*last = new (Z) CreateArrayInstr(call->token_pos(), type, num_elements,
call->deopt_id());
@ -3881,8 +3938,8 @@ bool FlowGraphInliner::TryInlineRecognizedMethod(
if (length >= 0 && length <= Array::kMaxElements) {
Value* type = new (Z) Value(call->ArgumentAt(0));
*entry = new (Z)
TargetEntryInstr(flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
(*entry)->InheritDeoptTarget(Z, call);
*last = new (Z) CreateArrayInstr(call->token_pos(), type,
num_elements, call->deopt_id());
@ -3916,8 +3973,8 @@ bool FlowGraphInliner::TryInlineRecognizedMethod(
if (!type.IsNull()) {
*entry = new (Z)
TargetEntryInstr(flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
(*entry)->InheritDeoptTarget(Z, call);
*last = new (Z) ConstantInstr(type);
flow_graph->AppendTo(
@ -3933,8 +3990,8 @@ bool FlowGraphInliner::TryInlineRecognizedMethod(
      // This is an internal method, no need to check argument types or
      // range.
*entry = new (Z)
TargetEntryInstr(flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
call->GetBlock()->try_index(), DeoptId::kNone);
(*entry)->InheritDeoptTarget(Z, call);
Definition* str = call->ArgumentAt(0);
Definition* index = call->ArgumentAt(1);


@ -15,6 +15,8 @@ class Field;
class FlowGraph;
class ForwardInstructionIterator;
class Function;
class FunctionEntryInstr;
class GraphEntryInstr;
class ICData;
class InstanceCallInstr;
class Instruction;
@ -134,7 +136,8 @@ class FlowGraphInliner : ValueObject {
Definition* receiver,
TokenPosition token_pos,
const ICData* ic_data,
TargetEntryInstr** entry,
GraphEntryInstr* graph_entry,
FunctionEntryInstr** entry,
Instruction** last,
SpeculativeInliningPolicy* policy,
ExactnessInfo* exactness = nullptr);


@ -243,26 +243,16 @@ void SSALivenessAnalysis::ComputeInitialSets() {
}
}
}
} else if (block->IsCatchBlockEntry()) {
// Process initial definitions.
CatchBlockEntryInstr* catch_entry = block->AsCatchBlockEntry();
for (intptr_t i = 0; i < catch_entry->initial_definitions()->length();
i++) {
Definition* def = (*catch_entry->initial_definitions())[i];
} else if (auto entry = block->AsBlockEntryWithInitialDefs()) {
// Process initial definitions, i.e. parameters and special parameters.
for (intptr_t i = 0; i < entry->initial_definitions()->length(); i++) {
Definition* def = (*entry->initial_definitions())[i];
const intptr_t vreg = def->ssa_temp_index();
kill_[catch_entry->postorder_number()]->Add(vreg);
live_in_[catch_entry->postorder_number()]->Remove(vreg);
kill_[entry->postorder_number()]->Add(vreg);
live_in_[entry->postorder_number()]->Remove(vreg);
}
}
}
// Process initial definitions, ie, constants and incoming parameters.
for (intptr_t i = 0; i < graph_entry_->initial_definitions()->length(); i++) {
Definition* def = (*graph_entry_->initial_definitions())[i];
const intptr_t vreg = def->ssa_temp_index();
kill_[graph_entry_->postorder_number()]->Add(vreg);
live_in_[graph_entry_->postorder_number()]->Remove(vreg);
}
}
UsePosition* LiveRange::AddUse(intptr_t pos, Location* location_slot) {
@ -584,14 +574,11 @@ void FlowGraphAllocator::BuildLiveRanges() {
}
}
if (block->IsJoinEntry()) {
ConnectIncomingPhiMoves(block->AsJoinEntry());
} else if (block->IsCatchBlockEntry()) {
if (auto join_entry = block->AsJoinEntry()) {
ConnectIncomingPhiMoves(join_entry);
} else if (auto catch_entry = block->AsCatchBlockEntry()) {
// Process initial definitions.
CatchBlockEntryInstr* catch_entry = block->AsCatchBlockEntry();
ProcessEnvironmentUses(catch_entry, catch_entry); // For lazy deopt
for (intptr_t i = 0; i < catch_entry->initial_definitions()->length();
i++) {
Definition* defn = (*catch_entry->initial_definitions())[i];
@ -599,6 +586,17 @@ void FlowGraphAllocator::BuildLiveRanges() {
range->DefineAt(catch_entry->start_pos()); // Defined at block entry.
ProcessInitialDefinition(defn, range, catch_entry);
}
} else if (auto entry = block->AsBlockEntryWithInitialDefs()) {
ASSERT(block->IsFunctionEntry() || block->IsOsrEntry());
auto& initial_definitions = *entry->initial_definitions();
for (intptr_t i = 0; i < initial_definitions.length(); i++) {
Definition* defn = initial_definitions[i];
ASSERT(!defn->HasPairRepresentation());
LiveRange* range = GetLiveRange(defn->ssa_temp_index());
range->AddUseInterval(entry->start_pos(), entry->start_pos() + 2);
range->DefineAt(entry->start_pos());
ProcessInitialDefinition(defn, range, entry);
}
}
}
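The two register-allocator hunks above generalize the old catch-entry special case: SSALivenessAnalysis folds all entry kinds into one AsBlockEntryWithInitialDefs() branch, while BuildLiveRanges keeps catch entries special but routes function and OSR entries through the same base class. A minimal sketch of the idiom:

    if (auto entry = block->AsBlockEntryWithInitialDefs()) {
      // FunctionEntry, OsrEntry and CatchBlockEntry all carry their own
      // initial definitions now; only constants stay on the GraphEntry.
      for (intptr_t i = 0; i < entry->initial_definitions()->length(); i++) {
        Definition* def = (*entry->initial_definitions())[i];
        // ... uniform per-definition handling ...
      }
    }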
@ -729,10 +727,12 @@ void FlowGraphAllocator::ProcessInitialDefinition(Definition* defn,
range->set_assigned_location(loc);
if (loc.IsRegister()) {
AssignSafepoints(defn, range);
if (range->End() > kNormalEntryPos) {
SplitInitialDefinitionAt(range, kNormalEntryPos);
if (range->End() > (block->lifetime_position() + 2)) {
SplitInitialDefinitionAt(range, block->lifetime_position() + 2);
}
ConvertAllUses(range);
BlockLocation(loc, block->lifetime_position(),
block->lifetime_position() + 2);
return;
}
} else {
@ -2504,18 +2504,17 @@ MoveOperands* FlowGraphAllocator::AddMoveAt(intptr_t pos,
Location from) {
ASSERT(!IsBlockEntry(pos));
if (pos < kNormalEntryPos) {
ASSERT(pos > 0);
// Parallel moves added to the GraphEntry (B0) will be added at the start
// of the normal entry (B1)
BlockEntryInstr* entry = InstructionAt(kNormalEntryPos)->AsBlockEntry();
return entry->GetParallelMove()->AddMove(to, from);
}
Instruction* instr = InstructionAt(pos);
  // Now that the GraphEntry (B0) no longer has any parameter instructions,
  // we should not attempt to add parallel moves to it.
ASSERT(pos >= kNormalEntryPos);
ParallelMoveInstr* parallel_move = NULL;
if (IsInstructionStartPosition(pos)) {
Instruction* instr = InstructionAt(pos);
if (auto entry = instr->AsFunctionEntry()) {
// Parallel moves added to the FunctionEntry will be added after the block
// entry.
parallel_move = CreateParallelMoveAfter(entry, pos);
} else if (IsInstructionStartPosition(pos)) {
parallel_move = CreateParallelMoveBefore(instr, pos);
} else {
parallel_move = CreateParallelMoveAfter(instr, pos);
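Two invariants fall out of the AddMoveAt change: positions below kNormalEntryPos can no longer occur, because the GraphEntry carries no parameter instructions anymore, and a move landing on a FunctionEntry is emitted after the entry instruction rather than before it. Reconstructed in one piece:

  ASSERT(pos >= kNormalEntryPos);  // GraphEntry (B0) receives no moves.
  ParallelMoveInstr* parallel_move = NULL;
  Instruction* instr = InstructionAt(pos);
  if (auto entry = instr->AsFunctionEntry()) {
    parallel_move = CreateParallelMoveAfter(entry, pos);  // after, not before
  } else if (IsInstructionStartPosition(pos)) {
    parallel_move = CreateParallelMoveBefore(instr, pos);
  } else {
    parallel_move = CreateParallelMoveAfter(instr, pos);
  }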
@ -2886,10 +2885,11 @@ static Representation RepresentationForRange(Representation definition_rep) {
}
void FlowGraphAllocator::CollectRepresentations() {
// Parameters.
// Constants.
GraphEntryInstr* graph_entry = flow_graph_.graph_entry();
for (intptr_t i = 0; i < graph_entry->initial_definitions()->length(); ++i) {
Definition* def = (*graph_entry->initial_definitions())[i];
auto initial_definitions = graph_entry->initial_definitions();
for (intptr_t i = 0; i < initial_definitions->length(); ++i) {
Definition* def = (*initial_definitions)[i];
value_representations_[def->ssa_temp_index()] =
RepresentationForRange(def->representation());
ASSERT(!def->HasPairRepresentation());
@ -2899,20 +2899,15 @@ void FlowGraphAllocator::CollectRepresentations() {
it.Advance()) {
BlockEntryInstr* block = it.Current();
    // Initial definitions on entry blocks (function, OSR and catch entries).
if (block->IsCatchBlockEntry()) {
CatchBlockEntryInstr* catch_entry = block->AsCatchBlockEntry();
for (intptr_t i = 0; i < catch_entry->initial_definitions()->length();
++i) {
Definition* def = (*catch_entry->initial_definitions())[i];
if (auto entry = block->AsBlockEntryWithInitialDefs()) {
initial_definitions = entry->initial_definitions();
for (intptr_t i = 0; i < initial_definitions->length(); ++i) {
Definition* def = (*initial_definitions)[i];
ASSERT(!def->HasPairRepresentation());
value_representations_[def->ssa_temp_index()] =
RepresentationForRange(def->representation());
}
}
// Phis.
if (block->IsJoinEntry()) {
JoinEntryInstr* join = block->AsJoinEntry();
} else if (auto join = block->AsJoinEntry()) {
for (PhiIterator it(join); !it.Done(); it.Advance()) {
PhiInstr* phi = it.Current();
ASSERT(phi != NULL && phi->ssa_temp_index() >= 0);
@ -2924,6 +2919,7 @@ void FlowGraphAllocator::CollectRepresentations() {
}
}
}
// Normal instructions.
for (ForwardInstructionIterator instr_it(block); !instr_it.Done();
instr_it.Advance()) {


@ -212,8 +212,9 @@ void RangeAnalysis::DiscoverSimpleInductionVariables() {
}
void RangeAnalysis::CollectValues() {
const GrowableArray<Definition*>& initial =
*flow_graph_->graph_entry()->initial_definitions();
auto graph_entry = flow_graph_->graph_entry();
auto& initial = *graph_entry->initial_definitions();
for (intptr_t i = 0; i < initial.length(); ++i) {
Definition* current = initial[i];
if (IsIntegerDefinition(current)) {
@ -221,23 +222,26 @@ void RangeAnalysis::CollectValues() {
}
}
for (BlockIterator block_it = flow_graph_->reverse_postorder_iterator();
!block_it.Done(); block_it.Advance()) {
BlockEntryInstr* block = block_it.Current();
if (block->IsGraphEntry() || block->IsCatchBlockEntry()) {
const GrowableArray<Definition*>& initial =
block->IsGraphEntry()
? *block->AsGraphEntry()->initial_definitions()
: *block->AsCatchBlockEntry()->initial_definitions();
for (intptr_t i = 0; i < initial.length(); ++i) {
Definition* current = initial[i];
for (intptr_t i = 0; i < graph_entry->SuccessorCount(); ++i) {
auto successor = graph_entry->SuccessorAt(i);
if (successor->IsFunctionEntry() || successor->IsCatchBlockEntry()) {
auto function_entry = successor->AsFunctionEntry();
auto catch_entry = successor->AsCatchBlockEntry();
const auto& initial = function_entry != nullptr
? *function_entry->initial_definitions()
: *catch_entry->initial_definitions();
for (intptr_t j = 0; j < initial.length(); ++j) {
Definition* current = initial[j];
if (IsIntegerDefinition(current)) {
values_.Add(current);
}
}
}
}
for (BlockIterator block_it = flow_graph_->reverse_postorder_iterator();
!block_it.Done(); block_it.Advance()) {
BlockEntryInstr* block = block_it.Current();
JoinEntryInstr* join = block->AsJoinEntry();
if (join != NULL) {
for (PhiIterator phi_it(join); !phi_it.Done(); phi_it.Advance()) {
@ -693,14 +697,28 @@ void RangeAnalysis::InferRanges() {
// Collect integer definitions (including constraints) in the reverse
// postorder. This improves convergence speed compared to iterating
  // values_ and constraints_ arrays separately.
const GrowableArray<Definition*>& initial =
*flow_graph_->graph_entry()->initial_definitions();
auto graph_entry = flow_graph_->graph_entry();
const auto& initial = *graph_entry->initial_definitions();
for (intptr_t i = 0; i < initial.length(); ++i) {
Definition* definition = initial[i];
if (set->Contains(definition->ssa_temp_index())) {
definitions_.Add(definition);
}
}
for (intptr_t i = 0; i < graph_entry->SuccessorCount(); ++i) {
auto successor = graph_entry->SuccessorAt(i);
if (auto function_entry = successor->AsFunctionEntry()) {
const auto& initial = *function_entry->initial_definitions();
for (intptr_t j = 0; j < initial.length(); ++j) {
Definition* definition = initial[j];
if (set->Contains(definition->ssa_temp_index())) {
definitions_.Add(definition);
}
}
}
}
CollectDefinitions(set);
// Perform an iteration of range inference just propagating ranges
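Both range-analysis hunks above follow from the same relocation: parameters and special parameters are now initial definitions of the entry blocks reachable from the graph entry, while constants remain on the graph entry itself. The traversal the two passes share, sketched:

  auto graph_entry = flow_graph_->graph_entry();
  for (intptr_t i = 0; i < graph_entry->SuccessorCount(); ++i) {
    auto successor = graph_entry->SuccessorAt(i);
    if (auto function_entry = successor->AsFunctionEntry()) {
      // Parameters / special parameters live here; CollectValues also
      // inspects CatchBlockEntry successors in the same way.
      const auto& initial = *function_entry->initial_definitions();
      for (intptr_t j = 0; j < initial.length(); ++j) {
        // ... per-definition handling (e.g. the IsIntegerDefinition filter) ...
      }
    }
  }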


@ -908,7 +908,15 @@ CompileType ParameterInstr::ComputeType() const {
// for example receiver.
GraphEntryInstr* graph_entry = block_->AsGraphEntry();
if (graph_entry == NULL) {
graph_entry = block_->AsCatchBlockEntry()->graph_entry();
if (auto function_entry = block_->AsFunctionEntry()) {
graph_entry = function_entry->graph_entry();
} else if (auto osr_entry = block_->AsOsrEntry()) {
graph_entry = osr_entry->graph_entry();
} else if (auto catch_entry = block_->AsCatchBlockEntry()) {
graph_entry = catch_entry->graph_entry();
} else {
UNREACHABLE();
}
}
// Parameters at OSR entries have type dynamic.
//


@ -571,6 +571,12 @@ TargetEntryInstr* BaseFlowGraphBuilder::BuildTargetEntry() {
TargetEntryInstr(AllocateBlockId(), CurrentTryIndex(), GetNextDeoptId());
}
FunctionEntryInstr* BaseFlowGraphBuilder::BuildFunctionEntry(
GraphEntryInstr* graph_entry) {
return new (Z) FunctionEntryInstr(graph_entry, AllocateBlockId(),
CurrentTryIndex(), GetNextDeoptId());
}
JoinEntryInstr* BaseFlowGraphBuilder::BuildJoinEntry(intptr_t try_index) {
return new (Z) JoinEntryInstr(AllocateBlockId(), try_index, GetNextDeoptId());
}
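With BuildFunctionEntry available, every frontend below converges on the same three-step construction: create the GraphEntryInstr without a normal entry, build a FunctionEntryInstr against it, and attach it explicitly. A minimal sketch of the pattern:

  graph_entry_ =
      new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
  auto normal_entry = BuildFunctionEntry(graph_entry_);
  graph_entry_->set_normal_entry(normal_entry);
  Fragment body(normal_entry);  // Prologue / body fragments start here.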


@ -194,6 +194,7 @@ class BaseFlowGraphBuilder {
ArgumentArray GetArguments(int count);
TargetEntryInstr* BuildTargetEntry();
FunctionEntryInstr* BuildFunctionEntry(GraphEntryInstr* graph_entry);
JoinEntryInstr* BuildJoinEntry();
JoinEntryInstr* BuildJoinEntry(intptr_t try_index);


@ -1446,9 +1446,11 @@ FlowGraph* BytecodeFlowGraphBuilder::BuildGraph() {
ProcessICDataInObjectPool(object_pool_);
TargetEntryInstr* normal_entry = B->BuildTargetEntry();
GraphEntryInstr* graph_entry =
new (Z) GraphEntryInstr(*parsed_function_, normal_entry, B->osr_id_);
new (Z) GraphEntryInstr(*parsed_function_, B->osr_id_);
auto normal_entry = B->BuildFunctionEntry(graph_entry);
graph_entry->set_normal_entry(normal_entry);
const PcDescriptors& descriptors =
PcDescriptors::Handle(Z, bytecode.pc_descriptors());


@ -6,6 +6,7 @@
#include "vm/compiler/frontend/flow_graph_builder.h"
#include "vm/compiler/backend/branch_optimizer.h"
#include "vm/compiler/backend/flow_graph.h"
#include "vm/compiler/backend/il.h"
#include "vm/compiler/frontend/kernel_to_il.h"
@ -229,7 +230,7 @@ Definition* InlineExitCollector::JoinReturns(BlockEntryInstr** exit_block,
}
}
void InlineExitCollector::ReplaceCall(TargetEntryInstr* callee_entry) {
void InlineExitCollector::ReplaceCall(BlockEntryInstr* callee_entry) {
ASSERT(call_->previous() != NULL);
ASSERT(call_->next() != NULL);
BlockEntryInstr* call_block = call_->GetBlock();
@ -260,7 +261,11 @@ void InlineExitCollector::ReplaceCall(TargetEntryInstr* callee_entry) {
CompilerState::Current().GetNextDeoptId()),
CompilerState::Current().GetNextDeoptId()); // No number check.
branch->InheritDeoptTarget(zone(), call_);
*branch->true_successor_address() = callee_entry;
auto true_target = BranchSimplifier::ToTargetEntry(zone(), callee_entry);
callee_entry->ReplaceAsPredecessorWith(true_target);
*branch->true_successor_address() = true_target;
*branch->false_successor_address() = false_block;
call_->previous()->AppendInstruction(branch);
@ -271,7 +276,7 @@ void InlineExitCollector::ReplaceCall(TargetEntryInstr* callee_entry) {
call_->ReplaceUsesWith(caller_graph_->constant_null());
// Update dominator tree.
call_block->AddDominatedBlock(callee_entry);
call_block->AddDominatedBlock(true_target);
call_block->AddDominatedBlock(false_block);
} else {


@ -40,7 +40,7 @@ class InlineExitCollector : public ZoneAllocated {
//
// After inlining the caller graph will have correctly adjusted the use
// lists. The block orders will need to be recomputed.
void ReplaceCall(TargetEntryInstr* callee_entry);
void ReplaceCall(BlockEntryInstr* callee_entry);
private:
struct Data {


@ -48,13 +48,14 @@ FlowGraph* StreamingFlowGraphBuilder::BuildGraphOfFieldInitializer() {
UNREACHABLE();
}
TargetEntryInstr* normal_entry = flow_graph_builder_->BuildTargetEntry();
flow_graph_builder_->graph_entry_ = new (Z) GraphEntryInstr(
*parsed_function(), normal_entry, Compiler::kNoOSRDeoptId);
B->graph_entry_ =
new (Z) GraphEntryInstr(*parsed_function(), Compiler::kNoOSRDeoptId);
auto normal_entry = B->BuildFunctionEntry(B->graph_entry_);
B->graph_entry_->set_normal_entry(normal_entry);
Fragment body(normal_entry);
body +=
flow_graph_builder_->CheckStackOverflowInPrologue(field_helper.position_);
body += B->CheckStackOverflowInPrologue(field_helper.position_);
if (field_helper.IsConst()) {
// this will (potentially) read the initializer, but reset the position.
body += Constant(Instance::ZoneHandle(
@ -66,9 +67,8 @@ FlowGraph* StreamingFlowGraphBuilder::BuildGraphOfFieldInitializer() {
body += Return(TokenPosition::kNoSource);
PrologueInfo prologue_info(-1, -1);
return new (Z)
FlowGraph(*parsed_function(), flow_graph_builder_->graph_entry_,
flow_graph_builder_->last_used_block_id_, prologue_info);
return new (Z) FlowGraph(*parsed_function(), B->graph_entry_,
B->last_used_block_id_, prologue_info);
}
FlowGraph* StreamingFlowGraphBuilder::BuildGraphOfFieldAccessor(
@ -88,9 +88,11 @@ FlowGraph* StreamingFlowGraphBuilder::BuildGraphOfFieldAccessor(
Field& field = Field::ZoneHandle(
Z, H.LookupFieldByKernelField(field_helper.canonical_name_));
TargetEntryInstr* normal_entry = flow_graph_builder_->BuildTargetEntry();
flow_graph_builder_->graph_entry_ = new (Z) GraphEntryInstr(
*parsed_function(), normal_entry, Compiler::kNoOSRDeoptId);
B->graph_entry_ =
new (Z) GraphEntryInstr(*parsed_function(), Compiler::kNoOSRDeoptId);
auto normal_entry = B->BuildFunctionEntry(B->graph_entry_);
B->graph_entry_->set_normal_entry(normal_entry);
Fragment body(normal_entry);
if (is_setter) {
@ -465,7 +467,7 @@ Fragment StreamingFlowGraphBuilder::BuildDefaultTypeHandling(
}
void StreamingFlowGraphBuilder::RecordUncheckedEntryPoint(
TargetEntryInstr* extra_entry) {
FunctionEntryInstr* extra_entry) {
if (!B->IsInlining()) {
B->graph_entry_->set_unchecked_entry(extra_entry);
} else if (B->InliningUncheckedEntry()) {
@ -497,13 +499,17 @@ FlowGraph* StreamingFlowGraphBuilder::BuildGraphOfImplicitClosureFunction(
// The prologue builder needs the default parameter values.
SetupDefaultParameterValues();
TargetEntryInstr* normal_entry = flow_graph_builder_->BuildTargetEntry();
flow_graph_builder_->graph_entry_ =
new (Z) GraphEntryInstr(*parsed_function(), Compiler::kNoOSRDeoptId);
auto normal_entry = flow_graph_builder_->BuildFunctionEntry(
flow_graph_builder_->graph_entry_);
flow_graph_builder_->graph_entry_->set_normal_entry(normal_entry);
PrologueInfo prologue_info(-1, -1);
BlockEntryInstr* instruction_cursor =
flow_graph_builder_->BuildPrologue(normal_entry, &prologue_info);
flow_graph_builder_->graph_entry_ = new (Z) GraphEntryInstr(
*parsed_function(), normal_entry, Compiler::kNoOSRDeoptId);
const Fragment prologue =
flow_graph_builder_->CheckStackOverflowInPrologue(function.token_pos());
@ -604,7 +610,7 @@ FlowGraph* StreamingFlowGraphBuilder::BuildGraphOfImplicitClosureFunction(
body += Return(function_node_helper.end_position_);
  // Set up multiple entrypoints if useful.
TargetEntryInstr* extra_entry = nullptr;
FunctionEntryInstr* extra_entry = nullptr;
if (function.MayHaveUncheckedEntryPoint(I)) {
// The prologue for a closure will always have context handling (e.g.
// setting up the 'this_variable'), but we don't need it on the unchecked
@ -653,14 +659,16 @@ FlowGraph* StreamingFlowGraphBuilder::BuildGraphOfNoSuchMethodForwarder(
// The prologue builder needs the default parameter values.
SetupDefaultParameterValues();
TargetEntryInstr* normal_entry = B->BuildTargetEntry();
B->graph_entry_ =
new (Z) GraphEntryInstr(*parsed_function(), Compiler::kNoOSRDeoptId);
auto normal_entry = B->BuildFunctionEntry(B->graph_entry_);
B->graph_entry_->set_normal_entry(normal_entry);
PrologueInfo prologue_info(-1, -1);
BlockEntryInstr* instruction_cursor =
B->BuildPrologue(normal_entry, &prologue_info);
B->graph_entry_ = new (Z) GraphEntryInstr(*parsed_function(), normal_entry,
Compiler::kNoOSRDeoptId);
Fragment body(instruction_cursor);
body += B->CheckStackOverflowInPrologue(function.token_pos());
@ -1157,22 +1165,22 @@ Fragment StreamingFlowGraphBuilder::PushAllArguments(PushedArguments* pushed) {
}
FlowGraph* StreamingFlowGraphBuilder::BuildGraphOfDynamicInvocationForwarder() {
const Function& dart_function = parsed_function()->function();
// The prologue builder needs the default parameter values.
SetupDefaultParameterValues();
const Function& dart_function = parsed_function()->function();
TargetEntryInstr* normal_entry = flow_graph_builder_->BuildTargetEntry();
PrologueInfo prologue_info(-1, -1);
BlockEntryInstr* instruction_cursor =
flow_graph_builder_->BuildPrologue(normal_entry, &prologue_info);
B->graph_entry_ = new (Z) GraphEntryInstr(*parsed_function(), B->osr_id_);
flow_graph_builder_->graph_entry_ = new (Z) GraphEntryInstr(
*parsed_function(), normal_entry, flow_graph_builder_->osr_id_);
auto normal_entry = B->BuildFunctionEntry(B->graph_entry_);
B->graph_entry_->set_normal_entry(normal_entry);
PrologueInfo prologue_info(-1, -1);
auto instruction_cursor = B->BuildPrologue(normal_entry, &prologue_info);
Fragment body;
if (!dart_function.is_native()) {
body += flow_graph_builder_->CheckStackOverflowInPrologue(
dart_function.token_pos());
body += B->CheckStackOverflowInPrologue(dart_function.token_pos());
}
ASSERT(parsed_function()->node_sequence()->scope()->num_context_variables() ==
@ -1240,14 +1248,13 @@ FlowGraph* StreamingFlowGraphBuilder::BuildGraphOfDynamicInvocationForwarder() {
instruction_cursor->LinkTo(body.entry);
GraphEntryInstr* graph_entry = flow_graph_builder_->graph_entry_;
GraphEntryInstr* graph_entry = B->graph_entry_;
// When compiling for OSR, use a depth first search to find the OSR
// entry and make graph entry jump to it instead of normal entry.
// Catch entries are always considered reachable, even if they
// become unreachable after OSR.
if (flow_graph_builder_->IsCompiledForOsr()) {
graph_entry->RelinkToOsrEntry(Z,
flow_graph_builder_->last_used_block_id_ + 1);
if (B->IsCompiledForOsr()) {
graph_entry->RelinkToOsrEntry(Z, B->last_used_block_id_ + 1);
}
return new (Z) FlowGraph(*parsed_function(), graph_entry,
B->last_used_block_id_, prologue_info);
@ -1647,7 +1654,7 @@ Fragment StreamingFlowGraphBuilder::BuildEntryPointsIntrospection() {
return call_hook;
}
TargetEntryInstr* StreamingFlowGraphBuilder::BuildSharedUncheckedEntryPoint(
FunctionEntryInstr* StreamingFlowGraphBuilder::BuildSharedUncheckedEntryPoint(
Fragment shared_prologue_linked_in,
Fragment skippable_checks,
Fragment body) {
@ -1664,7 +1671,7 @@ TargetEntryInstr* StreamingFlowGraphBuilder::BuildSharedUncheckedEntryPoint(
normal_entry += Drop();
normal_entry += Goto(join_entry);
auto* extra_target_entry = B->BuildTargetEntry();
auto* extra_target_entry = B->BuildFunctionEntry(B->graph_entry_);
Fragment extra_entry(extra_target_entry);
extra_entry += IntConstant(UncheckedEntryPointStyle::kSharedWithVariable);
extra_entry += StoreLocal(TokenPosition::kNoSource,
@ -1694,14 +1701,14 @@ TargetEntryInstr* StreamingFlowGraphBuilder::BuildSharedUncheckedEntryPoint(
return extra_target_entry;
}
TargetEntryInstr* StreamingFlowGraphBuilder::BuildSeparateUncheckedEntryPoint(
FunctionEntryInstr* StreamingFlowGraphBuilder::BuildSeparateUncheckedEntryPoint(
BlockEntryInstr* normal_entry,
Fragment normal_prologue,
Fragment extra_prologue,
Fragment shared_prologue,
Fragment body) {
auto* join_entry = BuildJoinEntry();
auto* extra_entry = B->BuildTargetEntry();
auto* extra_entry = B->BuildFunctionEntry(B->graph_entry_);
Fragment normal(normal_entry);
normal += IntConstant(UncheckedEntryPointStyle::kNone);
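Unchecked entry points follow the same rule: each extra entry is a FunctionEntryInstr built against graph_entry_, so one GraphEntryInstr can now fan out to several function entries (normal, unchecked) besides its OSR and catch entries. A sketch of how such an extra entry is assembled (the local that records the style is elided in the hunk above, so it is left abstract here):

  auto* extra_entry = B->BuildFunctionEntry(B->graph_entry_);
  Fragment extra(extra_entry);
  extra += IntConstant(UncheckedEntryPointStyle::kSharedWithVariable);
  // ... store the style into the entry-point local, then fall into the
  // shared prologue via the join ...
  extra += Goto(join_entry);

When not inlining, RecordUncheckedEntryPoint then registers the result through graph_entry_->set_unchecked_entry(extra_entry), as shown earlier in this file.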
@ -1779,15 +1786,17 @@ FlowGraph* StreamingFlowGraphBuilder::BuildGraphOfFunction(
}
const Function& dart_function = parsed_function()->function();
TargetEntryInstr* normal_entry = flow_graph_builder_->BuildTargetEntry();
auto graph_entry = flow_graph_builder_->graph_entry_ =
new (Z) GraphEntryInstr(*parsed_function(), flow_graph_builder_->osr_id_);
auto normal_entry = flow_graph_builder_->BuildFunctionEntry(graph_entry);
graph_entry->set_normal_entry(normal_entry);
PrologueInfo prologue_info(-1, -1);
BlockEntryInstr* instruction_cursor =
flow_graph_builder_->BuildPrologue(normal_entry, &prologue_info);
GraphEntryInstr* graph_entry = flow_graph_builder_->graph_entry_ =
new (Z) GraphEntryInstr(*parsed_function(), normal_entry,
flow_graph_builder_->osr_id_);
// The 'every_time_prologue' runs first and is run when resuming from yield
// points.
const Fragment every_time_prologue = BuildEveryTimePrologue(
@ -1818,7 +1827,7 @@ FlowGraph* StreamingFlowGraphBuilder::BuildGraphOfFunction(
Fragment function(instruction_cursor);
if (yield_continuations().is_empty()) {
TargetEntryInstr* extra_entry = nullptr;
FunctionEntryInstr* extra_entry = nullptr;
switch (extra_entry_point_style) {
case UncheckedEntryPointStyle::kNone: {
function += every_time_prologue + first_time_prologue +


@ -129,13 +129,13 @@ class StreamingFlowGraphBuilder : public KernelReaderHelper {
Fragment* explicit_checks,
Fragment* implicit_checks);
Fragment CompleteBodyWithYieldContinuations(Fragment body);
TargetEntryInstr* BuildSeparateUncheckedEntryPoint(
FunctionEntryInstr* BuildSeparateUncheckedEntryPoint(
BlockEntryInstr* normal_entry,
Fragment normal_prologue,
Fragment extra_prologue,
Fragment shared_prologue,
Fragment body);
TargetEntryInstr* BuildSharedUncheckedEntryPoint(
FunctionEntryInstr* BuildSharedUncheckedEntryPoint(
Fragment prologue_from_normal_entry,
Fragment skippable_checks,
Fragment body);
@ -149,7 +149,7 @@ class StreamingFlowGraphBuilder : public KernelReaderHelper {
const Fragment& every_time_prologue,
const Fragment& type_args_handling);
void RecordUncheckedEntryPoint(TargetEntryInstr* extra_entry);
void RecordUncheckedEntryPoint(FunctionEntryInstr* extra_entry);
void loop_depth_inc();
void loop_depth_dec();


@ -1116,7 +1116,7 @@ Fragment FlowGraphBuilder::AssertSubtype(TokenPosition position,
return instructions;
}
BlockEntryInstr* FlowGraphBuilder::BuildPrologue(TargetEntryInstr* normal_entry,
BlockEntryInstr* FlowGraphBuilder::BuildPrologue(BlockEntryInstr* normal_entry,
PrologueInfo* prologue_info) {
const bool compiling_for_osr = IsCompiledForOsr();
@ -1136,9 +1136,12 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfMethodExtractor(
const Function& function =
Function::ZoneHandle(Z, method.extracted_method_closure());
TargetEntryInstr* normal_entry = BuildTargetEntry();
graph_entry_ = new (Z)
GraphEntryInstr(*parsed_function_, normal_entry, Compiler::kNoOSRDeoptId);
graph_entry_ =
new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
auto normal_entry = BuildFunctionEntry(graph_entry_);
graph_entry_->set_normal_entry(normal_entry);
Fragment body(normal_entry);
body += CheckStackOverflowInPrologue(method.token_pos());
body += BuildImplicitClosureCreation(function);
@ -1155,12 +1158,15 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfNoSuchMethodDispatcher(
// This function is specialized for a receiver class, a method name, and
// the arguments descriptor at a call site.
TargetEntryInstr* normal_entry = BuildTargetEntry();
graph_entry_ =
new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
auto normal_entry = BuildFunctionEntry(graph_entry_);
graph_entry_->set_normal_entry(normal_entry);
PrologueInfo prologue_info(-1, -1);
BlockEntryInstr* instruction_cursor =
BuildPrologue(normal_entry, &prologue_info);
graph_entry_ = new (Z)
GraphEntryInstr(*parsed_function_, normal_entry, Compiler::kNoOSRDeoptId);
// The backend will expect an array of default values for all the named
// parameters, even if they are all known to be passed at the call site
@ -1304,12 +1310,15 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfInvokeFieldDispatcher(
}
parsed_function_->set_default_parameter_values(default_values);
TargetEntryInstr* normal_entry = BuildTargetEntry();
graph_entry_ =
new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
auto normal_entry = BuildFunctionEntry(graph_entry_);
graph_entry_->set_normal_entry(normal_entry);
PrologueInfo prologue_info(-1, -1);
BlockEntryInstr* instruction_cursor =
BuildPrologue(normal_entry, &prologue_info);
graph_entry_ = new (Z)
GraphEntryInstr(*parsed_function_, normal_entry, Compiler::kNoOSRDeoptId);
Fragment body(instruction_cursor);
body += CheckStackOverflowInPrologue(function.token_pos());


@ -56,7 +56,7 @@ class FlowGraphBuilder : public BaseFlowGraphBuilder {
FlowGraph* BuildGraph();
private:
BlockEntryInstr* BuildPrologue(TargetEntryInstr* normal_entry,
BlockEntryInstr* BuildPrologue(BlockEntryInstr* normal_entry,
PrologueInfo* prologue_info);
FlowGraph* BuildGraphOfMethodExtractor(const Function& method);


@ -218,11 +218,14 @@ bool Intrinsifier::GraphIntrinsify(const ParsedFunction& parsed_function,
ASSERT(!parsed_function.function().HasOptionalParameters());
PrologueInfo prologue_info(-1, -1);
auto graph_entry =
new GraphEntryInstr(parsed_function, Compiler::kNoOSRDeoptId);
intptr_t block_id = 1; // 0 is GraphEntry.
TargetEntryInstr* normal_entry = new TargetEntryInstr(
block_id, kInvalidTryIndex, CompilerState::Current().GetNextDeoptId());
GraphEntryInstr* graph_entry = new GraphEntryInstr(
parsed_function, normal_entry, Compiler::kNoOSRDeoptId);
graph_entry->set_normal_entry(
new FunctionEntryInstr(graph_entry, block_id, kInvalidTryIndex,
CompilerState::Current().GetNextDeoptId()));
FlowGraph* graph =
new FlowGraph(parsed_function, graph_entry, block_id, prologue_info);
const Function& function = parsed_function.function();
@ -376,7 +379,7 @@ static Representation RepresentationForCid(intptr_t cid) {
//
class BlockBuilder : public ValueObject {
public:
BlockBuilder(FlowGraph* flow_graph, TargetEntryInstr* entry)
BlockBuilder(FlowGraph* flow_graph, BlockEntryInstr* entry)
: flow_graph_(flow_graph),
entry_(entry),
current_(entry),
@ -388,7 +391,8 @@ class BlockBuilder : public ValueObject {
Definition* AddToInitialDefinitions(Definition* def) {
def->set_ssa_temp_index(flow_graph_->alloc_ssa_temp_index());
flow_graph_->AddToInitialDefinitions(def);
auto normal_entry = flow_graph_->graph_entry()->normal_entry();
flow_graph_->AddToInitialDefinitions(normal_entry, def);
return def;
}
@ -482,7 +486,7 @@ static void PrepareIndexedOp(BlockBuilder* builder,
static bool IntrinsifyArrayGetIndexed(FlowGraph* flow_graph,
intptr_t array_cid) {
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
Definition* index = builder.AddParameter(1);
@ -565,7 +569,7 @@ static bool IntrinsifyArrayGetIndexed(FlowGraph* flow_graph,
static bool IntrinsifyArraySetIndexed(FlowGraph* flow_graph,
intptr_t array_cid) {
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
Definition* value = builder.AddParameter(1);
@ -768,7 +772,7 @@ DEFINE_SIMD_ARRAY_GETTER_SETTER_INTRINSICS(Float64x2Array)
static bool BuildCodeUnitAt(FlowGraph* flow_graph, intptr_t cid) {
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
Definition* index = builder.AddParameter(1);
@ -816,7 +820,7 @@ static bool BuildSimdOp(FlowGraph* flow_graph, intptr_t cid, Token::Kind kind) {
Zone* zone = flow_graph->zone();
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
Definition* right = builder.AddParameter(1);
@ -860,7 +864,7 @@ static bool BuildFloat32x4Shuffle(FlowGraph* flow_graph,
return false;
}
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
Definition* receiver = builder.AddParameter(1);
@ -900,7 +904,7 @@ bool Intrinsifier::Build_Float32x4ShuffleW(FlowGraph* flow_graph) {
static bool BuildLoadField(FlowGraph* flow_graph, intptr_t offset) {
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
Definition* array = builder.AddParameter(1);
@ -933,7 +937,7 @@ bool Intrinsifier::Build_TypedDataLength(FlowGraph* flow_graph) {
bool Intrinsifier::Build_GrowableArrayCapacity(FlowGraph* flow_graph) {
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
Definition* array = builder.AddParameter(1);
@ -950,7 +954,7 @@ bool Intrinsifier::Build_GrowableArrayCapacity(FlowGraph* flow_graph) {
bool Intrinsifier::Build_GrowableArrayGetIndexed(FlowGraph* flow_graph) {
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
Definition* index = builder.AddParameter(1);
@ -990,7 +994,7 @@ bool Intrinsifier::Build_GrowableArraySetIndexed(FlowGraph* flow_graph) {
bool Intrinsifier::Build_GrowableArraySetIndexedUnchecked(
FlowGraph* flow_graph) {
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
Definition* value = builder.AddParameter(1);
@ -1017,7 +1021,7 @@ bool Intrinsifier::Build_GrowableArraySetIndexedUnchecked(
bool Intrinsifier::Build_GrowableArraySetData(FlowGraph* flow_graph) {
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
Definition* data = builder.AddParameter(1);
@ -1039,7 +1043,7 @@ bool Intrinsifier::Build_GrowableArraySetData(FlowGraph* flow_graph) {
bool Intrinsifier::Build_GrowableArraySetLength(FlowGraph* flow_graph) {
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
Definition* length = builder.AddParameter(1);
@ -1060,7 +1064,7 @@ bool Intrinsifier::Build_DoubleFlipSignBit(FlowGraph* flow_graph) {
return false;
}
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
Definition* receiver = builder.AddParameter(1);
@ -1106,7 +1110,7 @@ bool Intrinsifier::Build_MathSin(FlowGraph* flow_graph) {
if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false;
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
return BuildInvokeMathCFunction(&builder, MethodRecognizer::kMathSin);
@ -1116,7 +1120,7 @@ bool Intrinsifier::Build_MathCos(FlowGraph* flow_graph) {
if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false;
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
return BuildInvokeMathCFunction(&builder, MethodRecognizer::kMathCos);
@ -1126,7 +1130,7 @@ bool Intrinsifier::Build_MathTan(FlowGraph* flow_graph) {
if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false;
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
return BuildInvokeMathCFunction(&builder, MethodRecognizer::kMathTan);
@ -1136,7 +1140,7 @@ bool Intrinsifier::Build_MathAsin(FlowGraph* flow_graph) {
if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false;
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
return BuildInvokeMathCFunction(&builder, MethodRecognizer::kMathAsin);
@ -1146,7 +1150,7 @@ bool Intrinsifier::Build_MathAcos(FlowGraph* flow_graph) {
if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false;
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
return BuildInvokeMathCFunction(&builder, MethodRecognizer::kMathAcos);
@ -1156,7 +1160,7 @@ bool Intrinsifier::Build_MathAtan(FlowGraph* flow_graph) {
if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false;
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
return BuildInvokeMathCFunction(&builder, MethodRecognizer::kMathAtan);
@ -1166,7 +1170,7 @@ bool Intrinsifier::Build_MathAtan2(FlowGraph* flow_graph) {
if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false;
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
return BuildInvokeMathCFunction(&builder, MethodRecognizer::kMathAtan2,
@ -1177,7 +1181,7 @@ bool Intrinsifier::Build_DoubleMod(FlowGraph* flow_graph) {
if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false;
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
return BuildInvokeMathCFunction(&builder, MethodRecognizer::kDoubleMod,
@ -1191,7 +1195,7 @@ bool Intrinsifier::Build_DoubleCeil(FlowGraph* flow_graph) {
if (TargetCPUFeatures::double_truncate_round_supported()) return false;
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
return BuildInvokeMathCFunction(&builder, MethodRecognizer::kDoubleCeil);
@ -1204,7 +1208,7 @@ bool Intrinsifier::Build_DoubleFloor(FlowGraph* flow_graph) {
if (TargetCPUFeatures::double_truncate_round_supported()) return false;
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
return BuildInvokeMathCFunction(&builder, MethodRecognizer::kDoubleFloor);
@ -1217,7 +1221,7 @@ bool Intrinsifier::Build_DoubleTruncate(FlowGraph* flow_graph) {
if (TargetCPUFeatures::double_truncate_round_supported()) return false;
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
return BuildInvokeMathCFunction(&builder, MethodRecognizer::kDoubleTruncate);
@ -1227,7 +1231,7 @@ bool Intrinsifier::Build_DoubleRound(FlowGraph* flow_graph) {
if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false;
GraphEntryInstr* graph_entry = flow_graph->graph_entry();
TargetEntryInstr* normal_entry = graph_entry->normal_entry();
auto normal_entry = graph_entry->normal_entry();
BlockBuilder builder(flow_graph, normal_entry);
return BuildInvokeMathCFunction(&builder, MethodRecognizer::kDoubleRound);
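
The intrinsifier hunks above are all the same two-line change: after this cl,
normal_entry() hands back the new FunctionEntryInstr instead of a plain
TargetEntryInstr, so each call site drops the old type name and lets auto pick
up the new one. A minimal sketch of the resulting pattern, reusing the names
from the hunks:

  GraphEntryInstr* graph_entry = flow_graph->graph_entry();
  auto normal_entry = graph_entry->normal_entry();  // now a FunctionEntryInstr*
  BlockBuilder builder(flow_graph, normal_entry);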


@ -203,7 +203,7 @@ constexpr bool kDartPrecompiledRuntime = false;
P(enable_slow_path_sharing, bool, true, "Enable sharing of slow-path code.") \
P(shared_slow_path_triggers_gc, bool, false, \
"TESTING: slow-path triggers a GC.") \
P(enable_multiple_entrypoints, bool, false, \
P(enable_multiple_entrypoints, bool, true, \
"Enable multiple entrypoints per-function and related optimizations.") \
R(enable_testing_pragmas, false, bool, false, \
"Enable magical pragmas for testing purposes. Use at your own risk!") \


@ -113,11 +113,12 @@ IRRegExpMacroAssembler::IRRegExpMacroAssembler(
kMinStackSize / 4, Heap::kOld)));
// Create and generate all preset blocks.
entry_block_ = new (zone) GraphEntryInstr(
*parsed_function_,
new (zone) TargetEntryInstr(block_id_.Alloc(), kInvalidTryIndex,
GetNextDeoptId()),
osr_id);
entry_block_ = new (zone) GraphEntryInstr(*parsed_function_, osr_id);
auto function_entry = new (zone) FunctionEntryInstr(
entry_block_, block_id_.Alloc(), kInvalidTryIndex, GetNextDeoptId());
entry_block_->set_normal_entry(function_entry);
start_block_ = new (zone)
JoinEntryInstr(block_id_.Alloc(), kInvalidTryIndex, GetNextDeoptId());
success_block_ = new (zone)
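
The regexp assembler hunk shows the new wiring order: GraphEntryInstr no
longer receives its normal entry through the constructor; the
FunctionEntryInstr is allocated afterwards, pointing back at the graph entry,
and attached with set_normal_entry(). A minimal sketch of the two-step
pattern, with the block-id and deopt-id helpers stubbed out as placeholders:

  GraphEntryInstr* graph_entry =
      new (zone) GraphEntryInstr(parsed_function, osr_id);
  auto normal_entry = new (zone) FunctionEntryInstr(
      graph_entry, AllocateBlockId(), kInvalidTryIndex, GetNextDeoptId());
  graph_entry->set_normal_entry(normal_entry);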


@ -0,0 +1,61 @@
// Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
// VMOptions=--optimization-filter=triggerBug --no-background-compilation --enable-inlining-annotations --optimization-counter-threshold=2

const String NeverInline = 'NeverInline';

// Forces a scavenge by allocating a batch of small lists.
@NeverInline
dynamic triggerGC() {
  var a = [];
  for (int i = 0; i < 100; ++i) {
    a.add([]);
  }
  return a;
}

@NeverInline
void fillLowerStackWithReturnAddresses() {
  recursive(20);
}

// Leaves pointer-looking values (0x0deadbef) behind in dead stack frames.
@NeverInline
dynamic recursive(dynamic n) {
  if (n > 0) {
    recursive(n - 1);
  }
  return 0x0deadbef;
}

class Box {
  @NeverInline
  Box get value => global;
}

Box global;

main() {
  bool isTrue = true;
  bool hasProblem = true;

  // Gets optimized early due to --optimization-filter=triggerBug and the low
  // optimization counter threshold in the VMOptions line above.
  @NeverInline
  void triggerBug(Box box) {
    triggerGC();

    Box element = box.value;
    if (isTrue) {
      hasProblem = true;
      return;
    }

    try {
      Map map = {};
    } finally {}
  }

  final st = new Box();
  for (int i = 0; i < 1000; ++i) {
    fillLowerStackWithReturnAddresses();
    triggerBug(st);
  }
}