Reapply "Deoptimization support in inlined code."

This reapplies r12488 with a fix.

R=kmillikin@google.com,fschneider@google.com

Review URL: https://codereview.chromium.org//10952002

git-svn-id: https://dart.googlecode.com/svn/branches/bleeding_edge/dart@12499 260f80e4-7a28-3924-810f-c04153c831b5
zerny@google.com 2012-09-18 13:41:20 +00:00
parent 267fb0ae60
commit aa5281c88a
20 changed files with 337 additions and 214 deletions


@@ -1482,9 +1482,6 @@ void DeoptimizeAll() {
function = optimized_code.function();
unoptimized_code = function.unoptimized_code();
ASSERT(!unoptimized_code.IsNull());
uword continue_at_pc =
unoptimized_code.GetDeoptAfterPcAtDeoptId(deopt_id);
ASSERT(continue_at_pc != 0);
// The switch to unoptimized code may have already occurred.
if (function.HasOptimizedCode()) {
function.SwitchToUnoptimizedCode();
@@ -1609,9 +1606,9 @@ END_LEAF_RUNTIME_ENTRY
static void DeoptimizeWithDeoptInfo(const Code& code,
const DeoptInfo& deopt_info,
const StackFrame& caller_frame) {
static intptr_t DeoptimizeWithDeoptInfo(const Code& code,
const DeoptInfo& deopt_info,
const StackFrame& caller_frame) {
const intptr_t len = deopt_info.Length();
GrowableArray<DeoptInstr*> deopt_instructions(len);
for (intptr_t i = 0; i < len; i++) {
@@ -1644,12 +1641,13 @@ static void DeoptimizeWithDeoptInfo(const Code& code,
deopt_instructions[i]->ToCString());
}
}
return deopt_context.GetCallerFp();
}
// The stack has been adjusted to fit all values for the unoptimized frame.
// Fill the unoptimized frame.
DEFINE_LEAF_RUNTIME_ENTRY(void, DeoptimizeFillFrame, uword last_fp) {
DEFINE_LEAF_RUNTIME_ENTRY(intptr_t, DeoptimizeFillFrame, uword last_fp) {
Isolate* isolate = Isolate::Current();
Zone zone(isolate);
HANDLESCOPE(isolate);
@@ -1672,25 +1670,14 @@ DEFINE_LEAF_RUNTIME_ENTRY(void, DeoptimizeFillFrame, uword last_fp) {
GetDeoptIxDescrAtPc(optimized_code, caller_frame->pc(),
&deopt_id, &deopt_reason, &deopt_index);
ASSERT(deopt_id != Isolate::kNoDeoptId);
uword continue_at_pc = 0;
if (deopt_reason == kDeoptAtCall) {
continue_at_pc = unoptimized_code.GetDeoptAfterPcAtDeoptId(deopt_id);
} else {
continue_at_pc = unoptimized_code.GetDeoptBeforePcAtDeoptId(deopt_id);
}
ASSERT(continue_at_pc != 0);
if (FLAG_trace_deopt) {
OS::Print(" -> continue at %#"Px"\n", continue_at_pc);
// TODO(srdjan): If we could allow GC, we could print the line where
// deoptimization occurred.
}
const Array& deopt_info_array =
Array::Handle(optimized_code.deopt_info_array());
ASSERT(!deopt_info_array.IsNull());
DeoptInfo& deopt_info = DeoptInfo::Handle();
deopt_info ^= deopt_info_array.At(deopt_index);
ASSERT(!deopt_info.IsNull());
DeoptimizeWithDeoptInfo(optimized_code, deopt_info, *caller_frame);
const intptr_t caller_fp =
DeoptimizeWithDeoptInfo(optimized_code, deopt_info, *caller_frame);
isolate->SetDeoptFrameCopy(NULL, 0);
isolate->set_deopt_cpu_registers_copy(NULL);
@@ -1707,6 +1694,7 @@ DEFINE_LEAF_RUNTIME_ENTRY(void, DeoptimizeFillFrame, uword last_fp) {
if (function.HasOptimizedCode()) {
function.SwitchToUnoptimizedCode();
}
return caller_fp;
}
END_LEAF_RUNTIME_ENTRY


@@ -57,24 +57,6 @@ DEFINE_RUNTIME_ENTRY(CompileFunction, 1) {
}
// Returns an array indexed by deopt id, containing the extracted ICData.
static RawArray* ExtractTypeFeedbackArray(const Code& code) {
ASSERT(!code.IsNull() && !code.is_optimized());
GrowableArray<intptr_t> deopt_ids;
const GrowableObjectArray& ic_data_objs =
GrowableObjectArray::Handle(GrowableObjectArray::New());
const intptr_t max_id =
code.ExtractIcDataArraysAtCalls(&deopt_ids, ic_data_objs);
const Array& result = Array::Handle(Array::New(max_id + 1));
for (intptr_t i = 0; i < deopt_ids.length(); i++) {
intptr_t result_index = deopt_ids[i];
ASSERT(result.At(result_index) == Object::null());
result.SetAt(result_index, Object::Handle(ic_data_objs.At(i)));
}
return result.raw();
}
RawError* Compiler::Compile(const Library& library, const Script& script) {
Isolate* isolate = Isolate::Current();
LongJump* base = isolate->long_jump_base();
@@ -157,7 +139,7 @@ static bool CompileParsedFunctionHelper(const ParsedFunction& parsed_function,
const Code& unoptimized_code =
Code::Handle(parsed_function.function().unoptimized_code());
isolate->set_ic_data_array(
ExtractTypeFeedbackArray(unoptimized_code));
unoptimized_code.ExtractTypeFeedbackArray());
}
}


@@ -1,4 +1,4 @@
// Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.


@@ -28,16 +28,25 @@ DeoptimizationContext::DeoptimizationContext(intptr_t* to_frame_start,
from_frame_size_ = isolate_->deopt_frame_copy_size();
registers_copy_ = isolate_->deopt_cpu_registers_copy();
xmm_registers_copy_ = isolate_->deopt_xmm_registers_copy();
caller_fp_ = GetFromFp();
}
intptr_t* DeoptimizationContext::GetFromFpAddress() const {
return &from_frame_[from_frame_size_ - 1 - num_args_ - 1];
intptr_t DeoptimizationContext::GetFromFp() const {
return from_frame_[from_frame_size_ - 1 - num_args_ - 1];
}
intptr_t* DeoptimizationContext::GetFromPcAddress() const {
return &from_frame_[from_frame_size_ - 1 - num_args_];
intptr_t DeoptimizationContext::GetFromPc() const {
return from_frame_[from_frame_size_ - 1 - num_args_];
}
intptr_t DeoptimizationContext::GetCallerFp() const {
return caller_fp_;
}
void DeoptimizationContext::SetCallerFp(intptr_t caller_fp) {
caller_fp_ = caller_fp;
}
// Deoptimization instruction moving value from optimized frame at
@@ -334,9 +343,10 @@ class DeoptCallerFpInstr : public DeoptInstr {
}
void Execute(DeoptimizationContext* deopt_context, intptr_t to_index) {
intptr_t* from_addr = deopt_context->GetFromFpAddress();
intptr_t from = deopt_context->GetCallerFp();
intptr_t* to_addr = deopt_context->GetToFrameAddressAt(to_index);
*to_addr = *from_addr;
*to_addr = from;
deopt_context->SetCallerFp(reinterpret_cast<intptr_t>(to_addr));
}
private:
@@ -358,9 +368,9 @@ class DeoptCallerPcInstr : public DeoptInstr {
}
void Execute(DeoptimizationContext* deopt_context, intptr_t to_index) {
intptr_t* from_addr = deopt_context->GetFromPcAddress();
intptr_t from = deopt_context->GetFromPc();
intptr_t* to_addr = deopt_context->GetToFrameAddressAt(to_index);
*to_addr = *from_addr;
*to_addr = from;
}
private:
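
A minimal, self-contained sketch of the caller-FP chaining that DeoptCallerFpInstr now performs (toy types and an assumed seed value; only DeoptCallerFpInstr and DeoptimizationContext are the VM's names, everything else here is illustrative):

// Write the FP gathered so far into the target slot, then record that
// slot's address so the next frame's caller-fp slot links back to it.
#include <cstdint>
#include <cstdio>

struct ToyDeoptContext {
  intptr_t caller_fp;  // seeded from the optimized frame's saved caller FP
};

void ExecuteCallerFp(ToyDeoptContext* ctx, intptr_t* to_addr) {
  *to_addr = ctx->caller_fp;                             // link to previous frame
  ctx->caller_fp = reinterpret_cast<intptr_t>(to_addr);  // next frame links here
}

int main() {
  intptr_t slots[2] = {0, 0};      // stand-ins for two frames' caller-fp slots
  ToyDeoptContext ctx = {0x1000};  // assumed initial caller FP
  ExecuteCallerFp(&ctx, &slots[0]);
  ExecuteCallerFp(&ctx, &slots[1]);  // slots[1] now holds &slots[0]
  std::printf("%p\n", reinterpret_cast<void*>(slots[1]));
  return 0;
}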


@@ -36,8 +36,11 @@ class DeoptimizationContext : public ValueObject {
return &to_frame_[index];
}
intptr_t* GetFromFpAddress() const;
intptr_t* GetFromPcAddress() const;
intptr_t GetFromFp() const;
intptr_t GetFromPc() const;
intptr_t GetCallerFp() const;
void SetCallerFp(intptr_t callers_fp);
RawObject* ObjectAt(intptr_t index) const {
return object_table_.At(index);
@@ -64,6 +67,7 @@
intptr_t* registers_copy_;
double* xmm_registers_copy_;
const intptr_t num_args_;
intptr_t caller_fp_;
Isolate* isolate_;
DISALLOW_COPY_AND_ASSIGN(DeoptimizationContext);


@@ -514,7 +514,9 @@ void FlowGraph::Rename(GrowableArray<PhiInstr*>* live_phis) {
start_env.Add(graph_entry_->constant_null());
}
graph_entry_->set_start_env(
Environment::From(start_env, num_non_copied_params_, NULL));
Environment::From(start_env,
num_non_copied_params_,
parsed_function_.function()));
BlockEntryInstr* normal_entry = graph_entry_->SuccessorAt(0);
ASSERT(normal_entry != NULL); // Must have entry.
@@ -547,8 +549,12 @@ void FlowGraph::RenameRecursive(BlockEntryInstr* block_entry,
// Attach current environment to the instruction. First, each instruction
// gets a full copy of the environment. Later we optimize this by
// eliminating unnecessary environments.
current->set_env(
Environment::From(*env, num_non_copied_params_, NULL));
current->set_env(Environment::From(*env,
num_non_copied_params_,
parsed_function_.function()));
if (current->CanDeoptimize()) {
current->env()->set_deopt_id(current->deopt_id());
}
// 2a. Handle uses:
// Update expression stack environment for each use.
@@ -778,6 +784,12 @@ void FlowGraph::InlineCall(Definition* call, FlowGraph* callee_graph) {
TargetEntryInstr* callee_entry = callee_graph->graph_entry()->normal_entry();
ZoneGrowableArray<ReturnInstr*>* callee_exits = callee_graph->exits();
// 0. Attach the outer environment on each instruction in the callee graph.
for (ForwardInstructionIterator it(callee_entry); !it.Done(); it.Advance()) {
Instruction* instr = it.Current();
if (instr->CanDeoptimize()) call->env()->DeepCopyToOuter(instr);
}
// 1. Insert the callee graph into the caller graph.
if (callee_exits->is_empty()) {
// If no normal exits exist, inline and truncate the block after inlining.


@@ -149,9 +149,12 @@ void FlowGraphAllocator::ComputeInitialSets() {
// Add non-argument uses from the deoptimization environment (pushed
// arguments are not allocated by the register allocator).
if (current->env() != NULL) {
for (intptr_t i = 0; i < current->env()->Length(); ++i) {
Value* value = current->env()->ValueAt(i);
if (!value->definition()->IsPushArgument()) {
for (Environment::DeepIterator env_it(current->env());
!env_it.Done();
env_it.Advance()) {
Value* value = env_it.CurrentValue();
if (!value->definition()->IsPushArgument() &&
!value->BindsToConstant()) {
live_in->Add(value->definition()->ssa_temp_index());
}
}
@@ -738,49 +741,53 @@ void FlowGraphAllocator::ConnectIncomingPhiMoves(BlockEntryInstr* block) {
void FlowGraphAllocator::ProcessEnvironmentUses(BlockEntryInstr* block,
Instruction* current) {
ASSERT(current->env() != NULL);
Environment* env = current->env();
while (env != NULL) {
// Any value mentioned in the deoptimization environment should survive
// until the end of instruction but it does not need to be in the register.
// Expected shape of live range:
//
// i i'
// value -----*
//
// Any value mentioned in the deoptimization environment should survive
// until the end of instruction but it does not need to be in the register.
// Expected shape of live range:
//
// i i'
// value -----*
//
if (env->Length() == 0) return;
const intptr_t block_start_pos = block->start_pos();
const intptr_t use_pos = current->lifetime_position() + 1;
Location* locations =
Isolate::Current()->current_zone()->Alloc<Location>(env->Length());
for (intptr_t i = 0; i < env->Length(); ++i) {
Value* value = env->ValueAt(i);
locations[i] = Location::Any();
Definition* def = value->definition();
if (def->IsPushArgument()) {
// Frame size is unknown until after allocation.
locations[i] = Location::NoLocation();
if (env->Length() == 0) {
env = env->outer();
continue;
}
ConstantInstr* constant = def->AsConstant();
if (constant != NULL) {
locations[i] = Location::Constant(constant->value());
continue;
const intptr_t block_start_pos = block->start_pos();
const intptr_t use_pos = current->lifetime_position() + 1;
Location* locations =
Isolate::Current()->current_zone()->Alloc<Location>(env->Length());
for (intptr_t i = 0; i < env->Length(); ++i) {
Value* value = env->ValueAt(i);
locations[i] = Location::Any();
Definition* def = value->definition();
if (def->IsPushArgument()) {
// Frame size is unknown until after allocation.
locations[i] = Location::NoLocation();
continue;
}
ConstantInstr* constant = def->AsConstant();
if (constant != NULL) {
locations[i] = Location::Constant(constant->value());
continue;
}
const intptr_t vreg = def->ssa_temp_index();
LiveRange* range = GetLiveRange(vreg);
range->AddUseInterval(block_start_pos, use_pos);
range->AddUse(use_pos, &locations[i]);
}
const intptr_t vreg = def->ssa_temp_index();
LiveRange* range = GetLiveRange(vreg);
range->AddUseInterval(block_start_pos, use_pos);
range->AddUse(use_pos, &locations[i]);
env->set_locations(locations);
env = env->outer();
}
env->set_locations(locations);
}
@@ -1371,6 +1378,7 @@ LiveRange* LiveRange::SplitAt(intptr_t split_pos) {
last_before_split = interval;
}
ASSERT(last_before_split != NULL);
ASSERT(last_before_split->next() == first_after_split);
ASSERT(last_before_split->end() <= split_pos);
ASSERT(split_pos <= first_after_split->start());
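
The rewritten ProcessEnvironmentUses above now walks the whole environment chain (env = env->outer()) and gives every environment value the live-range shape shown in its comment. A self-contained sketch of that shape under assumed positions (toy LiveRange; the VM's tracks locations and register hints as well):

#include <cstdio>
#include <vector>

struct UseInterval { int start; int end; };

struct ToyLiveRange {
  std::vector<UseInterval> intervals;
  void AddUseInterval(int start, int end) {
    intervals.push_back(UseInterval{start, end});
  }
};

int main() {
  const int block_start_pos = 10;             // block->start_pos()
  const int lifetime_position = 14;           // instruction position i
  const int use_pos = lifetime_position + 1;  // i', just past the instruction
  ToyLiveRange range;
  // A deopt use keeps the value alive through the instruction but makes
  // no register demand (Location::Any() in the real allocator).
  range.AddUseInterval(block_start_pos, use_pos);
  std::printf("value live over [%d, %d]\n",
              range.intervals[0].start, range.intervals[0].end);
  return 0;
}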


@@ -66,7 +66,6 @@ void EffectGraphVisitor::Append(const EffectGraphVisitor& other_fragment) {
Value* EffectGraphVisitor::Bind(Definition* definition) {
ASSERT(is_open());
ASSERT(!owner()->InInliningContext() || !definition->CanDeoptimize());
DeallocateTempIndex(definition->InputCount());
definition->set_use_kind(Definition::kValue);
definition->set_temp_index(AllocateTempIndex());
@@ -82,7 +81,6 @@ Value* EffectGraphVisitor::Bind(Definition* definition) {
void EffectGraphVisitor::Do(Definition* definition) {
ASSERT(is_open());
ASSERT(!owner()->InInliningContext() || !definition->CanDeoptimize());
DeallocateTempIndex(definition->InputCount());
definition->set_use_kind(Definition::kEffect);
if (is_empty()) {
@@ -417,8 +415,6 @@ void TestGraphVisitor::ReturnDefinition(Definition* definition) {
// Special handling for AND/OR.
void TestGraphVisitor::VisitBinaryOpNode(BinaryOpNode* node) {
InlineBailout("TestGraphVisitor::VisitBinaryOpNode");
// Operators "&&" and "||" cannot be overloaded therefore do not call
// operator.
if ((node->kind() == Token::kAND) || (node->kind() == Token::kOR)) {
@@ -471,7 +467,7 @@ void EffectGraphVisitor::VisitReturnNode(ReturnNode* node) {
Append(for_value);
for (intptr_t i = 0; i < node->inlined_finally_list_length(); i++) {
InlineBailout("EffectGraphVisitor::VisitReturnNode (finally)");
InlineBailout("EffectGraphVisitor::VisitReturnNode (exception)");
EffectGraphVisitor for_effect(owner(), temp_index());
node->InlinedFinallyNodeAt(i)->Visit(&for_effect);
Append(for_effect);
@@ -628,7 +624,6 @@ void EffectGraphVisitor::VisitBinaryOpNode(BinaryOpNode* node) {
}
return;
}
InlineBailout("EffectGraphVisitor::VisitBinaryOpNode (deopt)");
ValueGraphVisitor for_left_value(owner(), temp_index());
node->left()->Visit(&for_left_value);
Append(for_left_value);
@@ -675,7 +670,6 @@ void ValueGraphVisitor::VisitBinaryOpNode(BinaryOpNode* node) {
node->right()->Visit(&for_right);
Value* right_value = for_right.value();
if (FLAG_enable_type_checks) {
InlineBailout("ValueGraphVisitor::VisitBinaryOpNode (type check)");
right_value =
for_right.Bind(new AssertBooleanInstr(node->right()->token_pos(),
right_value));
@@ -745,7 +739,6 @@ AssertAssignableInstr* EffectGraphVisitor::BuildAssertAssignable(
Value* value,
const AbstractType& dst_type,
const String& dst_name) {
InlineBailout("EffectGraphVisitor::BuildAssertAssignable (deopt)");
// Build the type check computation.
Value* instantiator = NULL;
Value* instantiator_type_arguments = NULL;
@@ -843,7 +836,6 @@ void ValueGraphVisitor::BuildTypeTest(ComparisonNode* node) {
ReturnDefinition(result);
return;
}
InlineBailout("ValueGraphVisitor::BuildTypeTest (deopt)");
ValueGraphVisitor for_left_value(owner(), temp_index());
node->left()->Visit(&for_left_value);
@@ -858,6 +850,8 @@ void ValueGraphVisitor::BuildTypeTest(ComparisonNode* node) {
&instantiator,
&instantiator_type_arguments);
}
// TODO(zerny): Remove this when issues 5216 and 5217 are fixed.
InlineBailout("instance of");
InstanceOfInstr* instance_of =
new InstanceOfInstr(node->token_pos(),
for_left_value.value(),
@@ -911,7 +905,6 @@ void EffectGraphVisitor::VisitComparisonNode(ComparisonNode* node) {
ReturnDefinition(comp);
return;
}
InlineBailout("EffectGraphVisitor::VisitComparisonNode (deopt)");
if ((node->kind() == Token::kEQ) || (node->kind() == Token::kNE)) {
ValueGraphVisitor for_left_value(owner(), temp_index());
@@ -973,7 +966,6 @@ void EffectGraphVisitor::VisitUnaryOpNode(UnaryOpNode* node) {
ReturnDefinition(negate);
return;
}
InlineBailout("EffectGraphVisitor::VisitUnaryOpNode (deopt)");
ValueGraphVisitor for_value(owner(), temp_index());
node->operand()->Visit(&for_value);
@@ -1093,7 +1085,7 @@ void EffectGraphVisitor::VisitSwitchNode(SwitchNode* node) {
// Note: The specification of switch/case is under discussion and may change
// drastically.
void EffectGraphVisitor::VisitCaseNode(CaseNode* node) {
InlineBailout("EffectGraphVisitor::VisitCaseNode");
InlineBailout("EffectGraphVisitor::VisitCaseNode (control)");
const intptr_t len = node->case_expressions()->length();
// Create case statements instructions.
EffectGraphVisitor for_case_statements(owner(), temp_index());
@@ -1186,7 +1178,7 @@ void EffectGraphVisitor::VisitCaseNode(CaseNode* node) {
// f) loop-exit-target
// g) break-join (optional)
void EffectGraphVisitor::VisitWhileNode(WhileNode* node) {
InlineBailout("EffectGraphVisitor::VisitWhileNode");
InlineBailout("EffectGraphVisitor::VisitWhileNode (control)");
TestGraphVisitor for_test(owner(),
temp_index(),
node->condition()->token_pos());
@@ -1224,7 +1216,7 @@ void EffectGraphVisitor::VisitWhileNode(WhileNode* node) {
// f) loop-exit-target
// g) break-join
void EffectGraphVisitor::VisitDoWhileNode(DoWhileNode* node) {
InlineBailout("EffectGraphVisitor::VisitDoWhileNode");
InlineBailout("EffectGraphVisitor::VisitDoWhileNode (control)");
// Traverse body first in order to generate continue and break labels.
EffectGraphVisitor for_body(owner(), temp_index());
for_body.Do(
@@ -1275,7 +1267,7 @@ void EffectGraphVisitor::VisitDoWhileNode(DoWhileNode* node) {
// h) loop-exit-target
// i) break-join
void EffectGraphVisitor::VisitForNode(ForNode* node) {
InlineBailout("EffectGraphVisitor::VisitForNode");
InlineBailout("EffectGraphVisitor::VisitForNode (control)");
EffectGraphVisitor for_initializer(owner(), temp_index());
node->initializer()->Visit(&for_initializer);
Append(for_initializer);
@@ -1347,7 +1339,7 @@
void EffectGraphVisitor::VisitJumpNode(JumpNode* node) {
InlineBailout("EffectGraphVisitor::VisitJumpNode");
InlineBailout("EffectGraphVisitor::VisitJumpNode (control)");
for (intptr_t i = 0; i < node->inlined_finally_list_length(); i++) {
EffectGraphVisitor for_effect(owner(), temp_index());
node->InlinedFinallyNodeAt(i)->Visit(&for_effect);
@@ -1512,7 +1504,6 @@ void EffectGraphVisitor::BuildPushArguments(
void EffectGraphVisitor::VisitInstanceCallNode(InstanceCallNode* node) {
InlineBailout("EffectGraphVisitor::VisitInstanceCallNode (deopt)");
ValueGraphVisitor for_receiver(owner(), temp_index());
node->receiver()->Visit(&for_receiver);
Append(for_receiver);
@@ -1534,7 +1525,6 @@ void EffectGraphVisitor::VisitInstanceCallNode(InstanceCallNode* node) {
// <Expression> ::= StaticCall { function: Function
// arguments: <ArgumentList> }
void EffectGraphVisitor::VisitStaticCallNode(StaticCallNode* node) {
InlineBailout("EffectGraphVisitor::VisitStaticCallNode (deopt)");
ZoneGrowableArray<PushArgumentInstr*>* arguments =
new ZoneGrowableArray<PushArgumentInstr*>(node->arguments()->length());
BuildPushArguments(*node->arguments(), arguments);
@@ -1549,7 +1539,6 @@ void EffectGraphVisitor::VisitStaticCallNode(StaticCallNode* node) {
ClosureCallInstr* EffectGraphVisitor::BuildClosureCall(
ClosureCallNode* node) {
InlineBailout("EffectGraphVisitor::BuildClosureCall (deopt)");
ValueGraphVisitor for_closure(owner(), temp_index());
node->closure()->Visit(&for_closure);
Append(for_closure);
@@ -1582,7 +1571,7 @@ void ValueGraphVisitor::VisitClosureCallNode(ClosureCallNode* node) {
void EffectGraphVisitor::VisitCloneContextNode(CloneContextNode* node) {
InlineBailout("EffectGraphVisitor::VisitCloneContextNode (deopt)");
InlineBailout("EffectGraphVisitor::VisitCloneContextNode (context)");
Value* context = Bind(new CurrentContextInstr());
Value* clone = Bind(new CloneContextInstr(node->token_pos(), context));
ReturnDefinition(new StoreContextInstr(clone));
@@ -1613,7 +1602,6 @@ Value* EffectGraphVisitor::BuildObjectAllocation(
// Although the type arguments may be uninstantiated at compile time, they
// may represent the identity vector and may be replaced by the instantiated
// type arguments of the instantiator at run time.
InlineBailout("EffectGraphVisitor::BuildObjectAllocation (deopt)");
allocate_comp = new AllocateObjectWithBoundsCheckInstr(node,
type_arguments,
instantiator);
@@ -1634,7 +1622,6 @@ Value* EffectGraphVisitor::BuildObjectAllocation(
void EffectGraphVisitor::BuildConstructorCall(
ConstructorCallNode* node,
PushArgumentInstr* push_alloc_value) {
InlineBailout("EffectGraphVisitor::BuildConstructorCall (deopt)");
Value* ctor_arg = Bind(
new ConstantInstr(Smi::ZoneHandle(Smi::New(Function::kCtorPhaseAll))));
PushArgumentInstr* push_ctor_arg = PushArgument(ctor_arg);
@@ -1677,7 +1664,6 @@ static intptr_t GetResultCidOfConstructor(ConstructorCallNode* node) {
void EffectGraphVisitor::VisitConstructorCallNode(ConstructorCallNode* node) {
if (node->constructor().IsFactory()) {
InlineBailout("EffectGraphVisitor::VisitConstructorCallNode (deopt)");
ZoneGrowableArray<PushArgumentInstr*>* arguments =
new ZoneGrowableArray<PushArgumentInstr*>();
PushArgumentInstr* push_type_arguments = PushArgument(
@@ -1775,7 +1761,6 @@ Value* EffectGraphVisitor::BuildInstantiatorTypeArguments(
instantiator_class.type_arguments_instance_field_offset();
ASSERT(type_arguments_instance_field_offset != Class::kNoTypeArguments);
InlineBailout("EffectGraphVisitor::BuildInstantiatorTypeArguments (deopt)");
return Bind(new LoadFieldInstr(
instantiator,
type_arguments_instance_field_offset,
@@ -1789,7 +1774,6 @@ Value* EffectGraphVisitor::BuildInstantiatedTypeArguments(
if (type_arguments.IsNull() || type_arguments.IsInstantiated()) {
return Bind(new ConstantInstr(type_arguments));
}
InlineBailout("EffectGraphVisitor::BuildInstantiatedTypeArguments (deopt)");
// The type arguments are uninstantiated.
Value* instantiator_value =
BuildInstantiatorTypeArguments(token_pos, NULL);
@@ -1909,7 +1893,6 @@ void ValueGraphVisitor::VisitConstructorCallNode(ConstructorCallNode* node) {
void EffectGraphVisitor::VisitInstanceGetterNode(InstanceGetterNode* node) {
InlineBailout("EffectGraphVisitor::VisitInstanceGetterNode (deopt)");
ValueGraphVisitor for_receiver(owner(), temp_index());
node->receiver()->Visit(&for_receiver);
Append(for_receiver);
@@ -1950,7 +1933,6 @@ void EffectGraphVisitor::BuildInstanceSetterArguments(
void EffectGraphVisitor::VisitInstanceSetterNode(InstanceSetterNode* node) {
InlineBailout("EffectGraphVisitor::VisitInstanceSetterNode (deopt)");
ZoneGrowableArray<PushArgumentInstr*>* arguments =
new ZoneGrowableArray<PushArgumentInstr*>(2);
BuildInstanceSetterArguments(node, arguments, kResultNotNeeded);
@@ -1967,7 +1949,6 @@ void EffectGraphVisitor::VisitInstanceSetterNode(InstanceSetterNode* node) {
void ValueGraphVisitor::VisitInstanceSetterNode(InstanceSetterNode* node) {
InlineBailout("ValueGraphVisitor::VisitInstanceSetterNode (deopt)");
ZoneGrowableArray<PushArgumentInstr*>* arguments =
new ZoneGrowableArray<PushArgumentInstr*>(2);
BuildInstanceSetterArguments(node, arguments, kResultNeeded);
@@ -1984,7 +1965,6 @@ void ValueGraphVisitor::VisitInstanceSetterNode(InstanceSetterNode* node) {
void EffectGraphVisitor::VisitStaticGetterNode(StaticGetterNode* node) {
InlineBailout("EffectGraphVisitor::VisitStaticGetterNode (deopt)");
const String& getter_name =
String::Handle(Field::GetterName(node->field_name()));
ZoneGrowableArray<PushArgumentInstr*>* arguments =
@@ -2014,7 +1994,6 @@ void EffectGraphVisitor::VisitStaticGetterNode(StaticGetterNode* node) {
void EffectGraphVisitor::BuildStaticSetter(StaticSetterNode* node,
bool result_is_needed) {
InlineBailout("EffectGraphVisitor::VisitStaticSetter (deopt)");
const String& setter_name =
String::Handle(Field::SetterName(node->field_name()));
// A super setter is an instance setter whose setter function is
@@ -2223,7 +2202,6 @@ void ValueGraphVisitor::VisitStoreStaticFieldNode(StoreStaticFieldNode* node) {
void EffectGraphVisitor::VisitLoadIndexedNode(LoadIndexedNode* node) {
InlineBailout("EffectGraphVisitor::VisitLoadIndexedNode (deopt)");
ZoneGrowableArray<PushArgumentInstr*>* arguments =
new ZoneGrowableArray<PushArgumentInstr*>(2);
ValueGraphVisitor for_array(owner(), temp_index());
@@ -2252,7 +2230,6 @@ void EffectGraphVisitor::VisitLoadIndexedNode(LoadIndexedNode* node) {
Definition* EffectGraphVisitor::BuildStoreIndexedValues(
StoreIndexedNode* node,
bool result_is_needed) {
InlineBailout("EffectGraphVisitor::BuildStoreIndexedValues (deopt)");
ZoneGrowableArray<PushArgumentInstr*>* arguments =
new ZoneGrowableArray<PushArgumentInstr*>(3);
ValueGraphVisitor for_array(owner(), temp_index());
@@ -2311,7 +2288,7 @@ bool EffectGraphVisitor::MustSaveRestoreContext(SequenceNode* node) const {
void EffectGraphVisitor::UnchainContext() {
InlineBailout("EffectGraphVisitor::UnchainContext (deopt)");
InlineBailout("EffectGraphVisitor::UnchainContext (context)");
Value* context = Bind(new CurrentContextInstr());
Value* parent = Bind(
new LoadFieldInstr(context,
@@ -2330,7 +2307,7 @@ void EffectGraphVisitor::VisitSequenceNode(SequenceNode* node) {
(scope != NULL) ? scope->num_context_variables() : 0;
int previous_context_level = owner()->context_level();
if (num_context_variables > 0) {
InlineBailout("EffectGraphVisitor::VisitSequenceNode (deopt)");
InlineBailout("EffectGraphVisitor::VisitSequenceNode (context)");
// The loop local scope declares variables that are captured.
// Allocate and chain a new context.
// Allocate context computation (uses current CTX)
@@ -2468,7 +2445,7 @@ void EffectGraphVisitor::VisitSequenceNode(SequenceNode* node) {
void EffectGraphVisitor::VisitCatchClauseNode(CatchClauseNode* node) {
InlineBailout("EffectGraphVisitor::VisitCatchClauseNode");
InlineBailout("EffectGraphVisitor::VisitCatchClauseNode (exception)");
// NOTE: The implicit variables ':saved_context', ':exception_var'
// and ':stacktrace_var' can never be captured variables.
// Restores CTX from local variable ':saved_context'.
@@ -2482,7 +2459,7 @@ void EffectGraphVisitor::VisitCatchClauseNode(CatchClauseNode* node) {
void EffectGraphVisitor::VisitTryCatchNode(TryCatchNode* node) {
InlineBailout("EffectGraphVisitor::VisitTryCatchNode");
InlineBailout("EffectGraphVisitor::VisitTryCatchNode (exception)");
intptr_t old_try_index = owner()->try_index();
intptr_t try_index = owner()->AllocateTryIndex();
owner()->set_try_index(try_index);
@@ -2542,7 +2519,7 @@ void EffectGraphVisitor::VisitTryCatchNode(TryCatchNode* node) {
void EffectGraphVisitor::BuildThrowNode(ThrowNode* node) {
// TODO(kmillikin) non-local control flow is not handled correctly
// by the inliner.
InlineBailout("EffectGraphVisitor::BuildThrowNode");
InlineBailout("EffectGraphVisitor::BuildThrowNode (exception)");
ValueGraphVisitor for_exception(owner(), temp_index());
node->exception()->Visit(&for_exception);
Append(for_exception);
@@ -2577,7 +2554,7 @@ void ValueGraphVisitor::VisitThrowNode(ThrowNode* node) {
void EffectGraphVisitor::VisitInlinedFinallyNode(InlinedFinallyNode* node) {
InlineBailout("EffectGraphVisitor::VisitInlinedFinallyNode");
InlineBailout("EffectGraphVisitor::VisitInlinedFinallyNode (exception)");
const intptr_t try_index = owner()->try_index();
if (try_index >= 0) {
// We are about to generate code for an inlined finally block. Exceptions
@@ -2622,6 +2599,9 @@ FlowGraph* FlowGraphBuilder::BuildGraph(InliningContext context) {
CatchClauseNode::kInvalidTryIndex);
graph_entry_ = new GraphEntryInstr(normal_entry);
EffectGraphVisitor for_effect(this, 0);
if (InInliningContext()) {
exits_ = new ZoneGrowableArray<ReturnInstr*>();
}
// TODO(kmillikin): We can eliminate stack checks in some cases (e.g., the
// stack check on entry for leaf routines).
for_effect.Do(new CheckStackOverflowInstr(function.token_pos()));


@@ -45,46 +45,69 @@ void CompilerDeoptInfoWithStub::BuildReturnAddress(DeoptInfoBuilder* builder,
}
RawDeoptInfo* CompilerDeoptInfo::CreateDeoptInfo(FlowGraphCompiler* compiler) {
if (deoptimization_env_ == NULL) return DeoptInfo::null();
const Function& function = compiler->parsed_function().function();
// For functions with optional arguments, all incoming are copied to local
// area below FP, deoptimization environment does not track them.
const intptr_t num_args =
function.HasOptionalParameters() ? 0 : function.num_fixed_parameters();
const intptr_t fixed_parameter_count =
deoptimization_env_->fixed_parameter_count();
DeoptInfoBuilder builder(compiler->object_table(), num_args);
intptr_t slot_ix = 0;
BuildReturnAddress(&builder, function, slot_ix++);
// Assign locations to values pushed above spill slots with PushArgument.
intptr_t height = compiler->StackSize();
for (intptr_t i = 0; i < deoptimization_env_->Length(); i++) {
if (deoptimization_env_->LocationAt(i).IsInvalid()) {
ASSERT(deoptimization_env_->ValueAt(i)->definition()->IsPushArgument());
*deoptimization_env_->LocationSlotAt(i) = Location::StackSlot(height++);
// Assign locations to incoming arguments, i.e., values pushed above spill slots
// with PushArgument. Recursively allocates from outermost to innermost
// environment.
void CompilerDeoptInfo::AllocateIncomingParametersRecursive(
Environment* env,
intptr_t* stack_height) {
if (env == NULL) return;
AllocateIncomingParametersRecursive(env->outer(), stack_height);
for (Environment::ShallowIterator it(env); !it.Done(); it.Advance()) {
if (it.CurrentLocation().IsInvalid()) {
ASSERT(it.CurrentValue()->definition()->IsPushArgument());
it.SetCurrentLocation(Location::StackSlot((*stack_height)++));
}
}
}
for (intptr_t i = deoptimization_env_->Length() - 1;
i >= fixed_parameter_count;
i--) {
builder.AddCopy(deoptimization_env_->LocationAt(i),
*deoptimization_env_->ValueAt(i),
slot_ix++);
}
// PC marker, caller-fp, caller-pc.
builder.AddPcMarker(function, slot_ix++);
builder.AddCallerFp(slot_ix++);
builder.AddCallerPc(slot_ix++);
// Incoming arguments.
for (intptr_t i = fixed_parameter_count - 1; i >= 0; i--) {
builder.AddCopy(deoptimization_env_->LocationAt(i),
*deoptimization_env_->ValueAt(i),
slot_ix++);
RawDeoptInfo* CompilerDeoptInfo::CreateDeoptInfo(FlowGraphCompiler* compiler) {
if (deoptimization_env_ == NULL) return DeoptInfo::null();
intptr_t stack_height = compiler->StackSize();
AllocateIncomingParametersRecursive(deoptimization_env_, &stack_height);
const Function& function = compiler->parsed_function().function();
// For functions with optional arguments, all incoming arguments are copied
// to spill slots. The deoptimization environment does not track them.
const intptr_t incoming_arg_count =
function.HasOptionalParameters() ? 0 : function.num_fixed_parameters();
DeoptInfoBuilder builder(compiler->object_table(), incoming_arg_count);
intptr_t slot_ix = 0;
Environment* env = deoptimization_env_;
while (env != NULL) {
const Function& function = env->function();
const intptr_t fixed_parameter_count = env->fixed_parameter_count();
if (slot_ix == 0) {
// For the innermost environment call the virtual return builder.
BuildReturnAddress(&builder, function, slot_ix++);
} else {
// For any outer environment the deopt id is that of the call instruction
// which is recorded in the outer environment.
builder.AddReturnAddressAfter(function, env->deopt_id(), slot_ix++);
}
for (intptr_t i = env->Length() - 1; i >= fixed_parameter_count; i--) {
builder.AddCopy(env->LocationAt(i), *env->ValueAt(i), slot_ix++);
}
// PC marker and caller FP.
builder.AddPcMarker(function, slot_ix++);
builder.AddCallerFp(slot_ix++);
// On the outermost environment set caller PC and incoming arguments.
if (env->outer() == NULL) {
builder.AddCallerPc(slot_ix++);
for (intptr_t i = fixed_parameter_count - 1; i >= 0; i--) {
builder.AddCopy(env->LocationAt(i), *env->ValueAt(i), slot_ix++);
}
}
// Iterate on the outer environment.
env = env->outer();
}
const DeoptInfo& deopt_info = DeoptInfo::Handle(builder.CreateDeoptInfo());
@@ -342,7 +365,7 @@ void FlowGraphCompiler::FinalizePcDescriptors(const Code& code) {
ASSERT(pc_descriptors_list_ != NULL);
const PcDescriptors& descriptors = PcDescriptors::Handle(
pc_descriptors_list_->FinalizePcDescriptors(code.EntryPoint()));
descriptors.Verify(parsed_function_.function().is_optimizable());
if (!is_optimizing_) descriptors.Verify(parsed_function_.function());
code.set_pc_descriptors(descriptors);
}
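
CreateDeoptInfo above now emits slots for a whole chain of environments, innermost frame first. A self-contained sketch of the resulting slot order (toy types and made-up counts; slot kinds are printed rather than encoded into a real DeoptInfo):

#include <cstdio>

struct ToyEnv {
  int length;                 // env->Length()
  int fixed_parameter_count;  // env->fixed_parameter_count()
  ToyEnv* outer;              // enclosing environment, or nullptr
};

// Mirrors the loop in CreateDeoptInfo: every frame gets a return address,
// its non-parameter values, a PC marker and a caller FP; only the
// outermost frame also gets a caller PC and the incoming arguments.
void DescribeDeoptSlots(ToyEnv* env) {
  int slot_ix = 0;
  while (env != nullptr) {
    std::printf("slot %d: return address\n", slot_ix++);
    for (int i = env->length - 1; i >= env->fixed_parameter_count; --i) {
      std::printf("slot %d: copy of env value %d\n", slot_ix++, i);
    }
    std::printf("slot %d: PC marker\n", slot_ix++);
    std::printf("slot %d: caller FP\n", slot_ix++);
    if (env->outer == nullptr) {
      std::printf("slot %d: caller PC\n", slot_ix++);
      for (int i = env->fixed_parameter_count - 1; i >= 0; --i) {
        std::printf("slot %d: copy of incoming arg %d\n", slot_ix++, i);
      }
    }
    env = env->outer;
  }
}

int main() {
  ToyEnv caller = {4, 2, nullptr};  // outer (caller) environment
  ToyEnv callee = {3, 1, &caller};  // inner (inlined callee) environment
  DescribeDeoptSlots(&callee);      // innermost first, as in the VM loop
  return 0;
}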


@@ -70,6 +70,9 @@ class CompilerDeoptInfo : public ZoneAllocated {
RawDeoptInfo* CreateDeoptInfo(FlowGraphCompiler* compiler);
void AllocateIncomingParametersRecursive(Environment* env,
intptr_t* stack_height);
// No code needs to be generated.
virtual void GenerateCode(FlowGraphCompiler* compiler, intptr_t stub_ix) {}
@@ -85,7 +88,7 @@ class CompilerDeoptInfo : public ZoneAllocated {
private:
const intptr_t deopt_id_;
const DeoptReasonId reason_;
const Environment* deoptimization_env_;
Environment* deoptimization_env_;
DISALLOW_COPY_AND_ASSIGN(CompilerDeoptInfo);
};


@@ -4,10 +4,13 @@
#include "vm/flow_graph_inliner.h"
#include "vm/compiler.h"
#include "vm/flags.h"
#include "vm/flow_graph.h"
#include "vm/flow_graph_builder.h"
#include "vm/flow_graph_optimizer.h"
#include "vm/il_printer.h"
#include "vm/intrinsifier.h"
#include "vm/longjump.h"
#include "vm/object.h"
#include "vm/object_store.h"
@@ -17,6 +20,7 @@ namespace dart {
DEFINE_FLAG(bool, trace_inlining, false, "Trace inlining");
DEFINE_FLAG(charp, inlining_filter, NULL, "Inline only in named function");
DECLARE_FLAG(bool, print_flow_graph);
DECLARE_FLAG(int, deoptimization_counter_threshold);
#define TRACE_INLINING(statement) \
do { \
@@ -46,10 +50,19 @@ class CallSiteInliner : public FlowGraphVisitor {
// Assuming no optional parameters the actual/formal count should match.
ASSERT(arguments->length() == function.num_fixed_parameters());
// Abort if the callee has an intrinsic translation.
if (Intrinsifier::CanIntrinsify(function)) {
TRACE_INLINING(OS::Print(" Bailout: can intrinsify\n"));
return false;
}
Isolate* isolate = Isolate::Current();
// Save and clear IC data.
const Array& old_ic_data = Array::Handle(isolate->ic_data_array());
const Array& prev_ic_data = Array::Handle(isolate->ic_data_array());
isolate->set_ic_data_array(Array::null());
// Save and clear deopt id.
const intptr_t prev_deopt_id = isolate->deopt_id();
isolate->set_deopt_id(0);
// Install bailout jump.
LongJump* base = isolate->long_jump_base();
LongJump jump;
@@ -59,40 +72,44 @@ class CallSiteInliner : public FlowGraphVisitor {
ParsedFunction parsed_function(function);
Parser::ParseFunction(&parsed_function);
parsed_function.AllocateVariables();
FlowGraphBuilder builder(parsed_function);
// Load IC data for the callee.
if ((function.deoptimization_counter() <
FLAG_deoptimization_counter_threshold) &&
function.HasCode()) {
const Code& unoptimized_code =
Code::Handle(function.unoptimized_code());
isolate->set_ic_data_array(unoptimized_code.ExtractTypeFeedbackArray());
}
// Build the callee graph.
FlowGraphBuilder builder(parsed_function);
FlowGraph* callee_graph =
builder.BuildGraph(FlowGraphBuilder::kValueContext);
// Abort if the callee graph contains control flow.
if (callee_graph->preorder().length() != 2) {
isolate->set_long_jump_base(base);
isolate->set_ic_data_array(old_ic_data.raw());
isolate->set_ic_data_array(prev_ic_data.raw());
TRACE_INLINING(OS::Print(" Bailout: control flow\n"));
return false;
}
if (FLAG_trace_inlining && FLAG_print_flow_graph) {
OS::Print("Callee graph before SSA %s\n",
parsed_function.function().ToFullyQualifiedCString());
FlowGraphPrinter printer(*callee_graph);
printer.PrintBlocks();
}
// Compute SSA on the callee graph. (catching bailouts)
// Compute SSA on the callee graph, catching bailouts.
callee_graph->ComputeSSA(next_ssa_temp_index_);
if (FLAG_trace_inlining && FLAG_print_flow_graph) {
OS::Print("Callee graph after SSA %s\n",
parsed_function.function().ToFullyQualifiedCString());
FlowGraphPrinter printer(*callee_graph);
printer.PrintBlocks();
}
callee_graph->ComputeUseLists();
// TODO(zerny): Do optimization passes on the callee graph.
// TODO(zerny): Do more optimization passes on the callee graph.
FlowGraphOptimizer optimizer(callee_graph);
optimizer.ApplyICData();
callee_graph->ComputeUseLists();
if (FLAG_trace_inlining && FLAG_print_flow_graph) {
OS::Print("Callee graph for inlining %s\n",
parsed_function.function().ToFullyQualifiedCString());
FlowGraphPrinter printer(*callee_graph);
printer.PrintBlocks();
}
// TODO(zerny): If result is more than size threshold then abort.
@@ -102,14 +119,17 @@ class CallSiteInliner : public FlowGraphVisitor {
caller_graph_->InlineCall(call, callee_graph);
next_ssa_temp_index_ = caller_graph_->max_virtual_register_number();
// Remove (all) push arguments of the call.
// Check that inlining maintains use lists.
DEBUG_ASSERT(caller_graph_->ValidateUseLists());
// Remove push arguments of the call.
for (intptr_t i = 0; i < call->ArgumentCount(); ++i) {
PushArgumentInstr* push = call->ArgumentAt(i);
push->ReplaceUsesWith(push->value()->definition());
push->RemoveFromGraph();
}
// Replace all the formal parameters with the actuals.
// Replace formal parameters with actuals.
for (intptr_t i = 0; i < arguments->length(); ++i) {
Value* val = callee_graph->graph_entry()->start_env()->ValueAt(i);
ParameterInstr* param = val->definition()->AsParameter();
@@ -126,14 +146,16 @@
// Build succeeded so we restore the bailout jump.
inlined_ = true;
isolate->set_long_jump_base(base);
isolate->set_ic_data_array(old_ic_data.raw());
isolate->set_deopt_id(prev_deopt_id);
isolate->set_ic_data_array(prev_ic_data.raw());
return true;
} else {
Error& error = Error::Handle();
error = isolate->object_store()->sticky_error();
isolate->object_store()->clear_sticky_error();
isolate->set_long_jump_base(base);
isolate->set_ic_data_array(old_ic_data.raw());
isolate->set_deopt_id(prev_deopt_id);
isolate->set_ic_data_array(prev_ic_data.raw());
TRACE_INLINING(OS::Print(" Bailout: %s\n", error.ToErrorCString()));
return false;
}
@@ -199,6 +221,10 @@ void FlowGraphInliner::Inline() {
return;
}
TRACE_INLINING(OS::Print(
"Inlining calls in %s\n",
flow_graph_->parsed_function().function().ToCString()));
if (FLAG_trace_inlining && FLAG_print_flow_graph) {
OS::Print("Before Inlining of %s\n", flow_graph_->
parsed_function().function().ToFullyQualifiedCString());
@@ -206,9 +232,6 @@
printer.PrintBlocks();
}
TRACE_INLINING(OS::Print(
"Inlining calls in %s\n",
flow_graph_->parsed_function().function().ToCString()));
CallSiteInliner inliner(flow_graph_);
inliner.VisitBlocks();


@@ -787,9 +787,14 @@ void ParallelMoveInstr::PrintToVisualizer(BufferFormatter* f) const {
void Environment::PrintTo(BufferFormatter* f) const {
f->Print(" env={ ");
int arg_count = 0;
for (intptr_t i = 0; i < values_.length(); ++i) {
if (i > 0) f->Print(", ");
values_[i]->PrintTo(f);
if (values_[i]->definition()->IsPushArgument()) {
f->Print("a%d", arg_count++);
} else {
values_[i]->PrintTo(f);
}
if ((locations_ != NULL) && !locations_[i].IsInvalid()) {
f->Print(" [");
locations_[i].PrintTo(f);


@@ -1653,12 +1653,13 @@ void PushArgumentInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
Environment* Environment::From(const GrowableArray<Definition*>& definitions,
intptr_t fixed_parameter_count,
const Environment* outer) {
const Function& function) {
Environment* env =
new Environment(definitions.length(),
fixed_parameter_count,
Isolate::kNoDeoptId,
(outer == NULL) ? NULL : outer->DeepCopy());
function,
NULL);
for (intptr_t i = 0; i < definitions.length(); ++i) {
env->values_.Add(new Value(definitions[i]));
}
@@ -1671,6 +1672,7 @@ Environment* Environment::DeepCopy() const {
new Environment(values_.length(),
fixed_parameter_count_,
deopt_id_,
function_,
(outer_ == NULL) ? NULL : outer_->DeepCopy());
for (intptr_t i = 0; i < values_.length(); ++i) {
copy->values_.Add(values_[i]->Copy());
@@ -1693,6 +1695,22 @@ void Environment::DeepCopyTo(Instruction* instr) const {
}
// Copies the environment as outer on an inlined instruction and updates the
// environment use lists.
void Environment::DeepCopyToOuter(Instruction* instr) const {
ASSERT(instr->env()->outer() == NULL);
Environment* copy = DeepCopy();
intptr_t use_index = instr->env()->Length(); // Start index after inner.
for (Environment::DeepIterator it(copy); !it.Done(); it.Advance()) {
Value* value = it.CurrentValue();
value->set_instruction(instr);
value->set_use_index(use_index++);
value->AddToEnvUseList();
}
instr->env()->outer_ = copy;
}
#undef __
} // namespace dart


@@ -3494,6 +3494,16 @@ class Environment : public ZoneAllocated {
environment_->values_[index_] = value;
}
Location CurrentLocation() const {
ASSERT(!Done());
return environment_->locations_[index_];
}
void SetCurrentLocation(Location loc) {
ASSERT(!Done());
environment_->locations_[index_] = loc;
}
private:
Environment* environment_;
intptr_t index_;
@@ -3525,6 +3535,16 @@
iterator_.SetCurrentValue(value);
}
Location CurrentLocation() const {
ASSERT(!Done());
return iterator_.CurrentLocation();
}
void SetCurrentLocation(Location loc) {
ASSERT(!Done());
iterator_.SetCurrentLocation(loc);
}
private:
void SkipDone() {
while (!Done() && iterator_.Done()) {
@@ -3538,7 +3558,7 @@
// Construct an environment by constructing uses from an array of definitions.
static Environment* From(const GrowableArray<Definition*>& definitions,
intptr_t fixed_parameter_count,
const Environment* outer);
const Function& function);
void set_locations(Location* locations) {
ASSERT(locations_ == NULL);
@@ -3583,7 +3603,10 @@
return fixed_parameter_count_;
}
const Function& function() const { return function_; }
void DeepCopyTo(Instruction* instr) const;
void DeepCopyToOuter(Instruction* instr) const;
void PrintTo(BufferFormatter* f) const;
@@ -3593,11 +3616,13 @@
Environment(intptr_t length,
intptr_t fixed_parameter_count,
intptr_t deopt_id,
const Function& function,
Environment* outer)
: values_(length),
locations_(NULL),
fixed_parameter_count_(fixed_parameter_count),
deopt_id_(deopt_id),
function_(function),
outer_(outer) { }
Environment* DeepCopy() const;
@@ -3606,6 +3631,7 @@
Location* locations_;
const intptr_t fixed_parameter_count_;
intptr_t deopt_id_;
const Function& function_;
Environment* outer_;
DISALLOW_COPY_AND_ASSIGN(Environment);
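
The new function_ field and outer_ link turn an Environment into a chain, one entry per inlining level, which the ShallowIterator/DeepIterator pair above walk. A toy model of the deep traversal (standard C++; the names mirror but do not reproduce the VM classes):

#include <cstdio>
#include <vector>

struct ToyEnv {
  std::vector<int> values;  // stand-ins for the environment's Value* slots
  ToyEnv* outer;            // enclosing (caller) environment, or nullptr
};

// Visits all of the innermost frame's values, then each outer frame's,
// the way Environment::DeepIterator does.
struct ToyDeepIterator {
  explicit ToyDeepIterator(ToyEnv* env) : env_(env), index_(0) { SkipDone(); }
  bool Done() const { return env_ == nullptr; }
  int Current() const { return env_->values[index_]; }
  void Advance() { ++index_; SkipDone(); }

 private:
  // Skip past exhausted environments in the chain.
  void SkipDone() {
    while (env_ != nullptr && index_ >= env_->values.size()) {
      env_ = env_->outer;
      index_ = 0;
    }
  }
  ToyEnv* env_;
  size_t index_;
};

int main() {
  ToyEnv caller = {{1, 2}, nullptr};  // outer (caller) frame
  ToyEnv callee = {{3}, &caller};     // inner (inlined callee) frame
  for (ToyDeepIterator it(&callee); !it.Done(); it.Advance()) {
    std::printf("%d\n", it.Current());  // prints 3, then 1, then 2
  }
  return 0;
}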


@@ -50,7 +50,7 @@ static bool TestFunction(const Function& function,
}
bool Intrinsifier::Intrinsify(const Function& function, Assembler* assembler) {
bool Intrinsifier::CanIntrinsify(const Function& function) {
if (!FLAG_intrinsify) return false;
// Closure functions may have different arguments.
if (function.IsClosureFunction()) return false;
@@ -66,6 +66,23 @@ bool Intrinsifier::Intrinsify(const Function& function, Assembler* assembler) {
(function_class.library() != math_lib.raw())) {
return false;
}
#define FIND_INTRINSICS(test_class_name, test_function_name, destination) \
if (TestFunction(function, \
class_name, function_name, \
#test_class_name, #test_function_name)) { \
return true; \
} \
INTRINSIC_LIST(FIND_INTRINSICS);
#undef FIND_INTRINSICS
return false;
}
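
The FIND_INTRINSICS expansion above is an X-macro: INTRINSIC_LIST is a single list macro expanded once in CanIntrinsify and once in Intrinsify, so the predicate and the code generator cannot drift apart. A self-contained illustration with a toy list (names here are invented, not the VM's):

#include <cstdio>
#include <cstring>

#define TOY_INTRINSIC_LIST(V) \
  V(length)                   \
  V(isEmpty)

static bool CanIntrinsifyName(const char* name) {
// Each list entry expands to a string comparison against the argument.
#define TEST_NAME(fn) if (std::strcmp(name, #fn) == 0) return true;
  TOY_INTRINSIC_LIST(TEST_NAME)
#undef TEST_NAME
  return false;
}

int main() {
  std::printf("%d %d\n", CanIntrinsifyName("length"),
              CanIntrinsifyName("hashCode"));  // prints 1 0
  return 0;
}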
bool Intrinsifier::Intrinsify(const Function& function, Assembler* assembler) {
if (!CanIntrinsify(function)) return false;
const char* function_name = String::Handle(function.name()).ToCString();
const Class& function_class = Class::Handle(function.Owner());
const char* class_name = String::Handle(function_class.Name()).ToCString();
#define FIND_INTRINSICS(test_class_name, test_function_name, destination) \
if (TestFunction(function, \
class_name, function_name, \


@@ -94,6 +94,7 @@ class Intrinsifier : public AllStatic {
// completely and the code does not need to be generated (i.e., no slow
// path possible).
static bool Intrinsify(const Function& function, Assembler* assembler);
static bool CanIntrinsify(const Function& function);
private:
#define DECLARE_FUNCTION(test_class_name, test_function_name, destination) \


@@ -7114,7 +7114,7 @@ const char* PcDescriptors::ToCString() const {
// - No two ic-call descriptors have the same deoptimization id (type feedback).
// A function without unique ids is marked as non-optimizable (e.g., because of
// finally blocks).
void PcDescriptors::Verify(bool check_ids) const {
void PcDescriptors::Verify(const Function& function) const {
#if defined(DEBUG)
// TODO(srdjan): Implement a more efficient way to check, currently drop
// the check for too large number of descriptors.
@@ -7124,15 +7124,15 @@ void PcDescriptors::Verify(bool check_ids) const {
}
return;
}
// Only check ids for unoptimized code that is optimizable.
if (!function.is_optimizable()) return;
for (intptr_t i = 0; i < Length(); i++) {
PcDescriptors::Kind kind = DescriptorKind(i);
// 'deopt_id' is set for kDeopt and kIcCall and must be unique for one kind.
intptr_t deopt_id = Isolate::kNoDeoptId;
if (check_ids) {
if ((DescriptorKind(i) == PcDescriptors::kDeoptBefore) ||
(DescriptorKind(i) == PcDescriptors::kIcCall)) {
deopt_id = DeoptId(i);
}
if ((DescriptorKind(i) == PcDescriptors::kDeoptBefore) ||
(DescriptorKind(i) == PcDescriptors::kIcCall)) {
deopt_id = DeoptId(i);
}
for (intptr_t k = i + 1; k < Length(); k++) {
if (kind == DescriptorKind(k)) {
@@ -7770,6 +7770,23 @@ intptr_t Code::ExtractIcDataArraysAtCalls(
}
RawArray* Code::ExtractTypeFeedbackArray() const {
ASSERT(!IsNull() && !is_optimized());
GrowableArray<intptr_t> deopt_ids;
const GrowableObjectArray& ic_data_objs =
GrowableObjectArray::Handle(GrowableObjectArray::New());
const intptr_t max_id =
ExtractIcDataArraysAtCalls(&deopt_ids, ic_data_objs);
const Array& result = Array::Handle(Array::New(max_id + 1));
for (intptr_t i = 0; i < deopt_ids.length(); i++) {
intptr_t result_index = deopt_ids[i];
ASSERT(result.At(result_index) == Object::null());
result.SetAt(result_index, Object::Handle(ic_data_objs.At(i)));
}
return result.raw();
}
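
Moving ExtractTypeFeedbackArray onto Code keeps the deopt-id-indexed table next to the code object that owns the ICData. A hedged sketch of the table construction it performs (standard C++ stand-ins for the raw-object types):

#include <algorithm>
#include <cstdio>
#include <vector>

// Build a dense table indexed by deopt id from sparse (deopt id, data)
// pairs; unset entries stay 0, playing the role of Object::null().
std::vector<int> BuildFeedbackTable(const std::vector<int>& deopt_ids,
                                    const std::vector<int>& ic_data) {
  int max_id = -1;
  for (int id : deopt_ids) max_id = std::max(max_id, id);
  std::vector<int> table(max_id + 1, 0);
  for (size_t i = 0; i < deopt_ids.size(); ++i) {
    table[deopt_ids[i]] = ic_data[i];  // each deopt id occurs at most once
  }
  return table;
}

int main() {
  const std::vector<int> table = BuildFeedbackTable({1, 4}, {11, 44});
  std::printf("%d %d\n", table[1], table[4]);  // prints 11 44
  return 0;
}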
RawStackmap* Code::GetStackmap(uword pc, Array* maps, Stackmap* map) const {
// This code is used during iterating frames during a GC and hence it
// should not in turn start a GC.


@@ -2371,7 +2371,7 @@ class PcDescriptors : public Object {
uword GetPcForKind(Kind kind) const;
// Verify (assert) assumptions about pc descriptors in debug mode.
void Verify(bool check_ids) const;
void Verify(const Function& function) const;
static void PrintHeaderString();
@@ -2720,6 +2720,9 @@ class Code : public Object {
GrowableArray<intptr_t>* node_ids,
const GrowableObjectArray& ic_data_objs) const;
// Returns an array indexed by deopt id, containing the extracted ICData.
RawArray* ExtractTypeFeedbackArray() const;
private:
// An object finder visitor interface.
class FindRawCodeVisitor : public FindObjectVisitor {


@@ -1,4 +1,4 @@
// Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
@@ -551,20 +551,21 @@ static void GenerateDeoptimizationSequence(Assembler* assembler,
__ ReserveAlignedFrameSpace(1 * kWordSize);
__ movl(Address(ESP, 0), ECX);
__ CallRuntime(kDeoptimizeFillFrameRuntimeEntry);
// Result (EAX) is our FP.
if (preserve_eax) {
// Restore result into EAX.
__ movl(EAX, Address(EBP, -1 * kWordSize));
// Restore result into EBX.
__ movl(EBX, Address(EBP, -1 * kWordSize));
}
// Code above cannot cause GC.
__ LeaveFrame();
__ movl(EBP, EAX);
// Frame is fully rewritten at this point and it is safe to perform a GC.
// Materialize any objects that were deferred by FillFrame because they
// require allocation.
__ EnterFrame(0);
if (preserve_eax) {
__ pushl(EAX); // Preserve result, it will be GC-d here.
__ pushl(EBX); // Preserve result, it will be GC-d here.
}
__ CallRuntime(kDeoptimizeMaterializeDoublesRuntimeEntry);
if (preserve_eax) {


@@ -544,19 +544,21 @@ static void GenerateDeoptimizationSequence(Assembler* assembler,
__ ReserveAlignedFrameSpace(0);
__ movq(RDI, RCX); // Set up argument 1 last_fp.
__ CallRuntime(kDeoptimizeFillFrameRuntimeEntry);
// Result (RAX) is our FP.
if (preserve_rax) {
// Restore result into RAX.
__ movq(RAX, Address(RBP, -1 * kWordSize));
// Restore result into RBX.
__ movq(RBX, Address(RBP, -1 * kWordSize));
}
// Code above cannot cause GC.
__ LeaveFrame();
__ movq(RBP, RAX);
// Frame is fully rewritten at this point and it is safe to perform a GC.
// Materialize any objects that were deferred by FillFrame because they
// require allocation.
__ EnterFrame(0);
if (preserve_rax) {
__ pushq(RAX); // Preserve result, it will be GC-d here.
__ pushq(RBX); // Preserve result, it will be GC-d here.
}
__ CallRuntime(kDeoptimizeMaterializeDoublesRuntimeEntry);
if (preserve_rax) {