Precompilation: Specialize instance calls when the call receiver is the method receiver and the class that owns the method has a small number of concrete subclasses (at most 5 by default, the value of the new --max_exhaustive_polymorphic_checks flag).

CompileOnceHelloHtml (ARMv7HF) +17.194%
DeltaBlueClosures (ARMv7HF) +28.379%
DeltaBlue (ARMv7HF) +30.190%

precompiled dart2js arm 19071745 -> 19504726 (+2.3%)
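
To make the headline concrete, here is a hypothetical Dart example of the pattern this change targets (Shape/Circle/Square are invented names, not taken from the CL): inside describe(), the receiver of the area() call is `this`, and class hierarchy analysis can see that Shape has only two concrete subclasses, so a precompiled build can dispatch that call with a short, exhaustive series of class-id tests instead of a megamorphic lookup.

// Hypothetical example; Shape has few concrete subclasses, so the
// area() call inside describe() can be compiled as a "complete"
// polymorphic call: one class-id test per concrete subclass and no
// megamorphic fallback.
abstract class Shape {
  double area();

  // A dynamic function whose receiver is `this`: the receiver of the
  // area() call below is the method receiver.
  String describe() => 'area = ${area()}';
}

class Circle extends Shape {
  final double radius;
  Circle(this.radius);

  @override
  double area() => 3.14159265358979 * radius * radius;
}

class Square extends Shape {
  final double side;
  Square(this.side);

  @override
  double area() => side * side;
}

void main() {
  for (final Shape s in [new Circle(1.0), new Square(2.0)]) {
    print(s.describe());
  }
}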

R=fschneider@google.com

Review URL: https://codereview.chromium.org/1867913004 .
Ryan Macnak 2016-04-11 12:52:28 -07:00
parent baa84c1435
commit e048774776
18 changed files with 245 additions and 72 deletions

@@ -2,7 +2,7 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
class _IntegerImplementation {
abstract class _IntegerImplementation {
// The Dart class _Bigint extending _IntegerImplementation requires a
// default constructor.

@@ -47,7 +47,7 @@ patch class String {
* [_StringBase] contains common methods used by concrete String
* implementations, e.g., _OneByteString.
*/
class _StringBase {
abstract class _StringBase {
// Constants used by replaceAll encoding of string slices between matches.
// A string slice (start+length) is encoded in a single Smi to save memory
// overhead in the common case.

@@ -29,6 +29,10 @@
namespace dart {
DEFINE_FLAG(int, max_exhaustive_polymorphic_checks, 5,
"If a call receiver is known to be of at most this many classes, "
"generate exhaustive class tests instead of a megamorphic call");
// Quick access to the current isolate and zone.
#define I (isolate())
#define Z (zone())
@@ -281,11 +285,11 @@ void AotOptimizer::SpecializePolymorphicInstanceCall(
return;
}
const bool with_checks = false;
PolymorphicInstanceCallInstr* specialized =
new(Z) PolymorphicInstanceCallInstr(call->instance_call(),
ic_data,
with_checks);
/* with_checks = */ false,
/* complete = */ false);
call->ReplaceWith(specialized, current_iterator());
}
@@ -2441,7 +2445,8 @@ void AotOptimizer::VisitInstanceCall(InstanceCallInstr* instr) {
instr, function_kind)) {
PolymorphicInstanceCallInstr* call =
new(Z) PolymorphicInstanceCallInstr(instr, unary_checks,
/* with_checks = */ false);
/* with_checks = */ false,
/* complete = */ true);
instr->ReplaceWith(call, current_iterator());
return;
}
@@ -2514,13 +2519,71 @@ void AotOptimizer::VisitInstanceCall(InstanceCallInstr* instr) {
ic_data.AddReceiverCheck(receiver_class.id(), function);
PolymorphicInstanceCallInstr* call =
new(Z) PolymorphicInstanceCallInstr(instr, ic_data,
/* with_checks = */ false);
/* with_checks = */ false,
/* complete = */ true);
instr->ReplaceWith(call, current_iterator());
return;
}
}
}
Definition* callee_receiver = instr->ArgumentAt(0);
const Function& function = flow_graph_->function();
if (function.IsDynamicFunction() &&
flow_graph_->IsReceiver(callee_receiver)) {
// Call receiver is method receiver.
Class& receiver_class = Class::Handle(Z, function.Owner());
GrowableArray<intptr_t> class_ids(6);
if (thread()->cha()->ConcreteSubclasses(receiver_class, &class_ids)) {
if (class_ids.length() <= FLAG_max_exhaustive_polymorphic_checks) {
if (FLAG_trace_cha) {
THR_Print(" **(CHA) Only %" Pd " concrete subclasses of %s for %s\n",
class_ids.length(),
receiver_class.ToCString(),
instr->function_name().ToCString());
}
const Array& args_desc_array = Array::Handle(Z,
ArgumentsDescriptor::New(instr->ArgumentCount(),
instr->argument_names()));
ArgumentsDescriptor args_desc(args_desc_array);
const ICData& ic_data = ICData::Handle(
ICData::New(function,
instr->function_name(),
args_desc_array,
Thread::kNoDeoptId,
/* args_tested = */ 1));
Function& target = Function::Handle(Z);
Class& cls = Class::Handle(Z);
bool includes_dispatcher_case = false;
for (intptr_t i = 0; i < class_ids.length(); i++) {
intptr_t cid = class_ids[i];
cls = isolate()->class_table()->At(cid);
target = Resolver::ResolveDynamicForReceiverClass(
cls,
instr->function_name(),
args_desc);
if (target.IsNull()) {
// noSuchMethod, call through getter or closurization
includes_dispatcher_case = true;
} else {
ic_data.AddReceiverCheck(cid, target);
}
}
if (!includes_dispatcher_case && (ic_data.NumberOfChecks() > 0)) {
PolymorphicInstanceCallInstr* call =
new(Z) PolymorphicInstanceCallInstr(instr, ic_data,
/* with_checks = */ true,
/* complete = */ true);
instr->ReplaceWith(call, current_iterator());
return;
}
}
}
}
// More than one targets. Generate generic polymorphic call without
// deoptimization.
if (instr->ic_data()->NumberOfUsedChecks() > 0) {
@@ -2529,7 +2592,8 @@ void AotOptimizer::VisitInstanceCall(InstanceCallInstr* instr) {
// deoptimization is allowed.
PolymorphicInstanceCallInstr* call =
new(Z) PolymorphicInstanceCallInstr(instr, unary_checks,
/* with_checks = */ true);
/* with_checks = */ true,
/* complete = */ false);
instr->ReplaceWith(call, current_iterator());
return;
}
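
A hedged sketch of when this new AotOptimizer path applies (Node/Leaf/Pair are invented names): the owner class of the calling method must have at most --max_exhaustive_polymorphic_checks (default 5) concrete subclasses, and resolving the called method for every one of those classes must succeed directly. If any class would dispatch through noSuchMethod, a getter call, or closurization, includes_dispatcher_case is set and the call is left untouched.

// Hypothetical hierarchy. Node has two concrete subclasses, which is
// within the default limit of 5, so the visit() call in visitBoth()
// (whose receiver is `this`) becomes a complete polymorphic call with
// one receiver-class check per subclass and no megamorphic fallback.
abstract class Node {
  int visit();

  int visitBoth() => visit() + visit();
}

class Leaf extends Node {
  @override
  int visit() => 1;
}

class Pair extends Node {
  @override
  int visit() => 2;
}

void main() {
  print(new Leaf().visitBoth() + new Pair().visitBoth());
}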

@@ -48,6 +48,31 @@ bool CHA::HasSubclasses(intptr_t cid) const {
}
bool CHA::ConcreteSubclasses(const Class& cls,
GrowableArray<intptr_t> *class_ids) {
if (cls.InVMHeap()) return false;
if (cls.IsObjectClass()) return false;
if (!cls.is_abstract()) {
class_ids->Add(cls.id());
}
const GrowableObjectArray& direct_subclasses =
GrowableObjectArray::Handle(cls.direct_subclasses());
if (direct_subclasses.IsNull()) {
return true;
}
Class& subclass = Class::Handle();
for (intptr_t i = 0; i < direct_subclasses.Length(); i++) {
subclass ^= direct_subclasses.At(i);
if (!ConcreteSubclasses(subclass, class_ids)) {
return false;
}
}
return true;
}
bool CHA::IsImplemented(const Class& cls) {
// Function type aliases have different type checking rules.
ASSERT(!cls.IsTypedefClass());

@@ -35,6 +35,11 @@ class CHA : public StackResource {
static bool HasSubclasses(const Class& cls);
bool HasSubclasses(intptr_t cid) const;
// Collect the concrete subclasses of 'cls' into 'class_ids'. Return true if
// the result is valid (may be invalid because we don't track the subclasses
// of classes allocated in the VM isolate or class Object).
bool ConcreteSubclasses(const Class& cls, GrowableArray<intptr_t> *class_ids);
// Return true if the class is implemented by some other class.
static bool IsImplemented(const Class& cls);
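
As a concrete picture of what CHA::ConcreteSubclasses gathers, consider the hypothetical hierarchy below: the abstract root is skipped and only the concrete classes are collected, the walk recurses through direct_subclasses, and it reports failure for class Object or for classes allocated in the VM isolate heap because their subclasses are not tracked. This is presumably also why the first two hunks mark _IntegerImplementation and _StringBase abstract: they are never instantiated directly, so they should not count toward the concrete-subclass limit.

// Hypothetical hierarchy: ConcreteSubclasses(A, &ids) would add the
// class ids of B, C and D; the abstract root A itself is skipped.
abstract class A {}

class B extends A {}

class C extends A {}

class D extends C {}

void main() {
  // Instantiate each concrete class so the whole hierarchy is live.
  print([new B(), new C(), new D()]);
}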

@@ -303,6 +303,8 @@ class FlowGraph : public ZoneAllocated {
// Remove environments from the instructions which do not deoptimize.
void EliminateEnvironments();
bool IsReceiver(Definition* def) const;
private:
friend class IfConverter;
friend class BranchSimplifier;
@@ -355,7 +357,6 @@
Value* use,
bool is_environment_use);
bool IsReceiver(Definition* def) const;
void ComputeIsReceiver(PhiInstr* phi) const;
void ComputeIsReceiverRecursive(PhiInstr* phi,
GrowableArray<PhiInstr*>* unmark) const;

@@ -1843,7 +1843,8 @@ void FlowGraphCompiler::EmitPolymorphicInstanceCall(
const Array& argument_names,
intptr_t deopt_id,
TokenPosition token_pos,
LocationSummary* locs) {
LocationSummary* locs,
bool complete) {
if (FLAG_polymorphic_with_deopt) {
Label* deopt = AddDeoptStub(deopt_id,
ICData::kDeoptPolymorphicInstanceCallTestFail);
@@ -1851,23 +1852,32 @@ void FlowGraphCompiler::EmitPolymorphicInstanceCall(
EmitTestAndCall(ic_data, argument_count, argument_names,
deopt, // No cid match.
&ok, // Found cid.
deopt_id, token_pos, locs);
deopt_id, token_pos, locs, complete);
assembler()->Bind(&ok);
} else {
// Instead of deoptimizing, do a megamorphic call when no matching
// cid found.
Label ok;
MegamorphicSlowPath* slow_path =
if (complete) {
Label ok;
EmitTestAndCall(ic_data, argument_count, argument_names,
NULL, // No cid match.
&ok, // Found cid.
deopt_id, token_pos, locs, true);
assembler()->Bind(&ok);
} else {
// Instead of deoptimizing, do a megamorphic call when no matching
// cid found.
Label ok;
MegamorphicSlowPath* slow_path =
new MegamorphicSlowPath(ic_data, argument_count, deopt_id,
token_pos, locs, CurrentTryIndex());
AddSlowPathCode(slow_path);
EmitTestAndCall(ic_data, argument_count, argument_names,
slow_path->entry_label(), // No cid match.
&ok, // Found cid.
deopt_id, token_pos, locs);
AddSlowPathCode(slow_path);
EmitTestAndCall(ic_data, argument_count, argument_names,
slow_path->entry_label(), // No cid match.
&ok, // Found cid.
deopt_id, token_pos, locs, false);
assembler()->Bind(slow_path->exit_label());
assembler()->Bind(&ok);
assembler()->Bind(slow_path->exit_label());
assembler()->Bind(&ok);
}
}
}
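
Roughly, the new `complete` branch above changes the emitted dispatch from "test every class id, then fall back to a megamorphic call (or deoptimize)" to "test all but the last class id, then fall through to the last target". A Dart-level sketch of that shape (hypothetical names; the real output is machine code produced by EmitTestAndCall):

class A {
  int f() => 1;
}

class B extends A {
  @override
  int f() => 2;
}

class C extends A {
  @override
  int f() => 3;
}

// Shape of a complete call over the receiver set {A, B, C}: once the
// first two tests fail, the receiver can only be an A, so the final
// class test and the megamorphic slow path are both omitted.
int dispatchComplete(A receiver) {
  if (receiver is B) return receiver.f();
  if (receiver is C) return receiver.f();
  // For an incomplete call there would be one more class-id test here,
  // followed by a jump to the megamorphic slow path.
  return receiver.f();
}

void main() {
  print(dispatchComplete(new A()) +
      dispatchComplete(new B()) +
      dispatchComplete(new C()));
}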

@@ -451,7 +451,8 @@ class FlowGraphCompiler : public ValueObject {
const Array& argument_names,
intptr_t deopt_id,
TokenPosition token_pos,
LocationSummary* locs);
LocationSummary* locs,
bool complete);
// Pass a value for try-index where block is not available (e.g. slow path).
void EmitMegamorphicInstanceCall(
@@ -476,7 +477,8 @@
Label* match_found,
intptr_t deopt_id,
TokenPosition token_index,
LocationSummary* locs);
LocationSummary* locs,
bool complete);
Condition EmitEqualityRegConstCompare(Register reg,
const Object& obj,

@@ -1596,7 +1596,8 @@ void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
Label* match_found,
intptr_t deopt_id,
TokenPosition token_index,
LocationSummary* locs) {
LocationSummary* locs,
bool complete) {
ASSERT(is_optimizing());
__ Comment("EmitTestAndCall");
const Array& arguments_descriptor =
@@ -1613,8 +1614,8 @@ void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
ASSERT(!ic_data.IsNull() && (kNumChecks > 0));
Label after_smi_test;
__ tst(R0, Operand(kSmiTagMask));
if (kFirstCheckIsSmi) {
__ tst(R0, Operand(kSmiTagMask));
// Jump if receiver is not Smi.
if (kNumChecks == 1) {
__ b(failed, NE);
@@ -1638,7 +1639,10 @@ void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
} else {
// Receiver is Smi, but Smi is not a valid class therefore fail.
// (Smi class must be first in the list).
__ b(failed, EQ);
if (!complete) {
__ tst(R0, Operand(kSmiTagMask));
__ b(failed, EQ);
}
}
__ Bind(&after_smi_test);
@@ -1657,11 +1661,18 @@ void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
const bool kIsLastCheck = (i == (kSortedLen - 1));
ASSERT(sorted[i].cid != kSmiCid);
Label next_test;
__ CompareImmediate(R2, sorted[i].cid);
if (kIsLastCheck) {
__ b(failed, NE);
if (!complete) {
__ CompareImmediate(R2, sorted[i].cid);
if (kIsLastCheck) {
__ b(failed, NE);
} else {
__ b(&next_test, NE);
}
} else {
__ b(&next_test, NE);
if (!kIsLastCheck) {
__ CompareImmediate(R2, sorted[i].cid);
__ b(&next_test, NE);
}
}
// Do not use the code from the function, but let the code be patched so
// that we can record the outgoing edges to other code.

@@ -1551,7 +1551,8 @@ void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
Label* match_found,
intptr_t deopt_id,
TokenPosition token_index,
LocationSummary* locs) {
LocationSummary* locs,
bool complete) {
ASSERT(is_optimizing());
__ Comment("EmitTestAndCall");
@@ -1569,8 +1570,8 @@ void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
ASSERT(!ic_data.IsNull() && (kNumChecks > 0));
Label after_smi_test;
__ tsti(R0, Immediate(kSmiTagMask));
if (kFirstCheckIsSmi) {
__ tsti(R0, Immediate(kSmiTagMask));
// Jump if receiver is not Smi.
if (kNumChecks == 1) {
__ b(failed, NE);
@@ -1594,7 +1595,10 @@ void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
} else {
// Receiver is Smi, but Smi is not a valid class therefore fail.
// (Smi class must be first in the list).
__ b(failed, EQ);
if (!complete) {
__ tsti(R0, Immediate(kSmiTagMask));
__ b(failed, EQ);
}
}
__ Bind(&after_smi_test);
@@ -1613,11 +1617,18 @@ void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
const bool kIsLastCheck = (i == (kSortedLen - 1));
ASSERT(sorted[i].cid != kSmiCid);
Label next_test;
__ CompareImmediate(R2, sorted[i].cid);
if (kIsLastCheck) {
__ b(failed, NE);
if (!complete) {
__ CompareImmediate(R2, sorted[i].cid);
if (kIsLastCheck) {
__ b(failed, NE);
} else {
__ b(&next_test, NE);
}
} else {
__ b(&next_test, NE);
if (!kIsLastCheck) {
__ CompareImmediate(R2, sorted[i].cid);
__ b(&next_test, NE);
}
}
// Do not use the code from the function, but let the code be patched so
// that we can record the outgoing edges to other code.

@@ -1505,8 +1505,10 @@ void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
Label* match_found,
intptr_t deopt_id,
TokenPosition token_index,
LocationSummary* locs) {
LocationSummary* locs,
bool complete) {
ASSERT(is_optimizing());
ASSERT(!complete);
__ Comment("EmitTestAndCall");
const Array& arguments_descriptor =
Array::ZoneHandle(zone(), ArgumentsDescriptor::New(argument_count,

@@ -1620,7 +1620,8 @@ void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
Label* match_found,
intptr_t deopt_id,
TokenPosition token_index,
LocationSummary* locs) {
LocationSummary* locs,
bool complete) {
ASSERT(is_optimizing());
__ Comment("EmitTestAndCall");
const Array& arguments_descriptor =
@@ -1637,8 +1638,8 @@ void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
ASSERT(!ic_data.IsNull() && (kNumChecks > 0));
Label after_smi_test;
__ andi(CMPRES1, T0, Immediate(kSmiTagMask));
if (kFirstCheckIsSmi) {
__ andi(CMPRES1, T0, Immediate(kSmiTagMask));
// Jump if receiver is not Smi.
if (kNumChecks == 1) {
__ bne(CMPRES1, ZR, failed);
@@ -1662,7 +1663,10 @@ void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
} else {
// Receiver is Smi, but Smi is not a valid class therefore fail.
// (Smi class must be first in the list).
__ beq(CMPRES1, ZR, failed);
if (!complete) {
__ andi(CMPRES1, T0, Immediate(kSmiTagMask));
__ beq(CMPRES1, ZR, failed);
}
}
__ Bind(&after_smi_test);
@@ -1681,10 +1685,16 @@ void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
const bool kIsLastCheck = (i == (kSortedLen - 1));
ASSERT(sorted[i].cid != kSmiCid);
Label next_test;
if (kIsLastCheck) {
__ BranchNotEqual(T2, Immediate(sorted[i].cid), failed);
if (!complete) {
if (kIsLastCheck) {
__ BranchNotEqual(T2, Immediate(sorted[i].cid), failed);
} else {
__ BranchNotEqual(T2, Immediate(sorted[i].cid), &next_test);
}
} else {
__ BranchNotEqual(T2, Immediate(sorted[i].cid), &next_test);
if (!kIsLastCheck) {
__ BranchNotEqual(T2, Immediate(sorted[i].cid), &next_test);
}
}
// Do not use the code from the function, but let the code be patched so
// that we can record the outgoing edges to other code.

@@ -1534,7 +1534,8 @@ void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
Label* match_found,
intptr_t deopt_id,
TokenPosition token_index,
LocationSummary* locs) {
LocationSummary* locs,
bool complete) {
ASSERT(is_optimizing());
__ Comment("EmitTestAndCall");
@@ -1552,8 +1553,8 @@ void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
ASSERT(!ic_data.IsNull() && (kNumChecks > 0));
Label after_smi_test;
__ testq(RAX, Immediate(kSmiTagMask));
if (kFirstCheckIsSmi) {
__ testq(RAX, Immediate(kSmiTagMask));
// Jump if receiver is not Smi.
if (kNumChecks == 1) {
__ j(NOT_ZERO, failed);
@@ -1577,7 +1578,10 @@ void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
} else {
// Receiver is Smi, but Smi is not a valid class therefore fail.
// (Smi class must be first in the list).
__ j(ZERO, failed);
if (!complete) {
__ testq(RAX, Immediate(kSmiTagMask));
__ j(ZERO, failed);
}
}
__ Bind(&after_smi_test);
@@ -1596,11 +1600,18 @@ void FlowGraphCompiler::EmitTestAndCall(const ICData& ic_data,
const bool kIsLastCheck = (i == (kSortedLen - 1));
ASSERT(sorted[i].cid != kSmiCid);
Label next_test;
__ cmpl(RDI, Immediate(sorted[i].cid));
if (kIsLastCheck) {
__ j(NOT_EQUAL, failed);
if (!complete) {
__ cmpl(RDI, Immediate(sorted[i].cid));
if (kIsLastCheck) {
__ j(NOT_EQUAL, failed);
} else {
__ j(NOT_EQUAL, &next_test);
}
} else {
__ j(NOT_EQUAL, &next_test);
if (!kIsLastCheck) {
__ cmpl(RDI, Immediate(sorted[i].cid));
__ j(NOT_EQUAL, &next_test);
}
}
// Do not use the code from the function, but let the code be patched so
// that we can record the outgoing edges to other code.

@@ -1216,7 +1216,7 @@ class CallSiteInliner : public ValueObject {
PolymorphicInstanceCallInstr* call = call_info[call_idx].call;
if (call->with_checks()) {
// PolymorphicInliner introduces deoptimization paths.
if (!FLAG_polymorphic_with_deopt) {
if (!call->complete() && !FLAG_polymorphic_with_deopt) {
TRACE_INLINING(THR_Print(
" => %s\n Bailout: call with checks\n",
call->instance_call()->function_name().ToCString()));
@@ -1633,18 +1633,21 @@ TargetEntryInstr* PolymorphicInliner::BuildDecisionGraph() {
if ((i == (inlined_variants_.length() - 1)) &&
non_inlined_variants_.is_empty()) {
// If it is the last variant use a check class id instruction which can
// deoptimize, followed unconditionally by the body.
RedefinitionInstr* cid_redefinition =
new RedefinitionInstr(new(Z) Value(load_cid));
cid_redefinition->set_ssa_temp_index(
owner_->caller_graph()->alloc_ssa_temp_index());
cursor = AppendInstruction(cursor, cid_redefinition);
CheckClassIdInstr* check_class_id = new(Z) CheckClassIdInstr(
new(Z) Value(cid_redefinition),
inlined_variants_[i].cid,
call_->deopt_id());
check_class_id->InheritDeoptTarget(zone(), call_);
cursor = AppendInstruction(cursor, check_class_id);
// deoptimize, followed unconditionally by the body. Omit the check if
// we know that we have covered all possible classes.
if (!call_->complete()) {
RedefinitionInstr* cid_redefinition =
new RedefinitionInstr(new(Z) Value(load_cid));
cid_redefinition->set_ssa_temp_index(
owner_->caller_graph()->alloc_ssa_temp_index());
cursor = AppendInstruction(cursor, cid_redefinition);
CheckClassIdInstr* check_class_id = new(Z) CheckClassIdInstr(
new(Z) Value(cid_redefinition),
inlined_variants_[i].cid,
call_->deopt_id());
check_class_id->InheritDeoptTarget(zone(), call_);
cursor = AppendInstruction(cursor, check_class_id);
}
// The next instruction is the first instruction of the inlined body.
// Handle the two possible cases (unshared and shared subsequent
@@ -1777,7 +1780,8 @@ TargetEntryInstr* PolymorphicInliner::BuildDecisionGraph() {
PolymorphicInstanceCallInstr* fallback_call =
new PolymorphicInstanceCallInstr(call_->instance_call(),
new_checks,
true); // With checks.
/* with_checks = */ true,
call_->complete());
fallback_call->set_ssa_temp_index(
owner_->caller_graph()->alloc_ssa_temp_index());
fallback_call->InheritDeoptTarget(zone(), call_);
@@ -2873,6 +2877,11 @@ bool FlowGraphInliner::TryInlineRecognizedMethod(FlowGraph* flow_graph,
const ICData& ic_data,
TargetEntryInstr** entry,
Definition** last) {
if (FLAG_precompiled_mode) {
// The graphs generated below include deopts.
return false;
}
ICData& value_check = ICData::ZoneHandle(Z);
MethodRecognizer::Kind kind = MethodRecognizer::RecognizeKind(target);
switch (kind) {
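
The inliner changes above have two effects worth spelling out: a complete call may now be inlined in precompiled mode even though FLAG_polymorphic_with_deopt is off, and when the last inlined variant is reached the deoptimizing CheckClassIdInstr is dropped, so the fully inlined result carries no deoptimization path. A hedged, Dart-level sketch of the decision graph BuildDecisionGraph produces for a complete two-class call (all names and class-id values are invented):

// Each variant's inlined body sits behind a class-id test; because the
// call is complete, the last variant needs no check at all.
int inlinedDispatch(int receiverCid, int fooCid, int barCid) {
  if (receiverCid == fooCid) {
    return 1; // inlined body of Foo.value()
  }
  // CheckClassId omitted: the class set is complete, so the receiver
  // here can only be a Bar.
  return 2; // inlined body of Bar.value()
}

void main() {
  const int fooCid = 100, barCid = 101;
  print(inlinedDispatch(fooCid, fooCid, barCid)); // 1
  print(inlinedDispatch(barCid, fooCid, barCid)); // 2
}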

@@ -448,7 +448,10 @@ void PolymorphicInstanceCallInstr::PrintOperandsTo(BufferFormatter* f) const {
}
PrintICDataHelper(f, ic_data());
if (with_checks()) {
f->Print(" WITH CHECKS");
f->Print(" WITH-CHECKS");
}
if (complete()) {
f->Print(" COMPLETE");
}
}

@@ -3138,7 +3138,8 @@ void PolymorphicInstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
instance_call()->argument_names(),
deopt_id(),
instance_call()->token_pos(),
locs());
locs(),
complete());
}

@@ -2892,17 +2892,20 @@ class PolymorphicInstanceCallInstr : public TemplateDefinition<0, Throws> {
public:
PolymorphicInstanceCallInstr(InstanceCallInstr* instance_call,
const ICData& ic_data,
bool with_checks)
bool with_checks,
bool complete)
: TemplateDefinition(instance_call->deopt_id()),
instance_call_(instance_call),
ic_data_(ic_data),
with_checks_(with_checks) {
with_checks_(with_checks),
complete_(complete) {
ASSERT(instance_call_ != NULL);
ASSERT(ic_data.NumberOfChecks() > 0);
}
InstanceCallInstr* instance_call() const { return instance_call_; }
bool with_checks() const { return with_checks_; }
bool complete() const { return complete_; }
virtual TokenPosition token_pos() const {
return instance_call_->token_pos();
}
@@ -2934,6 +2937,7 @@ class PolymorphicInstanceCallInstr : public TemplateDefinition<0, Throws> {
InstanceCallInstr* instance_call_;
const ICData& ic_data_;
const bool with_checks_;
const bool complete_;
DISALLOW_COPY_AND_ASSIGN(PolymorphicInstanceCallInstr);
};

@@ -250,10 +250,12 @@ void JitOptimizer::SpecializePolymorphicInstanceCall(
}
const bool with_checks = false;
const bool complete = false;
PolymorphicInstanceCallInstr* specialized =
new(Z) PolymorphicInstanceCallInstr(call->instance_call(),
ic_data,
with_checks);
with_checks,
complete);
call->ReplaceWith(specialized, current_iterator());
}
@@ -2756,7 +2758,8 @@ void JitOptimizer::VisitInstanceCall(InstanceCallInstr* instr) {
if (!flow_graph()->InstanceCallNeedsClassCheck(instr, function_kind)) {
PolymorphicInstanceCallInstr* call =
new(Z) PolymorphicInstanceCallInstr(instr, unary_checks,
/* call_with_checks = */ false);
/* call_with_checks = */ false,
/* complete = */ false);
instr->ReplaceWith(call, current_iterator());
return;
}
@@ -2775,7 +2778,8 @@ void JitOptimizer::VisitInstanceCall(InstanceCallInstr* instr) {
}
PolymorphicInstanceCallInstr* call =
new(Z) PolymorphicInstanceCallInstr(instr, unary_checks,
call_with_checks);
call_with_checks,
/* complete = */ false);
instr->ReplaceWith(call, current_iterator());
}
}