[vm] Simplify and optimize method extractors

Previously, method extractors (getter functions which return
tear-offs) were implemented in the following way:

The method extractor body is an asm intrinsic which loads a few
registers and jumps to a BuildMethodExtractor stub.
The BuildMethodExtractor stub loads more registers, calls the
AllocateClosure stub and initializes a few fields of the closure
(this stub also used to call AllocateContext, but that is no longer
needed for tear-offs).
The AllocateClosure stub allocates a closure object and initializes
the remaining closure fields.
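
For reference, the old call chain, condensed as comments from the
removed flow_graph_compiler_* and stub_code_compiler_* hunks below
(register names are the arm ones used there):

  // 1. Method extractor body (asm intrinsic, no frame set up):
  //      R1 = extracted function,
  //      R4 = offset of the type argument vector (or 0),
  //      then a tail jump into the BuildMethodExtractor stub.
  // 2. BuildMethodExtractor stub:
  //      loads the receiver's instantiator type arguments (if any),
  //      moves function/receiver into AllocateClosureABI registers,
  //      calls the AllocateClosure stub, then stores the closure's
  //      instantiator/delayed type argument fields.
  // 3. AllocateClosure stub:
  //      allocates the Closure object and initializes its fields.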

Instead of taking two hops to reach the object allocation, the
method extractor body now calls an AllocateClosure stub directly,
and that stub initializes all closure fields. This saves a jump and
a handful of instructions for loading values into registers and
setting closure fields.
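
A condensed sketch of the new stub selection (this restates the
AllocateClosureInstr::EmitNativeCode hunk shown below, not new
logic):

  auto object_store = compiler->isolate_group()->object_store();
  Code& stub = Code::ZoneHandle(compiler->zone());
  if (has_instantiator_type_args()) {
    stub = is_generic() ? object_store->allocate_closure_ta_generic_stub()
                        : object_store->allocate_closure_ta_stub();
  } else {
    stub = is_generic() ? object_store->allocate_closure_generic_stub()
                        : object_store->allocate_closure_stub();
  }
  compiler->GenerateStubCall(source(), stub, UntaggedPcDescriptors::kOther,
                             locs(), deopt_id(), env());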

The assembler intrinsic for the method extractor body and the
architecture-specific BuildMethodExtractor stubs are now removed.

Also, since method extractors are really tiny, they are now always
inlined.
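
The key inliner change is an extra early return in
FlowGraphInliner::AlwaysInline (the inliner hunk below also adds
method extractors to the static calls considered for inlining):

  if (function.IsMethodExtractor()) {
    // Tear-off closure allocation has about the same size as the call.
    return true;
  }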

TEST=ci

Change-Id: Icfeed18414659a76c7d24d3f6f53a76c91e5bbd3
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/356302
Reviewed-by: Martin Kustermann <kustermann@google.com>
Commit-Queue: Alexander Markov <alexmarkov@google.com>
Alexander Markov 2024-03-11 15:56:33 +00:00 committed by Commit Queue
parent 8682ca72ce
commit b9b341f4a7
37 changed files with 3502 additions and 3942 deletions

View file

@ -1290,15 +1290,6 @@ void ClassFinalizer::ClearAllCode(bool including_nonchanging_cids) {
ClearCodeVisitor visitor(zone, including_nonchanging_cids);
ProgramVisitor::WalkProgram(zone, isolate_group, &visitor);
// Apart from normal function code and allocation stubs we have two global
// code objects to clear.
if (including_nonchanging_cids) {
auto object_store = isolate_group->object_store();
auto& null_code = Code::Handle(zone);
object_store->set_build_generic_method_extractor_code(null_code);
object_store->set_build_nongeneric_method_extractor_code(null_code);
}
}
#endif // !defined(DART_PRECOMPILED_RUNTIME)

View file

@ -535,21 +535,6 @@ void Precompiler::DoCompileAll() {
IG->object_store()->set_##member(stub_code);
OBJECT_STORE_STUB_CODE_LIST(DO)
#undef DO
{
SafepointWriteRwLocker ml(T, T->isolate_group()->program_lock());
stub_code = StubCode::GetBuildGenericMethodExtractorStub(
global_object_pool_builder());
}
IG->object_store()->set_build_generic_method_extractor_code(stub_code);
{
SafepointWriteRwLocker ml(T, T->isolate_group()->program_lock());
stub_code = StubCode::GetBuildNonGenericMethodExtractorStub(
global_object_pool_builder());
}
IG->object_store()->set_build_nongeneric_method_extractor_code(
stub_code);
}
CollectDynamicFunctionNames();

View file

@ -1410,28 +1410,6 @@ bool FlowGraphCompiler::TryIntrinsifyHelper() {
compiler::Label exit;
set_intrinsic_slow_path_label(&exit);
if (FLAG_intrinsify) {
const auto& function = parsed_function().function();
if (function.IsMethodExtractor()) {
#if !defined(TARGET_ARCH_IA32)
auto& extracted_method =
Function::ZoneHandle(function.extracted_method_closure());
auto& klass = Class::Handle(extracted_method.Owner());
const intptr_t type_arguments_field_offset =
compiler::target::Class::HasTypeArgumentsField(klass)
? (compiler::target::Class::TypeArgumentsFieldOffset(klass) -
kHeapObjectTag)
: 0;
SpecialStatsBegin(CombinedCodeStatistics::kTagIntrinsics);
GenerateMethodExtractorIntrinsic(extracted_method,
type_arguments_field_offset);
SpecialStatsEnd(CombinedCodeStatistics::kTagIntrinsics);
return true;
#endif // !defined(TARGET_ARCH_IA32)
}
}
EnterIntrinsicMode();
SpecialStatsBegin(CombinedCodeStatistics::kTagIntrinsics);

View file

@ -1082,11 +1082,6 @@ class FlowGraphCompiler : public ValueObject {
compiler::Label* is_true,
compiler::Label* is_false);
void GenerateMethodExtractorIntrinsic(const Function& extracted_method,
intptr_t type_arguments_field_offset);
void GenerateGetterIntrinsic(const Function& accessor, const Field& field);
// Perform a greedy local register allocation. Consider all registers free.
void AllocateRegistersLocally(Instruction* instr);

View file

@ -231,50 +231,6 @@ void FlowGraphCompiler::GenerateBoolToJump(Register bool_register,
__ Bind(&fall_through);
}
void FlowGraphCompiler::GenerateMethodExtractorIntrinsic(
const Function& extracted_method,
intptr_t type_arguments_field_offset) {
// No frame has been setup here.
ASSERT(!__ constant_pool_allowed());
DEBUG_ASSERT(extracted_method.IsNotTemporaryScopedHandle());
const Code& build_method_extractor =
Code::ZoneHandle(extracted_method.IsGeneric()
? isolate_group()
->object_store()
->build_generic_method_extractor_code()
: isolate_group()
->object_store()
->build_nongeneric_method_extractor_code());
const intptr_t stub_index =
__ object_pool_builder().FindObject(build_method_extractor);
const intptr_t function_index =
__ object_pool_builder().FindObject(extracted_method);
// We use a custom pool register to preserve caller PP.
Register kPoolReg = R0;
// R1 = extracted function
// R4 = offset of type argument vector (or 0 if class is not generic)
if (FLAG_precompiled_mode) {
kPoolReg = PP;
} else {
__ LoadFieldFromOffset(kPoolReg, CODE_REG,
compiler::target::Code::object_pool_offset());
}
__ LoadImmediate(R4, type_arguments_field_offset);
__ LoadFieldFromOffset(
R1, kPoolReg,
compiler::target::ObjectPool::element_offset(function_index));
__ LoadFieldFromOffset(
CODE_REG, kPoolReg,
compiler::target::ObjectPool::element_offset(stub_index));
__ Branch(compiler::FieldAddress(
CODE_REG,
compiler::target::Code::entry_point_offset(Code::EntryKind::kUnchecked)));
}
void FlowGraphCompiler::EmitFrameEntry() {
const Function& function = parsed_function().function();
if (CanOptimizeFunction() && function.IsOptimizable() &&

View file

@ -222,50 +222,6 @@ void FlowGraphCompiler::GenerateBoolToJump(Register bool_register,
__ Bind(&fall_through);
}
void FlowGraphCompiler::GenerateMethodExtractorIntrinsic(
const Function& extracted_method,
intptr_t type_arguments_field_offset) {
// No frame has been setup here.
ASSERT(!__ constant_pool_allowed());
DEBUG_ASSERT(extracted_method.IsNotTemporaryScopedHandle());
const Code& build_method_extractor =
Code::ZoneHandle(extracted_method.IsGeneric()
? isolate_group()
->object_store()
->build_generic_method_extractor_code()
: isolate_group()
->object_store()
->build_nongeneric_method_extractor_code());
const intptr_t stub_index =
__ object_pool_builder().FindObject(build_method_extractor);
const intptr_t function_index =
__ object_pool_builder().FindObject(extracted_method);
// We use a custom pool register to preserve caller PP.
Register kPoolReg = R0;
// R1 = extracted function
// R4 = offset of type argument vector (or 0 if class is not generic)
intptr_t pp_offset = 0;
if (FLAG_precompiled_mode) {
// PP is not tagged on arm64.
kPoolReg = PP;
pp_offset = kHeapObjectTag;
} else {
__ LoadFieldFromOffset(kPoolReg, CODE_REG, Code::object_pool_offset());
}
__ LoadImmediate(R4, type_arguments_field_offset);
__ LoadFieldFromOffset(
R1, kPoolReg, ObjectPool::element_offset(function_index) + pp_offset);
__ LoadFieldFromOffset(CODE_REG, kPoolReg,
ObjectPool::element_offset(stub_index) + pp_offset);
__ LoadFieldFromOffset(R0, CODE_REG,
Code::entry_point_offset(Code::EntryKind::kUnchecked));
__ br(R0);
}
void FlowGraphCompiler::EmitFrameEntry() {
const Function& function = parsed_function().function();
if (CanOptimizeFunction() && function.IsOptimizable() &&

View file

@ -211,50 +211,6 @@ void FlowGraphCompiler::GenerateBoolToJump(Register bool_register,
__ Bind(&fall_through);
}
void FlowGraphCompiler::GenerateMethodExtractorIntrinsic(
const Function& extracted_method,
intptr_t type_arguments_field_offset) {
// No frame has been setup here.
ASSERT(!__ constant_pool_allowed());
DEBUG_ASSERT(extracted_method.IsNotTemporaryScopedHandle());
const Code& build_method_extractor =
Code::ZoneHandle(extracted_method.IsGeneric()
? isolate_group()
->object_store()
->build_generic_method_extractor_code()
: isolate_group()
->object_store()
->build_nongeneric_method_extractor_code());
const intptr_t stub_index =
__ object_pool_builder().FindObject(build_method_extractor);
const intptr_t function_index =
__ object_pool_builder().FindObject(extracted_method);
// We use a custom pool register to preserve caller PP.
Register kPoolReg = A1;
// T1 = extracted function
// T4 = offset of type argument vector (or 0 if class is not generic)
intptr_t pp_offset = 0;
if (FLAG_precompiled_mode) {
// PP is not tagged on riscv.
kPoolReg = PP;
pp_offset = kHeapObjectTag;
} else {
__ LoadFieldFromOffset(kPoolReg, CODE_REG, Code::object_pool_offset());
}
__ LoadImmediate(T4, type_arguments_field_offset);
__ LoadFieldFromOffset(
T1, kPoolReg, ObjectPool::element_offset(function_index) + pp_offset);
__ LoadFieldFromOffset(CODE_REG, kPoolReg,
ObjectPool::element_offset(stub_index) + pp_offset);
__ LoadFieldFromOffset(TMP, CODE_REG,
Code::entry_point_offset(Code::EntryKind::kUnchecked));
__ jr(TMP);
}
void FlowGraphCompiler::EmitFrameEntry() {
const Function& function = parsed_function().function();
if (CanOptimizeFunction() && function.IsOptimizable() &&

View file

@ -223,48 +223,6 @@ void FlowGraphCompiler::GenerateBoolToJump(Register bool_register,
__ Bind(&fall_through);
}
void FlowGraphCompiler::GenerateMethodExtractorIntrinsic(
const Function& extracted_method,
intptr_t type_arguments_field_offset) {
// No frame has been setup here.
ASSERT(!__ constant_pool_allowed());
DEBUG_ASSERT(extracted_method.IsNotTemporaryScopedHandle());
const Code& build_method_extractor =
Code::ZoneHandle(extracted_method.IsGeneric()
? isolate_group()
->object_store()
->build_generic_method_extractor_code()
: isolate_group()
->object_store()
->build_nongeneric_method_extractor_code());
ASSERT(!build_method_extractor.IsNull());
const intptr_t stub_index =
__ object_pool_builder().FindObject(build_method_extractor);
const intptr_t function_index =
__ object_pool_builder().FindObject(extracted_method);
// We use a custom pool register to preserve caller PP.
Register kPoolReg = RAX;
// RBX = extracted function
// RDX = offset of type argument vector (or 0 if class is not generic)
if (FLAG_precompiled_mode) {
kPoolReg = PP;
} else {
__ movq(kPoolReg,
compiler::FieldAddress(CODE_REG, Code::object_pool_offset()));
}
__ movq(RDX, compiler::Immediate(type_arguments_field_offset));
__ movq(RBX, compiler::FieldAddress(
kPoolReg, ObjectPool::element_offset(function_index)));
__ movq(CODE_REG, compiler::FieldAddress(
kPoolReg, ObjectPool::element_offset(stub_index)));
__ jmp(compiler::FieldAddress(
CODE_REG, Code::entry_point_offset(Code::EntryKind::kUnchecked)));
}
// NOTE: If the entry code shape changes, ReturnAddressLocator in profiler.cc
// needs to be updated to match.
void FlowGraphCompiler::EmitFrameEntry() {

View file

@ -955,7 +955,7 @@ Definition* AllocateClosureInstr::Canonicalize(FlowGraph* flow_graph) {
LocationSummary* AllocateClosureInstr::MakeLocationSummary(Zone* zone,
bool opt) const {
const intptr_t kNumInputs = inputs_.length();
const intptr_t kNumInputs = InputCount();
const intptr_t kNumTemps = 0;
LocationSummary* locs = new (zone)
LocationSummary(zone, kNumInputs, kNumTemps, LocationSummary::kCall);
@ -963,14 +963,31 @@ LocationSummary* AllocateClosureInstr::MakeLocationSummary(Zone* zone,
Location::RegisterLocation(AllocateClosureABI::kFunctionReg));
locs->set_in(kContextPos,
Location::RegisterLocation(AllocateClosureABI::kContextReg));
if (has_instantiator_type_args()) {
locs->set_in(kInstantiatorTypeArgsPos,
Location::RegisterLocation(
AllocateClosureABI::kInstantiatorTypeArgsReg));
}
locs->set_out(0, Location::RegisterLocation(AllocateClosureABI::kResultReg));
return locs;
}
void AllocateClosureInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
const Code& stub = Code::ZoneHandle(
compiler->zone(),
compiler->isolate_group()->object_store()->allocate_closure_stub());
auto object_store = compiler->isolate_group()->object_store();
Code& stub = Code::ZoneHandle(compiler->zone());
if (has_instantiator_type_args()) {
if (is_generic()) {
stub = object_store->allocate_closure_ta_generic_stub();
} else {
stub = object_store->allocate_closure_ta_stub();
}
} else {
if (is_generic()) {
stub = object_store->allocate_closure_generic_stub();
} else {
stub = object_store->allocate_closure_stub();
}
}
compiler->GenerateStubCall(source(), stub, UntaggedPcDescriptors::kOther,
locs(), deopt_id(), env());
}

View file

@ -7367,24 +7367,44 @@ class AllocateObjectInstr : public AllocationInstr {
// Allocates and null initializes a closure object, given the closure function
// and the context as values.
class AllocateClosureInstr : public TemplateAllocation<2> {
class AllocateClosureInstr : public TemplateAllocation<3> {
public:
enum Inputs { kFunctionPos = 0, kContextPos = 1 };
enum Inputs {
kFunctionPos = 0,
kContextPos = 1,
kInstantiatorTypeArgsPos = 2,
};
AllocateClosureInstr(const InstructionSource& source,
Value* closure_function,
Value* context,
Value* instantiator_type_args, // Optional.
bool is_generic,
intptr_t deopt_id)
: TemplateAllocation(source, deopt_id) {
: TemplateAllocation(source, deopt_id),
has_instantiator_type_args_(instantiator_type_args != nullptr),
is_generic_(is_generic) {
SetInputAt(kFunctionPos, closure_function);
SetInputAt(kContextPos, context);
if (has_instantiator_type_args_) {
SetInputAt(kInstantiatorTypeArgsPos, instantiator_type_args);
}
}
DECLARE_INSTRUCTION(AllocateClosure)
virtual CompileType ComputeType() const;
virtual intptr_t InputCount() const {
return has_instantiator_type_args() ? 3 : 2;
}
Value* closure_function() const { return inputs_[kFunctionPos]; }
Value* context() const { return inputs_[kContextPos]; }
bool has_instantiator_type_args() const {
return has_instantiator_type_args_;
}
bool is_generic() const { return is_generic_; }
const Function& known_function() const {
Value* const value = closure_function();
if (value->BindsToConstant()) {
@ -7400,6 +7420,10 @@ class AllocateClosureInstr : public TemplateAllocation<2> {
return &Slot::Closure_function();
case kContextPos:
return &Slot::Closure_context();
case kInstantiatorTypeArgsPos:
return has_instantiator_type_args()
? &Slot::Closure_instantiator_type_arguments()
: nullptr;
default:
return TemplateAllocation::SlotForInput(pos);
}
@ -7413,7 +7437,14 @@ class AllocateClosureInstr : public TemplateAllocation<2> {
return IsAllocatableInNewSpace(compiler::target::Closure::InstanceSize());
}
DECLARE_EMPTY_SERIALIZATION(AllocateClosureInstr, TemplateAllocation)
#define FIELD_LIST(F) \
F(const bool, has_instantiator_type_args_) \
F(const bool, is_generic_)
DECLARE_INSTRUCTION_SERIALIZABLE_FIELDS(AllocateClosureInstr,
TemplateAllocation,
FIELD_LIST)
#undef FIELD_LIST
private:
DISALLOW_COPY_AND_ASSIGN(AllocateClosureInstr);

View file

@ -642,6 +642,7 @@ class CallSites : public ValueObject {
const Function& function = static_call->function();
if (!inline_only_profitable_methods || function.IsRecognized() ||
function.IsDispatcherOrImplicitAccessor() ||
function.IsMethodExtractor() ||
(function.is_const() && function.IsGenerativeConstructor())) {
// Consider static call for further inlining. Note that it will
// still be subject to all the inlining heuristics.
@ -2625,6 +2626,11 @@ bool FlowGraphInliner::AlwaysInline(const Function& function) {
return true;
}
if (function.IsMethodExtractor()) {
// Tear-off closure allocation has about the same size as the call.
return true;
}
if (function.IsGetterFunction() || function.IsSetterFunction() ||
IsInlineableOperator(function) ||
(function.kind() == UntaggedFunction::kConstructor)) {

View file

@ -912,11 +912,16 @@ Fragment BaseFlowGraphBuilder::AllocateContext(
return Fragment(allocate);
}
Fragment BaseFlowGraphBuilder::AllocateClosure(TokenPosition position) {
Fragment BaseFlowGraphBuilder::AllocateClosure(TokenPosition position,
bool has_instantiator_type_args,
bool is_generic) {
Value* instantiator_type_args =
(has_instantiator_type_args ? Pop() : nullptr);
auto const context = Pop();
auto const function = Pop();
auto* allocate = new (Z) AllocateClosureInstr(
InstructionSource(position), function, context, GetNextDeoptId());
InstructionSource(position), function, context, instantiator_type_args,
is_generic, GetNextDeoptId());
Push(allocate);
return Fragment(allocate);
}

View file

@ -387,7 +387,9 @@ class BaseFlowGraphBuilder {
Fragment BooleanNegate();
Fragment AllocateContext(const ZoneGrowableArray<const Slot*>& scope);
// Top of the stack should be the closure function.
Fragment AllocateClosure(TokenPosition position = TokenPosition::kNoSource);
Fragment AllocateClosure(TokenPosition position,
bool has_instantiator_type_args,
bool is_generic);
Fragment CreateArray();
Fragment AllocateRecord(TokenPosition position, RecordShape shape);
Fragment AllocateSmallRecord(TokenPosition position, RecordShape shape);

View file

@ -1867,11 +1867,6 @@ Fragment StreamingFlowGraphBuilder::Goto(JoinEntryInstr* destination) {
return flow_graph_builder_->Goto(destination);
}
Fragment StreamingFlowGraphBuilder::BuildImplicitClosureCreation(
const Function& target) {
return flow_graph_builder_->BuildImplicitClosureCreation(target);
}
Fragment StreamingFlowGraphBuilder::CheckBoolean(TokenPosition position) {
return flow_graph_builder_->CheckBoolean(position);
}
@ -2582,7 +2577,7 @@ Fragment StreamingFlowGraphBuilder::BuildSuperPropertyGet(TokenPosition* p) {
ASSERT(!target.IsNull());
// Generate inline code for allocation closure object
// which captures `this`.
return BuildImplicitClosureCreation(target);
return B->BuildImplicitClosureCreation(position, target);
}
function = Resolver::ResolveDynamicFunction(Z, klass, getter_name);
if (!function.IsNull()) break;
@ -4431,7 +4426,11 @@ Fragment StreamingFlowGraphBuilder::BuildPartialTearoffInstantiation(
flow_graph_builder_->LoadNativeField(Slot::Closure_function());
instructions += LoadLocal(original_closure);
instructions += flow_graph_builder_->LoadNativeField(Slot::Closure_context());
instructions += flow_graph_builder_->AllocateClosure();
instructions += LoadLocal(original_closure);
instructions += flow_graph_builder_->LoadNativeField(
Slot::Closure_instantiator_type_arguments());
instructions += flow_graph_builder_->AllocateClosure(
position, /*has_instantiator_type_args=*/true, /*is_generic=*/false);
LocalVariable* new_closure = MakeTemporary();
intptr_t num_type_args = ReadListLength();
@ -4462,15 +4461,6 @@ Fragment StreamingFlowGraphBuilder::BuildPartialTearoffInstantiation(
StoreFieldInstr::Kind::kInitializing);
instructions += DropTemporary(&type_args_vec);
// Copy over the instantiator type arguments.
instructions += LoadLocal(new_closure);
instructions += LoadLocal(original_closure);
instructions += flow_graph_builder_->LoadNativeField(
Slot::Closure_instantiator_type_arguments());
instructions += flow_graph_builder_->StoreNativeField(
Slot::Closure_instantiator_type_arguments(),
StoreFieldInstr::Kind::kInitializing);
// Copy over the function type arguments.
instructions += LoadLocal(new_closure);
instructions += LoadLocal(original_closure);
@ -6075,17 +6065,15 @@ Fragment StreamingFlowGraphBuilder::BuildFunctionNode(
} else {
instructions += LoadLocal(parsed_function()->current_context_var());
}
instructions += flow_graph_builder_->AllocateClosure();
LocalVariable* closure = MakeTemporary();
// The function signature can have uninstantiated class type parameters.
if (!function.HasInstantiatedSignature(kCurrentClass)) {
instructions += LoadLocal(closure);
const bool has_instantiator_type_args =
!function.HasInstantiatedSignature(kCurrentClass);
if (has_instantiator_type_args) {
instructions += LoadInstantiatorTypeArguments();
instructions += flow_graph_builder_->StoreNativeField(
Slot::Closure_instantiator_type_arguments(),
StoreFieldInstr::Kind::kInitializing);
}
instructions += flow_graph_builder_->AllocateClosure(
position, has_instantiator_type_args, function.IsGeneric());
LocalVariable* closure = MakeTemporary();
// TODO(30455): We only need to save these if the closure uses any captured
// type parameters.
@ -6095,16 +6083,6 @@ Fragment StreamingFlowGraphBuilder::BuildFunctionNode(
Slot::Closure_function_type_arguments(),
StoreFieldInstr::Kind::kInitializing);
if (function.IsGeneric()) {
// Only generic functions need to have properly initialized
// delayed and default type arguments.
instructions += LoadLocal(closure);
instructions += Constant(Object::empty_type_arguments());
instructions += flow_graph_builder_->StoreNativeField(
Slot::Closure_delayed_type_arguments(),
StoreFieldInstr::Kind::kInitializing);
}
return instructions;
}

View file

@ -261,7 +261,6 @@ class StreamingFlowGraphBuilder : public KernelReaderHelper {
JoinEntryInstr* BuildJoinEntry();
JoinEntryInstr* BuildJoinEntry(intptr_t try_index);
Fragment Goto(JoinEntryInstr* destination);
Fragment BuildImplicitClosureCreation(const Function& target);
Fragment CheckBoolean(TokenPosition position);
Fragment CheckArgumentType(LocalVariable* variable, const AbstractType& type);
Fragment RecordCoverage(TokenPosition position);

View file

@ -2155,6 +2155,7 @@ Fragment FlowGraphBuilder::BuildTypedDataFactoryConstructor(
}
Fragment FlowGraphBuilder::BuildImplicitClosureCreation(
TokenPosition position,
const Function& target) {
// The function cannot be local and have parent generic functions.
ASSERT(!target.HasGenericParent());
@ -2163,25 +2164,14 @@ Fragment FlowGraphBuilder::BuildImplicitClosureCreation(
Fragment fragment;
fragment += Constant(target);
fragment += LoadLocal(parsed_function_->receiver_var());
fragment += AllocateClosure();
LocalVariable* closure = MakeTemporary();
// The function signature can have uninstantiated class type parameters.
if (!target.HasInstantiatedSignature(kCurrentClass)) {
fragment += LoadLocal(closure);
const bool has_instantiator_type_args =
!target.HasInstantiatedSignature(kCurrentClass);
if (has_instantiator_type_args) {
fragment += LoadInstantiatorTypeArguments();
fragment += StoreNativeField(Slot::Closure_instantiator_type_arguments(),
StoreFieldInstr::Kind::kInitializing);
}
if (target.IsGeneric()) {
// Only generic functions need to have properly initialized
// delayed and default type arguments.
fragment += LoadLocal(closure);
fragment += Constant(Object::empty_type_arguments());
fragment += StoreNativeField(Slot::Closure_delayed_type_arguments(),
StoreFieldInstr::Kind::kInitializing);
}
fragment +=
AllocateClosure(position, has_instantiator_type_args, target.IsGeneric());
return fragment;
}
@ -2512,7 +2502,7 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfMethodExtractor(
Fragment body(normal_entry);
body += CheckStackOverflowInPrologue(method.token_pos());
body += BuildImplicitClosureCreation(function);
body += BuildImplicitClosureCreation(TokenPosition::kNoSource, function);
body += Return(TokenPosition::kNoSource);
// There is no prologue code for a method extractor.

View file

@ -260,7 +260,8 @@ class FlowGraphBuilder : public BaseFlowGraphBuilder {
Fragment ThrowLateInitializationError(TokenPosition position,
const char* throw_method_name,
const String& name);
Fragment BuildImplicitClosureCreation(const Function& target);
Fragment BuildImplicitClosureCreation(TokenPosition position,
const Function& target);
Fragment EvaluateAssertion();
Fragment CheckVariableTypeInCheckedMode(const AbstractType& dst_type,

File diff suppressed because it is too large

View file

@ -1183,11 +1183,14 @@ VM_TYPE_TESTING_STUB_CODE_LIST(GENERATE_BREAKPOINT_STUB)
// Input (preserved):
// AllocateClosureABI::kFunctionReg: closure function.
// AllocateClosureABI::kContextReg: closure context.
// AllocateClosureABI::kInstantiatorTypeArgs: instantiator type arguments.
// Output:
// AllocateClosureABI::kResultReg: new allocated Closure object.
// Clobbered:
// AllocateClosureABI::kScratchReg
void StubCodeCompiler::GenerateAllocateClosureStub() {
void StubCodeCompiler::GenerateAllocateClosureStub(
bool has_instantiator_type_args,
bool is_generic) {
const intptr_t instance_size =
target::RoundedAllocationSize(target::Closure::InstanceSize());
__ EnsureHasClassIdInDEBUG(kFunctionCid, AllocateClosureABI::kFunctionReg,
@ -1211,15 +1214,23 @@ void StubCodeCompiler::GenerateAllocateClosureStub() {
// Since the TryAllocateObject above did not go to the slow path, we're
// guaranteed an object in new space here, and thus no barriers are needed.
__ LoadObject(AllocateClosureABI::kScratchReg, NullObject());
__ StoreToSlotNoBarrier(AllocateClosureABI::kScratchReg,
AllocateClosureABI::kResultReg,
Slot::Closure_instantiator_type_arguments());
if (has_instantiator_type_args) {
__ StoreToSlotNoBarrier(AllocateClosureABI::kInstantiatorTypeArgsReg,
AllocateClosureABI::kResultReg,
Slot::Closure_instantiator_type_arguments());
} else {
__ StoreToSlotNoBarrier(AllocateClosureABI::kScratchReg,
AllocateClosureABI::kResultReg,
Slot::Closure_instantiator_type_arguments());
}
__ StoreToSlotNoBarrier(AllocateClosureABI::kScratchReg,
AllocateClosureABI::kResultReg,
Slot::Closure_function_type_arguments());
__ StoreToSlotNoBarrier(AllocateClosureABI::kScratchReg,
AllocateClosureABI::kResultReg,
Slot::Closure_delayed_type_arguments());
if (!is_generic) {
__ StoreToSlotNoBarrier(AllocateClosureABI::kScratchReg,
AllocateClosureABI::kResultReg,
Slot::Closure_delayed_type_arguments());
}
__ StoreToSlotNoBarrier(AllocateClosureABI::kFunctionReg,
AllocateClosureABI::kResultReg,
Slot::Closure_function());
@ -1229,6 +1240,12 @@ void StubCodeCompiler::GenerateAllocateClosureStub() {
__ StoreToSlotNoBarrier(AllocateClosureABI::kScratchReg,
AllocateClosureABI::kResultReg,
Slot::Closure_hash());
if (is_generic) {
__ LoadObject(AllocateClosureABI::kScratchReg, EmptyTypeArguments());
__ StoreToSlotNoBarrier(AllocateClosureABI::kScratchReg,
AllocateClosureABI::kResultReg,
Slot::Closure_delayed_type_arguments());
}
#if defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
if (FLAG_precompiled_mode) {
// Set the closure entry point in precompiled mode, either to the function
@ -1255,7 +1272,23 @@ void StubCodeCompiler::GenerateAllocateClosureStub() {
__ PushObject(NullObject()); // Space on the stack for the return value.
__ PushRegistersInOrder(
{AllocateClosureABI::kFunctionReg, AllocateClosureABI::kContextReg});
__ CallRuntime(kAllocateClosureRuntimeEntry, 2);
if (has_instantiator_type_args) {
__ PushRegister(AllocateClosureABI::kInstantiatorTypeArgsReg);
} else {
__ PushObject(NullObject());
}
if (is_generic) {
__ PushObject(EmptyTypeArguments());
} else {
__ PushObject(NullObject());
}
__ CallRuntime(kAllocateClosureRuntimeEntry, 4);
if (has_instantiator_type_args) {
__ Drop(1);
__ PopRegister(AllocateClosureABI::kInstantiatorTypeArgsReg);
} else {
__ Drop(2);
}
__ PopRegister(AllocateClosureABI::kContextReg);
__ PopRegister(AllocateClosureABI::kFunctionReg);
__ PopRegister(AllocateClosureABI::kResultReg);
@ -1267,6 +1300,26 @@ void StubCodeCompiler::GenerateAllocateClosureStub() {
__ Ret();
}
void StubCodeCompiler::GenerateAllocateClosureStub() {
GenerateAllocateClosureStub(/*has_instantiator_type_args=*/false,
/*is_generic=*/false);
}
void StubCodeCompiler::GenerateAllocateClosureGenericStub() {
GenerateAllocateClosureStub(/*has_instantiator_type_args=*/false,
/*is_generic=*/true);
}
void StubCodeCompiler::GenerateAllocateClosureTAStub() {
GenerateAllocateClosureStub(/*has_instantiator_type_args=*/true,
/*is_generic=*/false);
}
void StubCodeCompiler::GenerateAllocateClosureTAGenericStub() {
GenerateAllocateClosureStub(/*has_instantiator_type_args=*/true,
/*is_generic=*/true);
}
// Generates allocation stub for _GrowableList class.
// This stub exists solely for performance reasons: default allocation
// stub is slower as it doesn't use specialized inline allocation.

View file

@ -57,12 +57,6 @@ class StubCodeCompiler {
Assembler* assembler;
#if !defined(TARGET_ARCH_IA32)
void GenerateBuildMethodExtractorStub(const Code& closure_allocation_stub,
const Code& context_allocation_stub,
bool generic);
#endif
void EnsureIsNewOrRemembered();
static ArrayPtr BuildStaticCallsTable(
Zone* zone,
@ -202,6 +196,10 @@ class StubCodeCompiler {
// InitLateFinalInstanceField stubs.
void GenerateInitLateInstanceFieldStub(bool is_final);
// Common function for generating AllocateClosure[TA][Generic] stubs.
void GenerateAllocateClosureStub(bool has_instantiator_type_args,
bool is_generic);
// Common function for generating Allocate<TypedData>Array stubs.
void GenerateAllocateTypedDataArrayStub(intptr_t cid);

View file

@ -207,62 +207,6 @@ void StubCodeCompiler::GenerateSharedStub(
perform_runtime_call);
}
// R1: The extracted method.
// R4: The type_arguments_field_offset (or 0)
// SP+0: The object from which we are tearing a method off.
void StubCodeCompiler::GenerateBuildMethodExtractorStub(
const Code& closure_allocation_stub,
const Code& context_allocation_stub,
bool generic) {
const intptr_t kReceiverOffset = target::frame_layout.param_end_from_fp + 1;
__ EnterStubFrame();
// Build type_arguments vector (or null)
__ cmp(R4, Operand(0));
__ ldr(R3, Address(THR, target::Thread::object_null_offset()), EQ);
__ ldr(R0, Address(FP, kReceiverOffset * target::kWordSize), NE);
__ ldr(R3, Address(R0, R4), NE);
// Push type arguments.
__ Push(R3);
// Put function and context (receiver) in right registers for
// AllocateClosure stub.
__ MoveRegister(AllocateClosureABI::kFunctionReg, R1);
__ ldr(AllocateClosureABI::kContextReg,
Address(FP, target::kWordSize * kReceiverOffset));
// Allocate closure. After this point, we only use the registers in
// AllocateClosureABI.
__ LoadObject(CODE_REG, closure_allocation_stub);
__ ldr(AllocateClosureABI::kScratchReg,
FieldAddress(CODE_REG, target::Code::entry_point_offset()));
__ blx(AllocateClosureABI::kScratchReg);
// Populate closure object.
__ Pop(AllocateClosureABI::kScratchReg); // Pop type arguments.
__ StoreIntoObjectNoBarrier(
AllocateClosureABI::kResultReg,
FieldAddress(AllocateClosureABI::kResultReg,
target::Closure::instantiator_type_arguments_offset()),
AllocateClosureABI::kScratchReg);
// Keep delayed_type_arguments as null if non-generic (see Closure::New).
if (generic) {
__ LoadObject(AllocateClosureABI::kScratchReg, EmptyTypeArguments());
__ StoreIntoObjectNoBarrier(
AllocateClosureABI::kResultReg,
FieldAddress(AllocateClosureABI::kResultReg,
target::Closure::delayed_type_arguments_offset()),
AllocateClosureABI::kScratchReg);
}
__ LeaveStubFrame();
// No-op if the two are the same.
__ MoveRegister(R0, AllocateClosureABI::kResultReg);
__ Ret();
}
void StubCodeCompiler::GenerateEnterSafepointStub() {
RegisterSet all_registers;
all_registers.AddAllGeneralRegisters();

View file

@ -657,64 +657,6 @@ void StubCodeCompiler::GenerateFfiCallbackTrampolineStub() {
#endif // !defined(HOST_ARCH_ARM64)
}
// R1: The extracted method.
// R4: The type_arguments_field_offset (or 0)
void StubCodeCompiler::GenerateBuildMethodExtractorStub(
const Code& closure_allocation_stub,
const Code& context_allocation_stub,
bool generic) {
const intptr_t kReceiverOffset = target::frame_layout.param_end_from_fp + 1;
__ EnterStubFrame();
// Build type_arguments vector (or null)
Label no_type_args;
__ ldr(R3, Address(THR, target::Thread::object_null_offset()), kEightBytes);
__ cmp(R4, Operand(0));
__ b(&no_type_args, EQ);
__ ldr(R0, Address(FP, kReceiverOffset * target::kWordSize));
__ LoadCompressed(R3, Address(R0, R4));
__ Bind(&no_type_args);
// Push type arguments.
__ Push(R3);
// Put function and context (receiver) in right registers for
// AllocateClosure stub.
__ MoveRegister(AllocateClosureABI::kFunctionReg, R1);
__ ldr(AllocateClosureABI::kContextReg,
Address(FP, target::kWordSize * kReceiverOffset));
// Allocate closure. After this point, we only use the registers in
// AllocateClosureABI.
__ LoadObject(CODE_REG, closure_allocation_stub);
__ ldr(AllocateClosureABI::kScratchReg,
FieldAddress(CODE_REG, target::Code::entry_point_offset()));
__ blr(AllocateClosureABI::kScratchReg);
// Populate closure object.
__ Pop(AllocateClosureABI::kScratchReg); // Pop type arguments.
__ StoreCompressedIntoObjectNoBarrier(
AllocateClosureABI::kResultReg,
FieldAddress(AllocateClosureABI::kResultReg,
target::Closure::instantiator_type_arguments_offset()),
AllocateClosureABI::kScratchReg);
// Keep delayed_type_arguments as null if non-generic (see Closure::New).
if (generic) {
__ LoadObject(AllocateClosureABI::kScratchReg, EmptyTypeArguments());
__ StoreCompressedIntoObjectNoBarrier(
AllocateClosureABI::kResultReg,
FieldAddress(AllocateClosureABI::kResultReg,
target::Closure::delayed_type_arguments_offset()),
AllocateClosureABI::kScratchReg);
}
__ LeaveStubFrame();
// No-op if the two are the same.
__ MoveRegister(R0, AllocateClosureABI::kResultReg);
__ Ret();
}
void StubCodeCompiler::GenerateDispatchTableNullErrorStub() {
__ EnterStubFrame();
__ SmiTag(DispatchTableNullErrorABI::kClassIdReg);

View file

@ -528,65 +528,6 @@ void StubCodeCompiler::GenerateFfiCallbackTrampolineStub() {
#endif
}
// T1: The extracted method.
// T4: The type_arguments_field_offset (or 0)
void StubCodeCompiler::GenerateBuildMethodExtractorStub(
const Code& closure_allocation_stub,
const Code& context_allocation_stub,
bool generic) {
const intptr_t kReceiverOffset = target::frame_layout.param_end_from_fp + 1;
__ EnterStubFrame();
// Build type_arguments vector (or null)
Label no_type_args;
__ lx(T3, Address(THR, target::Thread::object_null_offset()));
__ CompareImmediate(T4, 0);
__ BranchIf(EQ, &no_type_args);
__ lx(T0, Address(FP, kReceiverOffset * target::kWordSize));
__ add(TMP, T0, T4);
__ LoadCompressed(T3, Address(TMP, 0));
__ Bind(&no_type_args);
// Push type arguments.
__ PushRegister(T3);
// Put function and context (receiver) in right registers for
// AllocateClosure stub.
__ MoveRegister(AllocateClosureABI::kFunctionReg, T1);
__ lx(AllocateClosureABI::kContextReg,
Address(FP, target::kWordSize * kReceiverOffset));
// Allocate closure. After this point, we only use the registers in
// AllocateClosureABI.
__ LoadObject(CODE_REG, closure_allocation_stub);
__ lx(AllocateClosureABI::kScratchReg,
FieldAddress(CODE_REG, target::Code::entry_point_offset()));
__ jalr(AllocateClosureABI::kScratchReg);
// Populate closure object.
__ PopRegister(AllocateClosureABI::kScratchReg); // Pop type arguments.
__ StoreCompressedIntoObjectNoBarrier(
AllocateClosureABI::kResultReg,
FieldAddress(AllocateClosureABI::kResultReg,
target::Closure::instantiator_type_arguments_offset()),
AllocateClosureABI::kScratchReg);
// Keep delayed_type_arguments as null if non-generic (see Closure::New).
if (generic) {
__ LoadObject(AllocateClosureABI::kScratchReg, EmptyTypeArguments());
__ StoreCompressedIntoObjectNoBarrier(
AllocateClosureABI::kResultReg,
FieldAddress(AllocateClosureABI::kResultReg,
target::Closure::delayed_type_arguments_offset()),
AllocateClosureABI::kScratchReg);
}
__ LeaveStubFrame();
// No-op if the two are the same.
__ MoveRegister(A0, AllocateClosureABI::kResultReg);
__ Ret();
}
void StubCodeCompiler::GenerateDispatchTableNullErrorStub() {
__ EnterStubFrame();
__ SmiTag(DispatchTableNullErrorABI::kClassIdReg);

View file

@ -615,61 +615,6 @@ void StubCodeCompiler::GenerateFfiCallbackTrampolineStub() {
#endif
}
// RBX: The extracted method.
// RDX: The type_arguments_field_offset (or 0)
void StubCodeCompiler::GenerateBuildMethodExtractorStub(
const Code& closure_allocation_stub,
const Code& context_allocation_stub,
bool generic) {
const intptr_t kReceiverOffsetInWords =
target::frame_layout.param_end_from_fp + 1;
__ EnterStubFrame();
// Push type_arguments vector (or null)
Label no_type_args;
__ movq(RCX, Address(THR, target::Thread::object_null_offset()));
__ cmpq(RDX, Immediate(0));
__ j(EQUAL, &no_type_args, Assembler::kNearJump);
__ movq(RAX, Address(RBP, target::kWordSize * kReceiverOffsetInWords));
__ LoadCompressed(RCX, Address(RAX, RDX, TIMES_1, 0));
__ Bind(&no_type_args);
__ pushq(RCX);
// Put function and context (receiver) in right registers for
// AllocateClosure stub.
__ MoveRegister(AllocateClosureABI::kFunctionReg, RBX);
__ movq(AllocateClosureABI::kContextReg,
Address(RBP, target::kWordSize * kReceiverOffsetInWords));
// Allocate closure. After this point, we only use the registers in
// AllocateClosureABI.
__ LoadObject(CODE_REG, closure_allocation_stub);
__ call(FieldAddress(CODE_REG, target::Code::entry_point_offset()));
// Populate closure object.
__ popq(AllocateClosureABI::kScratchReg); // Pop type argument vector.
__ StoreCompressedIntoObjectNoBarrier(
AllocateClosureABI::kResultReg,
FieldAddress(AllocateClosureABI::kResultReg,
target::Closure::instantiator_type_arguments_offset()),
AllocateClosureABI::kScratchReg);
// Keep delayed_type_arguments as null if non-generic (see Closure::New).
if (generic) {
__ LoadObject(AllocateClosureABI::kScratchReg, EmptyTypeArguments());
__ StoreCompressedIntoObjectNoBarrier(
AllocateClosureABI::kResultReg,
FieldAddress(AllocateClosureABI::kResultReg,
target::Closure::delayed_type_arguments_offset()),
AllocateClosureABI::kScratchReg);
}
__ LeaveStubFrame();
// No-op if the two are the same.
__ MoveRegister(RAX, AllocateClosureABI::kResultReg);
__ Ret();
}
void StubCodeCompiler::GenerateDispatchTableNullErrorStub() {
__ EnterStubFrame();
__ SmiTag(DispatchTableNullErrorABI::kClassIdReg);

View file

@ -504,6 +504,7 @@ struct AllocateClosureABI {
static constexpr Register kResultReg = AllocateObjectABI::kResultReg;
static constexpr Register kFunctionReg = R1;
static constexpr Register kContextReg = R2;
static constexpr Register kInstantiatorTypeArgsReg = R3;
static constexpr Register kScratchReg = R4;
};

View file

@ -342,6 +342,7 @@ struct AllocateClosureABI {
static constexpr Register kResultReg = AllocateObjectABI::kResultReg;
static constexpr Register kFunctionReg = R1;
static constexpr Register kContextReg = R2;
static constexpr Register kInstantiatorTypeArgsReg = R3;
static constexpr Register kScratchReg = R4;
};

View file

@ -244,6 +244,7 @@ struct AllocateClosureABI {
static constexpr Register kResultReg = AllocateObjectABI::kResultReg;
static constexpr Register kFunctionReg = EBX;
static constexpr Register kContextReg = ECX;
static constexpr Register kInstantiatorTypeArgsReg = EDI;
static constexpr Register kScratchReg = EDX;
};

View file

@ -349,8 +349,9 @@ struct AllocateObjectABI {
// ABI for AllocateClosureStub.
struct AllocateClosureABI {
static constexpr Register kResultReg = AllocateObjectABI::kResultReg;
static constexpr Register kFunctionReg = T2;
static constexpr Register kContextReg = T3;
static constexpr Register kFunctionReg = T1;
static constexpr Register kContextReg = T2;
static constexpr Register kInstantiatorTypeArgsReg = T3;
static constexpr Register kScratchReg = T4;
};

View file

@ -308,6 +308,7 @@ struct AllocateClosureABI {
static constexpr Register kResultReg = AllocateObjectABI::kResultReg;
static constexpr Register kFunctionReg = RBX;
static constexpr Register kContextReg = RDX;
static constexpr Register kInstantiatorTypeArgsReg = RCX;
static constexpr Register kScratchReg = R13;
};

View file

@ -884,6 +884,7 @@ ErrorPtr Dart::InitIsolateGroupFromSnapshot(
return Error::null();
}
#if !defined(DART_PRECOMPILED_RUNTIME)
// The runtime assumes it can create certain kinds of objects at-will without
// a check whether their class need to be finalized first.
//
@ -902,6 +903,7 @@ static void FinalizeBuiltinClasses(Thread* thread) {
}
}
}
#endif // !defined(DART_PRECOMPILED_RUNTIME)
ErrorPtr Dart::InitializeIsolateGroup(Thread* T,
const uint8_t* snapshot_data,
@ -920,40 +922,12 @@ ErrorPtr Dart::InitializeIsolateGroup(Thread* T,
auto IG = T->isolate_group();
DEBUG_ONLY(IG->heap()->Verify("InitializeIsolate", kForbidMarked));
#if defined(DART_PRECOMPILED_RUNTIME)
const bool kIsAotRuntime = true;
#else
const bool kIsAotRuntime = false;
#if !defined(DART_PRECOMPILED_RUNTIME)
FinalizeBuiltinClasses(T);
#endif
auto object_store = IG->object_store();
if (kIsAotRuntime) {
#if !defined(TARGET_ARCH_IA32)
ASSERT(object_store->build_generic_method_extractor_code() != Code::null());
ASSERT(object_store->build_nongeneric_method_extractor_code() !=
Code::null());
#endif
} else {
FinalizeBuiltinClasses(T);
#if !defined(TARGET_ARCH_IA32)
if (IG != Dart::vm_isolate_group()) {
if (object_store->build_generic_method_extractor_code() != nullptr ||
object_store->build_nongeneric_method_extractor_code() != nullptr) {
SafepointWriteRwLocker ml(T, IG->program_lock());
if (object_store->build_generic_method_extractor_code() != nullptr) {
object_store->set_build_generic_method_extractor_code(Code::Handle(
StubCode::GetBuildGenericMethodExtractorStub(nullptr)));
}
if (object_store->build_nongeneric_method_extractor_code() != nullptr) {
object_store->set_build_nongeneric_method_extractor_code(Code::Handle(
StubCode::GetBuildNonGenericMethodExtractorStub(nullptr)));
}
}
}
#endif // !defined(TARGET_ARCH_IA32)
}
if (snapshot_data == nullptr || kernel_buffer != nullptr) {
auto object_store = IG->object_store();
error ^= object_store->PreallocateObjects();
if (!error.IsNull()) {
return error.ptr();

View file

@ -26346,7 +26346,7 @@ ClosurePtr Closure::New(const TypeArguments& instantiator_type_arguments,
// We store null delayed type arguments, not empty ones, in closures with
// non-generic functions a) to make method extraction slightly faster and
// b) to make the Closure::IsGeneric check fast.
// Keep in sync with StubCodeCompiler::GenerateBuildMethodExtractorStub.
// Keep in sync with StubCodeCompiler::GenerateAllocateClosureStub.
return Closure::New(instantiator_type_arguments, function_type_arguments,
function.IsGeneric() ? Object::empty_type_arguments()
: Object::null_type_arguments(),

View file

@ -206,8 +206,6 @@ class ObjectPointerVisitor;
RW(Array, unique_dynamic_targets) \
RW(GrowableObjectArray, megamorphic_cache_table) \
RW(GrowableObjectArray, ffi_callback_code) \
RW(Code, build_generic_method_extractor_code) \
RW(Code, build_nongeneric_method_extractor_code) \
RW(Code, dispatch_table_null_error_stub) \
RW(Code, late_initialization_error_stub_with_fpu_regs_stub) \
RW(Code, late_initialization_error_stub_without_fpu_regs_stub) \
@ -246,6 +244,9 @@ class ObjectPointerVisitor;
RW(Code, allocate_int32x4_array_stub) \
RW(Code, allocate_float64x2_array_stub) \
RW(Code, allocate_closure_stub) \
RW(Code, allocate_closure_generic_stub) \
RW(Code, allocate_closure_ta_stub) \
RW(Code, allocate_closure_ta_generic_stub) \
RW(Code, allocate_context_stub) \
RW(Code, allocate_growable_array_stub) \
RW(Code, allocate_object_stub) \
@ -345,6 +346,9 @@ class ObjectPointerVisitor;
DO(allocate_int32x4_array_stub, AllocateInt32x4Array) \
DO(allocate_float64x2_array_stub, AllocateFloat64x2Array) \
DO(allocate_closure_stub, AllocateClosure) \
DO(allocate_closure_generic_stub, AllocateClosureGeneric) \
DO(allocate_closure_ta_stub, AllocateClosureTA) \
DO(allocate_closure_ta_generic_stub, AllocateClosureTAGeneric) \
DO(allocate_context_stub, AllocateContext) \
DO(allocate_growable_array_stub, AllocateGrowableArray) \
DO(allocate_object_stub, AllocateObject) \

View file

@ -686,19 +686,24 @@ DEFINE_RUNTIME_ENTRY(SubtypeCheck, 5) {
UNREACHABLE();
}
// Allocate a new closure and initializes its function and context fields with
// the arguments and all other fields to null.
// Allocate a new closure and initializes its function, context,
// instantiator type arguments and delayed type arguments fields.
// Arg0: function.
// Arg1: context.
// Arg2: instantiator type arguments.
// Arg3: delayed type arguments.
// Return value: newly allocated closure.
DEFINE_RUNTIME_ENTRY(AllocateClosure, 2) {
DEFINE_RUNTIME_ENTRY(AllocateClosure, 4) {
const auto& function = Function::CheckedHandle(zone, arguments.ArgAt(0));
const auto& context = Object::Handle(zone, arguments.ArgAt(1));
const auto& instantiator_type_args =
TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
const auto& delayed_type_args =
TypeArguments::CheckedHandle(zone, arguments.ArgAt(3));
const Closure& closure = Closure::Handle(
zone,
Closure::New(Object::null_type_arguments(), Object::null_type_arguments(),
Object::null_type_arguments(), function, context,
SpaceForRuntimeAllocation()));
zone, Closure::New(instantiator_type_args, Object::null_type_arguments(),
delayed_type_args, function, context,
SpaceForRuntimeAllocation()));
arguments.SetReturn(closure);
RuntimeAllocationEpilogue(thread);
}

View file

@ -312,50 +312,6 @@ CodePtr StubCode::GetAllocationStubForTypedData(classid_t class_id) {
}
#endif // !defined(DART_PRECOMPILED_RUNTIME)
#if !defined(TARGET_ARCH_IA32)
CodePtr StubCode::GetBuildMethodExtractorStub(compiler::ObjectPoolBuilder* pool,
bool generic) {
#if !defined(DART_PRECOMPILED_RUNTIME)
auto thread = Thread::Current();
auto Z = thread->zone();
auto object_store = thread->isolate_group()->object_store();
const auto& closure_allocation_stub =
Code::ZoneHandle(Z, object_store->allocate_closure_stub());
const auto& context_allocation_stub =
Code::ZoneHandle(Z, object_store->allocate_context_stub());
compiler::ObjectPoolBuilder object_pool_builder;
compiler::Assembler assembler(pool != nullptr ? pool : &object_pool_builder);
CompilerState compiler_state(thread, /*is_aot=*/FLAG_precompiled_mode,
/*is_optimizing=*/false);
compiler::StubCodeCompiler stubCodeCompiler(&assembler, nullptr);
stubCodeCompiler.GenerateBuildMethodExtractorStub(
closure_allocation_stub, context_allocation_stub, generic);
const char* name = generic ? "BuildGenericMethodExtractor"
: "BuildNonGenericMethodExtractor";
const Code& stub = Code::Handle(Code::FinalizeCodeAndNotify(
name, nullptr, &assembler, Code::PoolAttachment::kNotAttachPool,
/*optimized=*/false));
if (pool == nullptr) {
stub.set_object_pool(ObjectPool::NewFromBuilder(object_pool_builder));
}
#ifndef PRODUCT
if (FLAG_support_disassembler && FLAG_disassemble_stubs) {
Disassembler::DisassembleStub(name, stub);
}
#endif // !PRODUCT
return stub.ptr();
#else // !defined(DART_PRECOMPILED_RUNTIME)
UNIMPLEMENTED();
return nullptr;
#endif // !defined(DART_PRECOMPILED_RUNTIME)
}
#endif // !defined(TARGET_ARCH_IA32)
const Code& StubCode::UnoptimizedStaticCallEntry(intptr_t num_args_tested) {
switch (num_args_tested) {
case 0:
@ -386,8 +342,6 @@ const char* StubCode::NameOfStub(uword entry_point) {
return "_iso_stub_" #name "Stub"; \
}
OBJECT_STORE_STUB_CODE_LIST(MATCH)
MATCH(build_generic_method_extractor_code, BuildGenericMethodExtractor)
MATCH(build_nongeneric_method_extractor_code, BuildNonGenericMethodExtractor)
#undef MATCH
return nullptr;
}

View file

@ -86,17 +86,6 @@ class StubCode : public AllStatic {
static CodePtr GetAllocationStubForTypedData(classid_t class_id);
#endif // !defined(DART_PRECOMPILED_RUNTIME)
#if !defined(TARGET_ARCH_IA32)
static CodePtr GetBuildGenericMethodExtractorStub(
compiler::ObjectPoolBuilder* pool) {
return GetBuildMethodExtractorStub(pool, /*generic=*/true);
}
static CodePtr GetBuildNonGenericMethodExtractorStub(
compiler::ObjectPoolBuilder* pool) {
return GetBuildMethodExtractorStub(pool, /*generic=*/false);
}
#endif
#if !defined(DART_PRECOMPILED_RUNTIME)
// Generate the stub and finalize the generated code into the stub
// code executable area.
@ -132,9 +121,6 @@ class StubCode : public AllStatic {
private:
friend class MegamorphicCacheTable;
static CodePtr GetBuildMethodExtractorStub(compiler::ObjectPoolBuilder* pool,
bool generic);
enum {
#define STUB_CODE_ENTRY(name) k##name##Index,
VM_STUB_CODE_LIST(STUB_CODE_ENTRY)

View file

@ -51,6 +51,9 @@ namespace dart {
V(AllocateMintSharedWithFPURegs) \
V(AllocateMintSharedWithoutFPURegs) \
V(AllocateClosure) \
V(AllocateClosureGeneric) \
V(AllocateClosureTA) \
V(AllocateClosureTAGeneric) \
V(AllocateContext) \
V(AllocateGrowableArray) \
V(AllocateObject) \

View file

@ -176,6 +176,8 @@ class Thread;
V(BoolPtr, bool_true_, Object::bool_true().ptr(), nullptr) \
V(BoolPtr, bool_false_, Object::bool_false().ptr(), nullptr) \
V(ArrayPtr, empty_array_, Object::empty_array().ptr(), nullptr) \
V(TypeArgumentsPtr, empty_type_arguments_, \
Object::empty_type_arguments().ptr(), nullptr) \
V(TypePtr, dynamic_type_, Type::dynamic_type().ptr(), nullptr)
// List of VM-global objects/addresses cached in each Thread object.