Implemented missing instructions in ia32, more sharing, removed bailouts, enabled optimizations on ia32 with the new compiler.

Review URL: https://chromiumcodereview.appspot.com//10538024

git-svn-id: https://dart.googlecode.com/svn/branches/bleeding_edge/dart@8403 260f80e4-7a28-3924-810f-c04153c831b5
srdjan@google.com 2012-06-07 18:06:15 +00:00
parent 189934681b
commit 1c870b27e0
9 changed files with 557 additions and 505 deletions

View file

@@ -2,10 +2,12 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
#include "vm/flow_graph_compiler_shared.h"
#include "vm/globals.h" // Needed here to get TARGET_ARCH_XXX.
#include "vm/flow_graph_compiler.h"
#include "vm/debugger.h"
#include "vm/intermediate_language.h"
#include "vm/il_printer.h"
#include "vm/intrinsifier.h"
#include "vm/longjump.h"
#include "vm/parser.h"
@@ -13,13 +15,15 @@
namespace dart {
DECLARE_FLAG(bool, code_comments);
DECLARE_FLAG(bool, enable_type_checks);
DECLARE_FLAG(bool, intrinsify);
DECLARE_FLAG(int, optimization_counter_threshold);
DECLARE_FLAG(bool, trace_functions);
DECLARE_FLAG(bool, report_usage_count);
DECLARE_FLAG(bool, trace_functions);
DECLARE_FLAG(int, optimization_counter_threshold);
FlowGraphCompilerShared::FlowGraphCompilerShared(
FlowGraphCompiler::FlowGraphCompiler(
Assembler* assembler,
const ParsedFunction& parsed_function,
const GrowableArray<BlockEntryInstr*>& block_order,
@@ -38,7 +42,7 @@ FlowGraphCompilerShared::FlowGraphCompilerShared(
}
FlowGraphCompilerShared::~FlowGraphCompilerShared() {
FlowGraphCompiler::~FlowGraphCompiler() {
// BlockInfos are zone-allocated, so their destructors are not called.
// Verify the labels explicitly here.
for (int i = 0; i < block_info_.length(); ++i) {
@@ -47,7 +51,8 @@ FlowGraphCompilerShared::~FlowGraphCompilerShared() {
}
}
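A note on the destructor above: BlockInfo objects live in a zone (arena), which releases memory in bulk without ever running destructors, so any invariant a destructor would normally assert has to be checked by hand. Below is a minimal, self-contained sketch of that allocation style; the Zone and BlockInfo types are simplified stand-ins, not the VM's classes.

#include <cassert>
#include <cstddef>
#include <new>
#include <vector>

struct Zone {  // hypothetical bump-style arena; freeing runs no destructors
  std::vector<char*> chunks;
  void* Allocate(std::size_t size) {
    char* p = new char[size];
    chunks.push_back(p);
    return p;
  }
  ~Zone() { for (char* p : chunks) delete[] p; }  // raw memory only
};

struct BlockInfo { bool label_bound = false; };  // stand-in

int main() {
  Zone zone;
  BlockInfo* info = new (zone.Allocate(sizeof(BlockInfo))) BlockInfo();
  info->label_bound = true;
  // ~BlockInfo() will never run; verify the invariant explicitly instead,
  // exactly as the compiler's destructor verifies its labels.
  assert(info->label_bound);
  return 0;
}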
void FlowGraphCompilerShared::InitCompiler() {
void FlowGraphCompiler::InitCompiler() {
pc_descriptors_list_ = new DescriptorList();
exception_handlers_list_ = new ExceptionHandlerList();
block_info_.Clear();
@@ -57,36 +62,85 @@ void FlowGraphCompilerShared::InitCompiler() {
}
intptr_t FlowGraphCompilerShared::StackSize() const {
void FlowGraphCompiler::VisitBlocks() {
for (intptr_t i = 0; i < block_order().length(); ++i) {
assembler()->Comment("B%d", i);
// Compile the block entry.
set_current_block(block_order()[i]);
current_block()->PrepareEntry(this);
Instruction* instr = current_block()->StraightLineSuccessor();
// Compile all successors until an exit, branch, or a block entry.
while ((instr != NULL) && !instr->IsBlockEntry()) {
if (FLAG_code_comments) EmitComment(instr);
ASSERT(instr->locs() != NULL);
EmitInstructionPrologue(instr);
instr->EmitNativeCode(this);
instr = instr->StraightLineSuccessor();
}
BlockEntryInstr* successor =
(instr == NULL) ? NULL : instr->AsBlockEntry();
if (successor != NULL) {
// Block ended with a "goto". We can fall through if it is the
// next block in the list. Otherwise, we need a jump.
if ((i == block_order().length() - 1) ||
(block_order()[i + 1] != successor)) {
assembler()->jmp(GetBlockLabel(successor));
}
}
}
}
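The loop above ends each block by emitting a jump only when the goto target is not the block scheduled next, the standard fall-through elision. A self-contained sketch of just that test, with a hypothetical Block type in place of BlockEntryInstr:

#include <cstddef>
#include <cstdio>
#include <vector>

struct Block { int id; const Block* goto_target; };  // hypothetical

// Returns true when the block at index i can fall through to its goto target.
static bool FallsThrough(const std::vector<Block>& order, std::size_t i) {
  const Block* target = order[i].goto_target;
  if (target == nullptr) return false;      // block does not end in a goto
  if (i + 1 == order.size()) return false;  // last block: must emit a jump
  return order[i + 1].id == target->id;     // next block is the target
}

int main() {
  Block b2{2, nullptr};
  Block b1{1, &b2};
  Block b0{0, &b2};
  std::vector<Block> order{b0, b1, b2};
  for (std::size_t i = 0; i + 1 < order.size(); ++i) {
    std::printf("B%d: %s\n", order[i].id,
                FallsThrough(order, i) ? "fall through" : "emit jmp");
  }
  return 0;  // prints: B0: emit jmp, B1: fall through
}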
void FlowGraphCompiler::Bailout(const char* reason) {
const char* kFormat = "FlowGraphCompiler Bailout: %s %s.";
const char* function_name = parsed_function().function().ToCString();
intptr_t len = OS::SNPrint(NULL, 0, kFormat, function_name, reason) + 1;
char* chars = reinterpret_cast<char*>(
Isolate::Current()->current_zone()->Allocate(len));
OS::SNPrint(chars, len, kFormat, function_name, reason);
const Error& error = Error::Handle(
LanguageError::New(String::Handle(String::New(chars))));
Isolate::Current()->long_jump_base()->Jump(1, error);
}
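Bailout uses the common two-pass formatting idiom: a measuring call with a NULL buffer, then the real one into an exactly sized allocation. A sketch of the same idiom with standard snprintf and malloc standing in for OS::SNPrint and the zone allocator:

#include <cstdio>
#include <cstdlib>

static char* FormatBailout(const char* function, const char* reason) {
  const char* kFormat = "FlowGraphCompiler Bailout: %s %s.";
  // First pass: snprintf with a null, zero-sized buffer returns the length
  // the formatted string would need (excluding the terminator).
  int len = std::snprintf(nullptr, 0, kFormat, function, reason) + 1;
  char* chars = static_cast<char*>(std::malloc(len));
  std::snprintf(chars, len, kFormat, function, reason);  // second pass
  return chars;  // caller frees; the VM instead allocates in a zone
}

int main() {
  char* msg = FormatBailout("main", "unsupported instruction");
  std::puts(msg);
  std::free(msg);
  return 0;
}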
intptr_t FlowGraphCompiler::StackSize() const {
return parsed_function_.stack_local_count() +
parsed_function_.copied_parameter_count();
}
Label* FlowGraphCompilerShared::GetBlockLabel(
Label* FlowGraphCompiler::GetBlockLabel(
BlockEntryInstr* block_entry) const {
intptr_t block_index = block_entry->postorder_number();
return &block_info_[block_index]->label;
}
bool FlowGraphCompilerShared::IsNextBlock(TargetEntryInstr* block_entry) const {
bool FlowGraphCompiler::IsNextBlock(TargetEntryInstr* block_entry) const {
intptr_t current_index = reverse_index(current_block()->postorder_number());
return block_order_[current_index + 1] == block_entry;
}
void FlowGraphCompilerShared::AddExceptionHandler(intptr_t try_index,
intptr_t pc_offset) {
void FlowGraphCompiler::GenerateDeferredCode() {
for (intptr_t i = 0; i < deopt_stubs_.length(); i++) {
deopt_stubs_[i]->GenerateCode(this);
}
}
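GenerateDeferredCode emits out-of-line slow paths that were registered during the main emission pass, keeping the hot code compact. A toy version of that two-pass pattern; the Stub type here is a stand-in for DeoptimizationStub:

#include <cstdio>
#include <vector>

struct Stub {  // hypothetical stand-in for DeoptimizationStub
  int id;
  void GenerateCode() const { std::printf("deopt stub %d\n", id); }
};

int main() {
  std::vector<Stub> deopt_stubs;
  // Main pass: instructions that may deoptimize register a stub and jump
  // to its (not yet bound) label instead of emitting the slow path inline.
  deopt_stubs.push_back({0});
  deopt_stubs.push_back({1});
  // Deferred pass: emit every slow path after the regular code.
  for (const Stub& s : deopt_stubs) s.GenerateCode();
  return 0;
}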
void FlowGraphCompiler::AddExceptionHandler(intptr_t try_index,
intptr_t pc_offset) {
exception_handlers_list_->AddHandler(try_index, pc_offset);
}
// Uses current pc position and try-index.
void FlowGraphCompilerShared::AddCurrentDescriptor(PcDescriptors::Kind kind,
intptr_t cid,
intptr_t token_index,
intptr_t try_index) {
void FlowGraphCompiler::AddCurrentDescriptor(PcDescriptors::Kind kind,
intptr_t cid,
intptr_t token_index,
intptr_t try_index) {
pc_descriptors_list()->AddDescriptor(kind,
assembler()->CodeSize(),
cid,
@@ -95,12 +149,12 @@ void FlowGraphCompilerShared::AddCurrentDescriptor(PcDescriptors::Kind kind,
}
Label* FlowGraphCompilerShared::AddDeoptStub(intptr_t deopt_id,
intptr_t deopt_token_index,
intptr_t try_index,
DeoptReasonId reason,
Register reg1,
Register reg2) {
Label* FlowGraphCompiler::AddDeoptStub(intptr_t deopt_id,
intptr_t deopt_token_index,
intptr_t try_index,
DeoptReasonId reason,
Register reg1,
Register reg2) {
DeoptimizationStub* stub =
new DeoptimizationStub(deopt_id, deopt_token_index, try_index, reason);
stub->Push(reg1);
@@ -110,7 +164,7 @@ Label* FlowGraphCompilerShared::AddDeoptStub(intptr_t deopt_id,
}
void FlowGraphCompilerShared::FinalizeExceptionHandlers(const Code& code) {
void FlowGraphCompiler::FinalizeExceptionHandlers(const Code& code) {
ASSERT(exception_handlers_list_ != NULL);
const ExceptionHandlers& handlers = ExceptionHandlers::Handle(
exception_handlers_list_->FinalizeExceptionHandlers(code.EntryPoint()));
@@ -118,7 +172,7 @@ void FlowGraphCompilerShared::FinalizeExceptionHandlers(const Code& code) {
}
void FlowGraphCompilerShared::FinalizePcDescriptors(const Code& code) {
void FlowGraphCompiler::FinalizePcDescriptors(const Code& code) {
ASSERT(pc_descriptors_list_ != NULL);
const PcDescriptors& descriptors = PcDescriptors::Handle(
pc_descriptors_list_->FinalizePcDescriptors(code.EntryPoint()));
@@ -127,7 +181,7 @@ void FlowGraphCompilerShared::FinalizePcDescriptors(const Code& code) {
}
void FlowGraphCompilerShared::FinalizeStackmaps(const Code& code) {
void FlowGraphCompiler::FinalizeStackmaps(const Code& code) {
if (stackmap_builder_ == NULL) {
// The unoptimizing compiler has no stack maps.
code.set_stackmaps(Array::Handle());
@@ -139,26 +193,67 @@ void FlowGraphCompilerShared::FinalizeStackmaps(const Code& code) {
}
void FlowGraphCompilerShared::FinalizeVarDescriptors(const Code& code) {
void FlowGraphCompiler::FinalizeVarDescriptors(const Code& code) {
const LocalVarDescriptors& var_descs = LocalVarDescriptors::Handle(
parsed_function_.node_sequence()->scope()->GetVarDescriptors());
code.set_var_descriptors(var_descs);
}
void FlowGraphCompilerShared::FinalizeComments(const Code& code) {
void FlowGraphCompiler::FinalizeComments(const Code& code) {
code.set_comments(assembler()->GetCodeComments());
}
void FlowGraphCompilerShared::GenerateDeferredCode() {
for (intptr_t i = 0; i < deopt_stubs_.length(); i++) {
deopt_stubs_[i]->GenerateCode(this);
}
static bool CanOptimize() {
return !FLAG_report_usage_count &&
(FLAG_optimization_counter_threshold >= 0) &&
!Isolate::Current()->debugger()->IsActive();
}
void FlowGraphCompilerShared::GenerateInstanceCall(
// Returns 'true' if code generation for this function is complete, i.e.,
// no fall-through to regular code is needed.
bool FlowGraphCompiler::TryIntrinsify() {
if (!CanOptimize()) return false;
// Intrinsification skips argument checks, therefore disable it in checked
// mode.
if (FLAG_intrinsify && !FLAG_trace_functions && !FLAG_enable_type_checks) {
if ((parsed_function().function().kind() == RawFunction::kImplicitGetter)) {
// An implicit getter must have a specific AST structure.
const SequenceNode& sequence_node = *parsed_function().node_sequence();
ASSERT(sequence_node.length() == 1);
ASSERT(sequence_node.NodeAt(0)->IsReturnNode());
const ReturnNode& return_node = *sequence_node.NodeAt(0)->AsReturnNode();
ASSERT(return_node.value()->IsLoadInstanceFieldNode());
const LoadInstanceFieldNode& load_node =
*return_node.value()->AsLoadInstanceFieldNode();
GenerateInlinedGetter(load_node.field().Offset());
return true;
}
if ((parsed_function().function().kind() == RawFunction::kImplicitSetter)) {
// An implicit setter must have a specific AST structure.
// Sequence node has one store node and one return NULL node.
const SequenceNode& sequence_node = *parsed_function().node_sequence();
ASSERT(sequence_node.length() == 2);
ASSERT(sequence_node.NodeAt(0)->IsStoreInstanceFieldNode());
ASSERT(sequence_node.NodeAt(1)->IsReturnNode());
const StoreInstanceFieldNode& store_node =
*sequence_node.NodeAt(0)->AsStoreInstanceFieldNode();
GenerateInlinedSetter(store_node.field().Offset());
return true;
}
}
// Even if an intrinsified version of the function was successfully
// generated, it may fall through to the non-intrinsified method body.
if (!FLAG_trace_functions) {
return Intrinsifier::Intrinsify(parsed_function().function(), assembler());
}
return false;
}
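TryIntrinsify can pattern-match implicit getters and setters because their bodies have a fixed shape: a single return of a field load, or a field store followed by a return of null. A toy shape check over a hypothetical AST (the node kinds below are made up, not the VM's node classes):

#include <vector>

enum NodeKind { kReturn, kStoreField, kOther };  // hypothetical
struct Node { NodeKind kind; };

// An implicit getter body is exactly one return node; an implicit setter
// body is a field store followed by a return.
static bool LooksLikeImplicitGetter(const std::vector<Node>& body) {
  return body.size() == 1 && body[0].kind == kReturn;
}
static bool LooksLikeImplicitSetter(const std::vector<Node>& body) {
  return body.size() == 2 && body[0].kind == kStoreField &&
         body[1].kind == kReturn;
}

int main() {
  std::vector<Node> getter{{kReturn}};
  std::vector<Node> setter{{kStoreField}, {kReturn}};
  return LooksLikeImplicitGetter(getter) && LooksLikeImplicitSetter(setter)
             ? 0 : 1;
}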
void FlowGraphCompiler::GenerateInstanceCall(
intptr_t cid,
intptr_t token_index,
intptr_t try_index,
@@ -198,12 +293,12 @@ void FlowGraphCompilerShared::GenerateInstanceCall(
}
void FlowGraphCompilerShared::GenerateStaticCall(intptr_t cid,
intptr_t token_index,
intptr_t try_index,
const Function& function,
intptr_t argument_count,
const Array& argument_names) {
void FlowGraphCompiler::GenerateStaticCall(intptr_t cid,
intptr_t token_index,
intptr_t try_index,
const Function& function,
intptr_t argument_count,
const Array& argument_names) {
const Array& arguments_descriptor =
CodeGenerator::ArgumentsDescriptor(argument_count, argument_names);
const intptr_t descr_offset = EmitStaticCall(function,
@@ -217,72 +312,10 @@ void FlowGraphCompilerShared::GenerateStaticCall(intptr_t cid,
}
void FlowGraphCompilerShared::Bailout(const char* reason) {
const char* kFormat = "FlowGraphCompiler Bailout: %s %s.";
const char* function_name = parsed_function().function().ToCString();
intptr_t len = OS::SNPrint(NULL, 0, kFormat, function_name, reason) + 1;
char* chars = reinterpret_cast<char*>(
Isolate::Current()->current_zone()->Allocate(len));
OS::SNPrint(chars, len, kFormat, function_name, reason);
const Error& error = Error::Handle(
LanguageError::New(String::Handle(String::New(chars))));
Isolate::Current()->long_jump_base()->Jump(1, error);
}
static bool CanOptimize() {
return !FLAG_report_usage_count &&
(FLAG_optimization_counter_threshold >= 0) &&
!Isolate::Current()->debugger()->IsActive();
}
// Returns 'true' if code generation for this function is complete, i.e.,
// no fall-through to regular code is needed.
bool FlowGraphCompilerShared::TryIntrinsify() {
if (!CanOptimize()) return false;
// Intrinsification skips argument checks, therefore disable it in checked
// mode.
if (FLAG_intrinsify && !FLAG_trace_functions && !FLAG_enable_type_checks) {
if ((parsed_function().function().kind() == RawFunction::kImplicitGetter)) {
// An implicit getter must have a specific AST structure.
const SequenceNode& sequence_node = *parsed_function().node_sequence();
ASSERT(sequence_node.length() == 1);
ASSERT(sequence_node.NodeAt(0)->IsReturnNode());
const ReturnNode& return_node = *sequence_node.NodeAt(0)->AsReturnNode();
ASSERT(return_node.value()->IsLoadInstanceFieldNode());
const LoadInstanceFieldNode& load_node =
*return_node.value()->AsLoadInstanceFieldNode();
GenerateInlinedGetter(load_node.field().Offset());
return true;
}
if ((parsed_function().function().kind() == RawFunction::kImplicitSetter)) {
// An implicit setter must have a specific AST structure.
// Sequence node has one store node and one return NULL node.
const SequenceNode& sequence_node = *parsed_function().node_sequence();
ASSERT(sequence_node.length() == 2);
ASSERT(sequence_node.NodeAt(0)->IsStoreInstanceFieldNode());
ASSERT(sequence_node.NodeAt(1)->IsReturnNode());
const StoreInstanceFieldNode& store_node =
*sequence_node.NodeAt(0)->AsStoreInstanceFieldNode();
GenerateInlinedSetter(store_node.field().Offset());
return true;
}
}
// Even if an intrinsified version of the function was successfully
// generated, it may fall through to the non-intrinsified method body.
if (!FLAG_trace_functions) {
return Intrinsifier::Intrinsify(parsed_function().function(), assembler());
}
return false;
}
void FlowGraphCompilerShared::GenerateNumberTypeCheck(
Register kClassIdReg,
const AbstractType& type,
Label* is_instance_lbl,
Label* is_not_instance_lbl) {
void FlowGraphCompiler::GenerateNumberTypeCheck(Register kClassIdReg,
const AbstractType& type,
Label* is_instance_lbl,
Label* is_not_instance_lbl) {
GrowableArray<intptr_t> args;
if (type.IsNumberInterface()) {
args.Add(kDouble);
@@ -298,10 +331,9 @@ void FlowGraphCompilerShared::GenerateNumberTypeCheck(
}
void FlowGraphCompilerShared::GenerateStringTypeCheck(
Register kClassIdReg,
Label* is_instance_lbl,
Label* is_not_instance_lbl) {
void FlowGraphCompiler::GenerateStringTypeCheck(Register kClassIdReg,
Label* is_instance_lbl,
Label* is_not_instance_lbl) {
GrowableArray<intptr_t> args;
args.Add(kOneByteString);
args.Add(kTwoByteString);
@@ -313,9 +345,8 @@ void FlowGraphCompilerShared::GenerateStringTypeCheck(
}
void FlowGraphCompilerShared::GenerateListTypeCheck(
Register kClassIdReg,
Label* is_instance_lbl) {
void FlowGraphCompiler::GenerateListTypeCheck(Register kClassIdReg,
Label* is_instance_lbl) {
Label unknown;
GrowableArray<intptr_t> args;
args.Add(kArray);
@@ -325,6 +356,12 @@ void FlowGraphCompilerShared::GenerateListTypeCheck(
assembler()->Bind(&unknown);
}
void FlowGraphCompiler::EmitComment(Instruction* instr) {
char buffer[80];
BufferFormatter f(buffer, sizeof(buffer));
instr->PrintTo(&f);
assembler()->Comment("@%d: %s", instr->cid(), buffer);
}
} // namespace dart

View file

@@ -17,7 +17,6 @@
namespace dart {
DECLARE_FLAG(bool, code_comments);
DECLARE_FLAG(bool, compiler_stats);
DECLARE_FLAG(bool, enable_type_checks);
DECLARE_FLAG(bool, print_ast);
@@ -25,7 +24,7 @@ DECLARE_FLAG(bool, print_scopes);
DECLARE_FLAG(bool, trace_functions);
void DeoptimizationStub::GenerateCode(FlowGraphCompilerShared* compiler) {
void DeoptimizationStub::GenerateCode(FlowGraphCompiler* compiler) {
Assembler* assem = compiler->assembler();
#define __ assem->
__ Comment("Deopt stub for id %d", deopt_id_);
@@ -45,16 +44,6 @@ void DeoptimizationStub::GenerateCode(FlowGraphCompilerShared* compiler) {
}
FlowGraphCompiler::FlowGraphCompiler(
Assembler* assembler,
const ParsedFunction& parsed_function,
const GrowableArray<BlockEntryInstr*>& block_order,
bool is_optimizing)
: FlowGraphCompilerShared(assembler,
parsed_function,
block_order,
is_optimizing) {}
#define __ assembler()->
@@ -957,22 +946,6 @@ void FlowGraphCompiler::GenerateAssertAssignable(intptr_t cid,
}
void FlowGraphCompiler::EmitComment(Instruction* instr) {
char buffer[80];
BufferFormatter f(buffer, sizeof(buffer));
instr->PrintTo(&f);
__ Comment("@%d: %s", instr->cid(), buffer);
}
void FlowGraphCompiler::BailoutOnInstruction(Instruction* instr) {
char buffer[80];
BufferFormatter f(buffer, sizeof(buffer));
instr->PrintTo(&f);
Bailout(buffer);
}
void FlowGraphCompiler::EmitInstructionPrologue(Instruction* instr) {
LocationSummary* locs = instr->locs();
ASSERT(locs != NULL);
@@ -988,37 +961,6 @@ void FlowGraphCompiler::EmitInstructionPrologue(Instruction* instr) {
}
void FlowGraphCompiler::VisitBlocks() {
for (intptr_t i = 0; i < block_order().length(); ++i) {
__ Comment("B%d", i);
// Compile the block entry.
set_current_block(block_order()[i]);
current_block()->PrepareEntry(this);
Instruction* instr = current_block()->StraightLineSuccessor();
// Compile all successors until an exit, branch, or a block entry.
while ((instr != NULL) && !instr->IsBlockEntry()) {
if (FLAG_code_comments) EmitComment(instr);
if (instr->locs() == NULL) {
BailoutOnInstruction(instr);
} else {
EmitInstructionPrologue(instr);
instr->EmitNativeCode(this);
instr = instr->StraightLineSuccessor();
}
}
BlockEntryInstr* successor =
(instr == NULL) ? NULL : instr->AsBlockEntry();
if (successor != NULL) {
// Block ended with a "goto". We can fall through if it is the
// next block in the list. Otherwise, we need a jump.
if ((i == block_order().length() - 1) ||
(block_order()[i + 1] != successor)) {
__ jmp(GetBlockLabel(successor));
}
}
}
}
#undef __
} // namespace dart

View file

@@ -13,7 +13,6 @@
#include "vm/assembler_macros.h"
#include "vm/code_descriptors.h"
#include "vm/code_generator.h"
#include "vm/flow_graph_compiler_shared.h"
#include "vm/intermediate_language.h"
namespace dart {
@@ -28,26 +27,57 @@ class StackMapBuilder;
// Stubbed out implementation of graph compiler, bails out immediately if
// CompileGraph is called. The rest of the public API is UNIMPLEMENTED.
class FlowGraphCompiler : public FlowGraphCompilerShared {
class FlowGraphCompiler : public ValueObject {
private:
struct BlockInfo : public ZoneAllocated {
public:
BlockInfo() : label() { }
Label label;
};
public:
FlowGraphCompiler(Assembler* assembler,
const ParsedFunction& parsed_function,
const GrowableArray<BlockEntryInstr*>& block_order,
bool is_optimizing);
virtual ~FlowGraphCompiler();
// Accessors.
Assembler* assembler() const { return assembler_; }
const ParsedFunction& parsed_function() const { return parsed_function_; }
const GrowableArray<BlockEntryInstr*>& block_order() const {
return block_order_;
}
DescriptorList* pc_descriptors_list() const {
return pc_descriptors_list_;
}
BlockEntryInstr* current_block() const { return current_block_; }
void set_current_block(BlockEntryInstr* value) {
current_block_ = value;
}
bool is_optimizing() const { return is_optimizing_; }
const GrowableArray<BlockInfo*>& block_info() const { return block_info_; }
// Constructor is lightweight, major initialization work should occur here.
// This makes it easier to measure time spent in the compiler.
void InitCompiler();
void CompileGraph();
void GenerateCallRuntime(intptr_t cid,
intptr_t token_index,
intptr_t try_index,
const RuntimeEntry& entry);
void VisitBlocks();
// Returns pc-offset (in bytes) of the pc after the call, can be used to emit
// pc-descriptor information.
virtual intptr_t EmitInstanceCall(ExternalLabel* target_label,
const ICData& ic_data,
const Array& arguments_descriptor,
intptr_t argument_count);
// Bail out of the flow graph compiler. Does not return to the caller.
void Bailout(const char* reason);
// Returns 'true' if code generation for this function is complete, i.e.,
// no fall-through to regular code is needed.
bool TryIntrinsify();
virtual void GenerateCallRuntime(intptr_t cid,
intptr_t token_index,
intptr_t try_index,
const RuntimeEntry& entry);
// Returns pc-offset (in bytes) of the pc after the call, can be used to emit
// pc-descriptor information.
@@ -55,28 +85,91 @@ class FlowGraphCompiler : public FlowGraphCompilerShared {
const Array& arguments_descriptor,
intptr_t argument_count);
void GenerateCall(intptr_t token_index,
intptr_t try_index,
const ExternalLabel* label,
PcDescriptors::Kind kind);
void GenerateInstanceOf(intptr_t cid,
virtual void GenerateCall(intptr_t token_index,
intptr_t try_index,
const ExternalLabel* label,
PcDescriptors::Kind kind);
virtual void GenerateInstanceOf(intptr_t cid,
intptr_t token_index,
intptr_t try_index,
const AbstractType& type,
bool negate_result);
virtual void GenerateAssertAssignable(intptr_t cid,
intptr_t token_index,
intptr_t try_index,
const AbstractType& dst_type,
const String& dst_name);
void GenerateInstanceCall(intptr_t cid,
intptr_t token_index,
intptr_t try_index,
const String& function_name,
intptr_t argument_count,
const Array& argument_names,
intptr_t checked_argument_count);
void GenerateStaticCall(intptr_t cid,
intptr_t token_index,
intptr_t try_index,
const AbstractType& type,
bool negate_result);
void GenerateAssertAssignable(intptr_t cid,
intptr_t token_index,
intptr_t try_index,
const AbstractType& dst_type,
const String& dst_name);
const Function& function,
intptr_t argument_count,
const Array& argument_names);
void GenerateNumberTypeCheck(Register kClassIdReg,
const AbstractType& type,
Label* is_instance_lbl,
Label* is_not_instance_lbl);
void GenerateStringTypeCheck(Register kClassIdReg,
Label* is_instance_lbl,
Label* is_not_instance_lbl);
void GenerateListTypeCheck(Register kClassIdReg,
Label* is_instance_lbl);
// Returns pc-offset (in bytes) of the pc after the call, can be used to emit
// pc-descriptor information.
intptr_t EmitInstanceCall(ExternalLabel* target_label,
const ICData& ic_data,
const Array& arguments_descriptor,
intptr_t argument_count);
void EmitComment(Instruction* instr);
intptr_t StackSize() const;
// Returns assembler label associated with the given block entry.
Label* GetBlockLabel(BlockEntryInstr* block_entry) const;
// Returns true if the next block after current in the current block order
// is the given block.
bool IsNextBlock(TargetEntryInstr* block_entry) const;
void AddExceptionHandler(intptr_t try_index, intptr_t pc_offset);
void AddCurrentDescriptor(PcDescriptors::Kind kind,
intptr_t cid,
intptr_t token_index,
intptr_t try_index);
Label* AddDeoptStub(intptr_t deopt_id,
intptr_t deopt_token_index,
intptr_t try_index_,
DeoptReasonId reason,
Register reg1,
Register reg2);
void FinalizeExceptionHandlers(const Code& code);
void FinalizePcDescriptors(const Code& code);
void FinalizeStackmaps(const Code& code);
void FinalizeVarDescriptors(const Code& code);
void FinalizeComments(const Code& code);
static const int kLocalsOffsetFromFP = (-1 * kWordSize);
private:
friend class DeoptimizationStub;
virtual void VisitBlocks();
void GenerateDeferredCode();
void CopyParameters();
void EmitInstructionPrologue(Instruction* instr);
virtual void EmitInstructionPrologue(Instruction* instr);
virtual void GenerateInlinedGetter(intptr_t offset);
virtual void GenerateInlinedSetter(intptr_t offset);
@@ -134,12 +227,64 @@ class FlowGraphCompiler : public FlowGraphCompilerShared {
Label* is_equal_lbl,
Label* is_not_equal_lbl);
void EmitComment(Instruction* instr);
void BailoutOnInstruction(Instruction* instr);
// Map a block number in a forward iteration into the block number in the
// corresponding reverse iteration. Used to obtain an index into
// block_order for reverse iterations.
intptr_t reverse_index(intptr_t index) const {
return block_order_.length() - index - 1;
}
class Assembler* assembler_;
const ParsedFunction& parsed_function_;
const GrowableArray<BlockEntryInstr*>& block_order_;
// Compiler specific per-block state. Indexed by postorder block number
// for convenience. This is not the block's index in the block order,
// which is reverse postorder.
BlockEntryInstr* current_block_;
ExceptionHandlerList* exception_handlers_list_;
DescriptorList* pc_descriptors_list_;
StackmapBuilder* stackmap_builder_;
GrowableArray<BlockInfo*> block_info_;
GrowableArray<DeoptimizationStub*> deopt_stubs_;
const bool is_optimizing_;
DISALLOW_COPY_AND_ASSIGN(FlowGraphCompiler);
};
class DeoptimizationStub : public ZoneAllocated {
public:
DeoptimizationStub(intptr_t deopt_id,
intptr_t deopt_token_index,
intptr_t try_index,
DeoptReasonId reason)
: deopt_id_(deopt_id),
deopt_token_index_(deopt_token_index),
try_index_(try_index),
reason_(reason),
registers_(2),
entry_label_() {}
void Push(Register reg) { registers_.Add(reg); }
Label* entry_label() { return &entry_label_; }
// Implementation is in architecture specific file.
void GenerateCode(FlowGraphCompiler* compiler);
private:
const intptr_t deopt_id_;
const intptr_t deopt_token_index_;
const intptr_t try_index_;
const DeoptReasonId reason_;
GrowableArray<Register> registers_;
Label entry_label_;
DISALLOW_COPY_AND_ASSIGN(DeoptimizationStub);
};
} // namespace dart
#endif // VM_FLOW_GRAPH_COMPILER_IA32_H_

View file

@@ -1,198 +0,0 @@
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
#ifndef VM_FLOW_GRAPH_COMPILER_SHARED_H_
#define VM_FLOW_GRAPH_COMPILER_SHARED_H_
#include "vm/allocation.h"
#include "vm/assembler.h"
#include "vm/code_descriptors.h"
#include "vm/code_generator.h"
#include "vm/growable_array.h"
namespace dart {
class BlockEntryInstr;
class ExceptionHandlerList;
class FlowGraphCompilerShared;
class ParsedFunction;
class TargetEntryInstr;
class DeoptimizationStub : public ZoneAllocated {
public:
DeoptimizationStub(intptr_t deopt_id,
intptr_t deopt_token_index,
intptr_t try_index,
DeoptReasonId reason)
: deopt_id_(deopt_id),
deopt_token_index_(deopt_token_index),
try_index_(try_index),
reason_(reason),
registers_(2),
entry_label_() {}
void Push(Register reg) { registers_.Add(reg); }
Label* entry_label() { return &entry_label_; }
// Implementation is in architecture specific file.
void GenerateCode(FlowGraphCompilerShared* compiler);
private:
const intptr_t deopt_id_;
const intptr_t deopt_token_index_;
const intptr_t try_index_;
const DeoptReasonId reason_;
GrowableArray<Register> registers_;
Label entry_label_;
DISALLOW_COPY_AND_ASSIGN(DeoptimizationStub);
};
class FlowGraphCompilerShared : public ValueObject {
public:
FlowGraphCompilerShared(Assembler* assembler,
const ParsedFunction& parsed_function,
const GrowableArray<BlockEntryInstr*>& block_order,
bool is_optimizing);
virtual ~FlowGraphCompilerShared();
// Constructor is lightweight, major initialization work should occur here.
// This makes it easier to measure time spent in the compiler.
void InitCompiler();
Assembler* assembler() const { return assembler_; }
const ParsedFunction& parsed_function() const { return parsed_function_; }
const GrowableArray<BlockEntryInstr*>& block_order() const {
return block_order_;
}
DescriptorList* pc_descriptors_list() const {
return pc_descriptors_list_;
}
BlockEntryInstr* current_block() const { return current_block_; }
void set_current_block(BlockEntryInstr* value) {
current_block_ = value;
}
bool is_optimizing() const { return is_optimizing_; }
intptr_t StackSize() const;
// Returns assembler label associated with the given block entry.
Label* GetBlockLabel(BlockEntryInstr* block_entry) const;
// Returns true if the next block after current in the current block order
// is the given block.
bool IsNextBlock(TargetEntryInstr* block_entry) const;
void AddExceptionHandler(intptr_t try_index, intptr_t pc_offset);
void AddCurrentDescriptor(PcDescriptors::Kind kind,
intptr_t cid,
intptr_t token_index,
intptr_t try_index);
Label* AddDeoptStub(intptr_t deopt_id,
intptr_t deopt_token_index,
intptr_t try_index_,
DeoptReasonId reason,
Register reg1,
Register reg2);
void FinalizeExceptionHandlers(const Code& code);
void FinalizePcDescriptors(const Code& code);
void FinalizeStackmaps(const Code& code);
void FinalizeVarDescriptors(const Code& code);
void FinalizeComments(const Code& code);
void GenerateInstanceCall(intptr_t cid,
intptr_t token_index,
intptr_t try_index,
const String& function_name,
intptr_t argument_count,
const Array& argument_names,
intptr_t checked_argument_count);
void GenerateStaticCall(intptr_t cid,
intptr_t token_index,
intptr_t try_index,
const Function& function,
intptr_t argument_count,
const Array& argument_names);
void GenerateNumberTypeCheck(Register kClassIdReg,
const AbstractType& type,
Label* is_instance_lbl,
Label* is_not_instance_lbl);
void GenerateStringTypeCheck(Register kClassIdReg,
Label* is_instance_lbl,
Label* is_not_instance_lbl);
void GenerateListTypeCheck(Register kClassIdReg,
Label* is_instance_lbl);
void GenerateDeferredCode();
// Returns 'true' if code generation for this function is complete, i.e.,
// no fall-through to regular code is needed.
bool TryIntrinsify();
virtual void GenerateInlinedGetter(intptr_t offset) = 0;
virtual void GenerateInlinedSetter(intptr_t offset) = 0;
// Returns pc-offset (in bytes) of the pc after the call, can be used to emit
// pc-descriptor information.
virtual intptr_t EmitInstanceCall(ExternalLabel* target_label,
const ICData& ic_data,
const Array& arguments_descriptor,
intptr_t argument_count) = 0;
// Returns pc-offset (in bytes) of the pc after the call, can be used to emit
// pc-descriptor information.
virtual intptr_t EmitStaticCall(const Function& function,
const Array& arguments_descriptor,
intptr_t argument_count) = 0;
virtual void CheckClassIds(Register class_id_reg,
const GrowableArray<intptr_t>& class_ids,
Label* is_equal_lbl,
Label* is_not_equal_lbl) = 0;
struct BlockInfo : public ZoneAllocated {
public:
BlockInfo() : label() { }
Label label;
};
const GrowableArray<BlockInfo*>& block_info() const { return block_info_; }
// Bail out of the flow graph compiler. Does not return to the caller.
void Bailout(const char* reason);
private:
// Map a block number in a forward iteration into the block number in the
// corresponding reverse iteration. Used to obtain an index into
// block_order for reverse iterations.
intptr_t reverse_index(intptr_t index) const {
return block_order_.length() - index - 1;
}
class Assembler* assembler_;
const ParsedFunction& parsed_function_;
const GrowableArray<BlockEntryInstr*>& block_order_;
// Compiler specific per-block state. Indexed by postorder block number
// for convenience. This is not the block's index in the block order,
// which is reverse postorder.
BlockEntryInstr* current_block_;
ExceptionHandlerList* exception_handlers_list_;
DescriptorList* pc_descriptors_list_;
StackmapBuilder* stackmap_builder_;
GrowableArray<BlockInfo*> block_info_;
GrowableArray<DeoptimizationStub*> deopt_stubs_;
const bool is_optimizing_;
DISALLOW_COPY_AND_ASSIGN(FlowGraphCompilerShared);
};
} // namespace dart
#endif // VM_FLOW_GRAPH_COMPILER_SHARED_H_

View file

@@ -24,10 +24,9 @@ DEFINE_FLAG(bool, print_scopes, false, "Print scopes of local variables.");
DEFINE_FLAG(bool, trace_functions, false, "Trace entry of each function.");
DECLARE_FLAG(bool, enable_type_checks);
DECLARE_FLAG(bool, print_ast);
DECLARE_FLAG(bool, code_comments);
void DeoptimizationStub::GenerateCode(FlowGraphCompilerShared* compiler) {
void DeoptimizationStub::GenerateCode(FlowGraphCompiler* compiler) {
Assembler* assem = compiler->assembler();
#define __ assem->
__ Comment("Deopt stub for id %d", deopt_id_);
@@ -47,16 +46,6 @@ void DeoptimizationStub::GenerateCode(FlowGraphCompilerShared* compiler) {
}
FlowGraphCompiler::FlowGraphCompiler(
Assembler* assembler,
const ParsedFunction& parsed_function,
const GrowableArray<BlockEntryInstr*>& block_order,
bool is_optimizing)
: FlowGraphCompilerShared(assembler,
parsed_function,
block_order,
is_optimizing) {}
#define __ assembler()->
@@ -659,44 +648,6 @@ void FlowGraphCompiler::EmitInstructionPrologue(Instruction* instr) {
}
void FlowGraphCompiler::VisitBlocks() {
for (intptr_t i = 0; i < block_order().length(); ++i) {
__ Comment("B%d", i);
// Compile the block entry.
set_current_block(block_order()[i]);
current_block()->PrepareEntry(this);
Instruction* instr = current_block()->StraightLineSuccessor();
// Compile all successors until an exit, branch, or a block entry.
while ((instr != NULL) && !instr->IsBlockEntry()) {
if (FLAG_code_comments) EmitComment(instr);
ASSERT(instr->locs() != NULL);
EmitInstructionPrologue(instr);
instr->EmitNativeCode(this);
instr = instr->StraightLineSuccessor();
}
BlockEntryInstr* successor =
(instr == NULL) ? NULL : instr->AsBlockEntry();
if (successor != NULL) {
// Block ended with a "goto". We can fall through if it is the
// next block in the list. Otherwise, we need a jump.
if ((i == block_order().length() - 1) ||
(block_order()[i + 1] != successor)) {
__ jmp(GetBlockLabel(successor));
}
}
}
}
void FlowGraphCompiler::EmitComment(Instruction* instr) {
char buffer[80];
BufferFormatter f(buffer, sizeof(buffer));
instr->PrintTo(&f);
__ Comment("@%d: %s", instr->cid(), buffer);
}
// Copied from CodeGenerator::CopyParameters (CodeGenerator will be deprecated).
void FlowGraphCompiler::CopyParameters() {
const Function& function = parsed_function().function();

View file

@ -13,7 +13,6 @@
#include "vm/assembler_macros.h"
#include "vm/code_descriptors.h"
#include "vm/code_generator.h"
#include "vm/flow_graph_compiler_shared.h"
#include "vm/intermediate_language.h"
namespace dart {
@@ -24,52 +23,58 @@ class ExceptionHandlerList;
template <typename T> class GrowableArray;
class ParsedFunction;
class FlowGraphCompiler : public FlowGraphCompilerShared {
class FlowGraphCompiler : public ValueObject {
private:
struct BlockInfo : public ZoneAllocated {
public:
BlockInfo() : label() { }
Label label;
};
public:
FlowGraphCompiler(Assembler* assembler,
const ParsedFunction& parsed_function,
const GrowableArray<BlockEntryInstr*>& block_order,
bool is_optimizing);
virtual ~FlowGraphCompiler();
// Accessors.
Assembler* assembler() const { return assembler_; }
const ParsedFunction& parsed_function() const { return parsed_function_; }
const GrowableArray<BlockEntryInstr*>& block_order() const {
return block_order_;
}
DescriptorList* pc_descriptors_list() const {
return pc_descriptors_list_;
}
BlockEntryInstr* current_block() const { return current_block_; }
void set_current_block(BlockEntryInstr* value) {
current_block_ = value;
}
bool is_optimizing() const { return is_optimizing_; }
const GrowableArray<BlockInfo*>& block_info() const { return block_info_; }
// Constructor is lightweight, major initialization work should occur here.
// This makes it easier to measure time spent in the compiler.
void InitCompiler();
void CompileGraph();
void GenerateCallRuntime(intptr_t cid,
intptr_t token_index,
intptr_t try_index,
const RuntimeEntry& entry);
void VisitBlocks();
private:
friend class DeoptimizationStub;
// Bail out of the flow graph compiler. Does not return to the caller.
void Bailout(const char* reason);
// TODO(fschneider): Clean up friend-class declarations once all code
// generator templates have been moved to intermediate_language_x64.cc.
#define DECLARE_FRIEND(ShortName, ClassName) friend class ClassName;
FOR_EACH_COMPUTATION(DECLARE_FRIEND)
#undef DECLARE_FRIEND
static const int kLocalsOffsetFromFP = (-1 * kWordSize);
// Returns 'true' if code generation for this function is complete, i.e.,
// no fall-through to regular code is needed.
bool TryIntrinsify();
virtual void VisitBlocks();
void EmitInstructionPrologue(Instruction* instr);
// Emit code to load a Value into register 'dst'.
void LoadValue(Register dst, Value* value);
void EmitComment(Instruction* instr);
// Returns pc-offset (in bytes) of the pc after the call, can be used to emit
// pc-descriptor information.
virtual intptr_t EmitInstanceCall(ExternalLabel* target_label,
const ICData& ic_data,
const Array& arguments_descriptor,
intptr_t argument_count);
// Returns pc-offset (in bytes) of the pc after the call, can be used to emit
// pc-descriptor information.
virtual intptr_t EmitStaticCall(const Function& function,
const Array& arguments_descriptor,
intptr_t argument_count);
virtual void GenerateCallRuntime(intptr_t cid,
intptr_t token_index,
intptr_t try_index,
const RuntimeEntry& entry);
// Infrastructure copied from class CodeGenerator.
void GenerateCall(intptr_t token_index,
@@ -77,6 +82,97 @@ class FlowGraphCompiler : public FlowGraphCompilerShared {
const ExternalLabel* label,
PcDescriptors::Kind kind);
void GenerateAssertAssignable(intptr_t cid,
intptr_t token_index,
intptr_t try_index,
const AbstractType& dst_type,
const String& dst_name);
void GenerateInstanceOf(intptr_t cid,
intptr_t token_index,
intptr_t try_index,
const AbstractType& type,
bool negate_result);
void GenerateInstanceCall(intptr_t cid,
intptr_t token_index,
intptr_t try_index,
const String& function_name,
intptr_t argument_count,
const Array& argument_names,
intptr_t checked_argument_count);
void GenerateStaticCall(intptr_t cid,
intptr_t token_index,
intptr_t try_index,
const Function& function,
intptr_t argument_count,
const Array& argument_names);
void GenerateNumberTypeCheck(Register kClassIdReg,
const AbstractType& type,
Label* is_instance_lbl,
Label* is_not_instance_lbl);
void GenerateStringTypeCheck(Register kClassIdReg,
Label* is_instance_lbl,
Label* is_not_instance_lbl);
void GenerateListTypeCheck(Register kClassIdReg,
Label* is_instance_lbl);
void EmitComment(Instruction* instr);
// Returns pc-offset (in bytes) of the pc after the call, can be used to emit
// pc-descriptor information.
intptr_t EmitInstanceCall(ExternalLabel* target_label,
const ICData& ic_data,
const Array& arguments_descriptor,
intptr_t argument_count);
intptr_t StackSize() const;
// Returns assembler label associated with the given block entry.
Label* GetBlockLabel(BlockEntryInstr* block_entry) const;
// Returns true if the next block after current in the current block order
// is the given block.
bool IsNextBlock(TargetEntryInstr* block_entry) const;
void AddExceptionHandler(intptr_t try_index, intptr_t pc_offset);
void AddCurrentDescriptor(PcDescriptors::Kind kind,
intptr_t cid,
intptr_t token_index,
intptr_t try_index);
Label* AddDeoptStub(intptr_t deopt_id,
intptr_t deopt_token_index,
intptr_t try_index_,
DeoptReasonId reason,
Register reg1,
Register reg2);
void FinalizeExceptionHandlers(const Code& code);
void FinalizePcDescriptors(const Code& code);
void FinalizeStackmaps(const Code& code);
void FinalizeVarDescriptors(const Code& code);
void FinalizeComments(const Code& code);
static const int kLocalsOffsetFromFP = (-1 * kWordSize);
private:
friend class DeoptimizationStub;
void GenerateDeferredCode();
virtual void EmitInstructionPrologue(Instruction* instr);
// Emit code to load a Value into register 'dst'.
void LoadValue(Register dst, Value* value);
// Returns pc-offset (in bytes) of the pc after the call, can be used to emit
// pc-descriptor information.
virtual intptr_t EmitStaticCall(const Function& function,
const Array& arguments_descriptor,
intptr_t argument_count);
// Type checking helper methods.
virtual void CheckClassIds(Register class_id_reg,
const GrowableArray<intptr_t>& class_ids,
@@ -116,18 +212,6 @@ class FlowGraphCompiler : public FlowGraphCompilerShared {
Label* is_instance_lbl,
Label* is_not_instance_lbl);
void GenerateAssertAssignable(intptr_t cid,
intptr_t token_index,
intptr_t try_index,
const AbstractType& dst_type,
const String& dst_name);
void GenerateInstanceOf(intptr_t cid,
intptr_t token_index,
intptr_t try_index,
const AbstractType& type,
bool negate_result);
enum TypeTestStubKind {
kTestTypeOneArg,
kTestTypeTwoArgs,
@@ -148,9 +232,62 @@ class FlowGraphCompiler : public FlowGraphCompilerShared {
virtual void GenerateInlinedGetter(intptr_t offset);
virtual void GenerateInlinedSetter(intptr_t offset);
// Map a block number in a forward iteration into the block number in the
// corresponding reverse iteration. Used to obtain an index into
// block_order for reverse iterations.
intptr_t reverse_index(intptr_t index) const {
return block_order_.length() - index - 1;
}
class Assembler* assembler_;
const ParsedFunction& parsed_function_;
const GrowableArray<BlockEntryInstr*>& block_order_;
// Compiler specific per-block state. Indexed by postorder block number
// for convenience. This is not the block's index in the block order,
// which is reverse postorder.
BlockEntryInstr* current_block_;
ExceptionHandlerList* exception_handlers_list_;
DescriptorList* pc_descriptors_list_;
StackmapBuilder* stackmap_builder_;
GrowableArray<BlockInfo*> block_info_;
GrowableArray<DeoptimizationStub*> deopt_stubs_;
const bool is_optimizing_;
DISALLOW_COPY_AND_ASSIGN(FlowGraphCompiler);
};
class DeoptimizationStub : public ZoneAllocated {
public:
DeoptimizationStub(intptr_t deopt_id,
intptr_t deopt_token_index,
intptr_t try_index,
DeoptReasonId reason)
: deopt_id_(deopt_id),
deopt_token_index_(deopt_token_index),
try_index_(try_index),
reason_(reason),
registers_(2),
entry_label_() {}
void Push(Register reg) { registers_.Add(reg); }
Label* entry_label() { return &entry_label_; }
// Implementation is in architecture specific file.
void GenerateCode(FlowGraphCompiler* compiler);
private:
const intptr_t deopt_id_;
const intptr_t deopt_token_index_;
const intptr_t try_index_;
const DeoptReasonId reason_;
GrowableArray<Register> registers_;
Label entry_label_;
DISALLOW_COPY_AND_ASSIGN(DeoptimizationStub);
};
} // namespace dart
#endif // VM_FLOW_GRAPH_COMPILER_X64_H_

View file

@@ -299,7 +299,6 @@ void StoreIndexedComp::EmitNativeCode(FlowGraphCompiler* compiler) {
LocationSummary* InstanceSetterComp::MakeLocationSummary() const {
const intptr_t kNumInputs = 2;
return LocationSummary::Make(kNumInputs, Location::RequiresRegister());
return NULL;
}
@@ -332,22 +331,46 @@ void InstanceSetterComp::EmitNativeCode(FlowGraphCompiler* compiler) {
LocationSummary* StaticSetterComp::MakeLocationSummary() const {
return NULL;
const intptr_t kNumInputs = 1;
return LocationSummary::Make(kNumInputs, Location::RequiresRegister());
}
void StaticSetterComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
Register value = locs()->in(0).reg();
Register result = locs()->out().reg();
// Preserve the argument as the result of the computation,
// then call the setter.
// Duplicate the argument.
// TODO(fschneider): Avoid preserving the value if the result is not used.
__ pushl(value);
__ pushl(value);
compiler->GenerateStaticCall(cid(),
token_index(),
try_index(),
setter_function(),
1,
Array::ZoneHandle());
__ popl(result);
}
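The double pushl above is the whole trick: one copy of the value is consumed as the call argument, the other survives on the stack and is popped as the result. The same discipline modeled with an ordinary vector as a stand-in for the machine stack:

#include <cassert>
#include <vector>

int main() {
  std::vector<int> stack;     // stand-in for the machine stack
  int value = 42;
  stack.push_back(value);     // first copy: preserved as the result
  stack.push_back(value);     // second copy: consumed as the call argument
  stack.pop_back();           // GenerateStaticCall pops its argument
  int result = stack.back();  // __ popl(result)
  stack.pop_back();
  assert(result == value);
  return result == value ? 0 : 1;
}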
LocationSummary* LoadInstanceFieldComp::MakeLocationSummary() const {
return NULL;
// TODO(fschneider): For this instruction the input register may be
// reused for the result (but is not required to) because the input
// is not used after the result is defined. We should consider adding
// this information to the input policy.
return LocationSummary::Make(1, Location::RequiresRegister());
}
void LoadInstanceFieldComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
Register instance = locs()->in(0).reg();
Register result = locs()->out().reg();
__ movl(result, FieldAddress(instance, field().Offset()));
}
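The generated movl is simply a load of the word at the field's byte offset from the object pointer. A plain C++ analogue using a made-up object layout (the real VM applies a tag correction via FieldAddress, which is elided here):

#include <cstddef>
#include <cstdint>
#include <cstring>

struct Object { std::int32_t header; std::int32_t field; };  // hypothetical

int main() {
  Object obj{0, 7};
  const std::size_t offset = offsetof(Object, field);
  const char* instance = reinterpret_cast<const char*>(&obj);
  std::int32_t result;
  // movl result, [instance + offset]
  std::memcpy(&result, instance + offset, sizeof(result));
  return result == 7 ? 0 : 1;
}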
@@ -693,12 +716,27 @@ void CloneContextComp::EmitNativeCode(FlowGraphCompiler* compiler) {
LocationSummary* CatchEntryComp::MakeLocationSummary() const {
return NULL;
return LocationSummary::Make(0, Location::NoLocation());
}
// Restore the stack and initialize the two exception variables:
// the exception object and the stack trace.
void CatchEntryComp::EmitNativeCode(FlowGraphCompiler* compiler) {
UNIMPLEMENTED();
// Restore ESP from EBP as we are coming from a throw and the code for
// popping arguments has not been run.
const intptr_t locals_space_size = compiler->StackSize() * kWordSize;
ASSERT(locals_space_size >= 0);
const intptr_t offset_size =
-locals_space_size + FlowGraphCompiler::kLocalsOffsetFromFP;
__ leal(ESP, Address(EBP, offset_size));
ASSERT(!exception_var().is_captured());
ASSERT(!stacktrace_var().is_captured());
__ movl(Address(EBP, exception_var().index() * kWordSize),
kExceptionObjectReg);
__ movl(Address(EBP, stacktrace_var().index() * kWordSize),
kStackTraceObjectReg);
}
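The leal above rewinds the stack pointer to the end of the fixed frame: one word below EBP (kLocalsOffsetFromFP) minus the space for locals and copied parameters, discarding whatever the throw left above. A small check of that arithmetic under an assumed frame of three stack words:

#include <cassert>
#include <cstdint>

int main() {
  const std::intptr_t kWordSize = 4;  // ia32
  const std::intptr_t kLocalsOffsetFromFP = -1 * kWordSize;
  const std::intptr_t stack_size = 3;  // assumed: locals + copied parameters
  const std::intptr_t locals_space_size = stack_size * kWordSize;
  // leal ESP, [EBP + offset] leaves ESP just past the last fixed-frame slot.
  const std::intptr_t offset = -locals_space_size + kLocalsOffsetFromFP;
  assert(offset == -16);
  return offset == -16 ? 0 : 1;
}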

View file

@@ -512,7 +512,8 @@ LocationSummary* InstantiateTypeArgumentsComp::MakeLocationSummary() const {
}
void InstantiateTypeArgumentsComp::EmitNativeCode(FlowGraphCompiler* compiler) {
void InstantiateTypeArgumentsComp::EmitNativeCode(
FlowGraphCompiler* compiler) {
Register instantiator_reg = locs()->in(0).reg();
Register result_reg = locs()->out().reg();

View file

@@ -121,9 +121,8 @@
'flags_test.cc',
'flow_graph_builder.cc',
'flow_graph_builder.h',
'flow_graph_compiler.cc',
'flow_graph_compiler.h',
'flow_graph_compiler_shared.cc',
'flow_graph_compiler_shared.h',
'flow_graph_compiler_arm.cc',
'flow_graph_compiler_arm.h',
'flow_graph_compiler_ia32.cc',