Fix bug with optimized try-catch on ARM/MIPS.

The constant pool pointer must be restored before any parallel
moves at the catch-entry block.

In addition, this CL removes CatchEntryInstr and folds its
functionality into CatchBlockEntryInstr. Until now every catch-block
started with CatchBlockEntryInstr followed by a CatchEntryInstr.

This simplifies a lot of code in the compiler and avoids issues
with allocator-move code inserted between the two instructions.

BUG=https://code.google.com/p/dart/issues/detail?id=12291
TEST=tests/language/try_catch4_test.dart
R=regis@google.com, srdjan@google.com

Review URL: https://codereview.chromium.org/22590002

git-svn-id: https://dart.googlecode.com/svn/branches/bleeding_edge/dart@25918 260f80e4-7a28-3924-810f-c04153c831b5
This commit is contained in:
fschneider@google.com 2013-08-08 08:20:10 +00:00
parent 27ab3ec418
commit 2793f7ad63
15 changed files with 161 additions and 204 deletions

View file

@ -60,12 +60,6 @@ static intptr_t ToInstructionEnd(intptr_t pos) {
}
static intptr_t NextInstructionPos(intptr_t pos) {
ASSERT(IsInstructionStartPosition(pos));
return pos + 2;
}
FlowGraphAllocator::FlowGraphAllocator(const FlowGraph& flow_graph)
: flow_graph_(flow_graph),
reaching_defs_(flow_graph),
@ -522,16 +516,16 @@ void FlowGraphAllocator::BuildLiveRanges() {
range->DefineAt(catch_entry->start_pos()); // Defined at block entry.
ProcessInitialDefinition(defn, range, catch_entry);
}
// Block the two registers used by CatchEntryInstr from the block start to
// until the end of the instruction so that they are preserved.
ASSERT(catch_entry->next()->IsCatchEntry());
// Block the two fixed registers used by CatchBlockEntryInstr from the
// block start until the end of the instruction so that they are
// preserved.
intptr_t start = catch_entry->start_pos();
BlockLocation(Location::RegisterLocation(kExceptionObjectReg),
start,
ToInstructionEnd(NextInstructionPos(start)));
ToInstructionEnd(start));
BlockLocation(Location::RegisterLocation(kStackTraceObjectReg),
start,
ToInstructionEnd(NextInstructionPos(start)));
ToInstructionEnd(start));
}
}

View file

@ -3298,11 +3298,7 @@ void EffectGraphVisitor::VisitSequenceNode(SequenceNode* node) {
void EffectGraphVisitor::VisitCatchClauseNode(CatchClauseNode* node) {
InlineBailout("EffectGraphVisitor::VisitCatchClauseNode (exception)");
// NOTE: The implicit variables ':saved_context', ':exception_var'
// and ':stacktrace_var' can never be captured variables.
// Restores CTX from local variable ':saved_context'.
AddInstruction(
new CatchEntryInstr(node->exception_var(), node->stacktrace_var()));
BuildRestoreContext(node->context_var());
EffectGraphVisitor for_catch(owner(), temp_index());
@ -3343,21 +3339,29 @@ void EffectGraphVisitor::VisitTryCatchNode(TryCatchNode* node) {
CatchClauseNode* catch_block = node->catch_block();
SequenceNode* finally_block = node->finally_block();
if (catch_block != NULL) {
// If there is a finally block, it is the handler for code in the catch
// block.
intptr_t catch_handler_index = (finally_block == NULL)
const intptr_t catch_handler_index = (finally_block == NULL)
? original_handler_index
: catch_block->catch_handler_index();
owner()->set_try_index(catch_handler_index);
EffectGraphVisitor for_catch(owner(), temp_index());
catch_block->Visit(&for_catch);
// NOTE: The implicit variables ':saved_context', ':exception_var'
// and ':stacktrace_var' can never be captured variables.
ASSERT(!catch_block->exception_var().is_captured());
ASSERT(!catch_block->stacktrace_var().is_captured());
CatchBlockEntryInstr* catch_entry =
new CatchBlockEntryInstr(owner()->AllocateBlockId(),
catch_handler_index,
catch_block->handler_types(),
try_handler_index);
try_handler_index,
catch_block->exception_var(),
catch_block->stacktrace_var());
owner()->AddCatchEntry(catch_entry);
ASSERT(!for_catch.is_open());
AppendFragment(catch_entry, for_catch);
@ -3374,9 +3378,6 @@ void EffectGraphVisitor::VisitTryCatchNode(TryCatchNode* node) {
// code in the finally block.
owner()->set_try_index(original_handler_index);
EffectGraphVisitor for_finally(owner(), temp_index());
for_finally.AddInstruction(
new CatchEntryInstr(catch_block->exception_var(),
catch_block->stacktrace_var()));
for_finally.BuildRestoreContext(catch_block->context_var());
finally_block->Visit(&for_finally);
@ -3399,11 +3400,13 @@ void EffectGraphVisitor::VisitTryCatchNode(TryCatchNode* node) {
new CatchBlockEntryInstr(owner()->AllocateBlockId(),
original_handler_index,
types,
catch_handler_index);
catch_handler_index,
catch_block->exception_var(),
catch_block->stacktrace_var());
owner()->AddCatchEntry(finally_entry);
AppendFragment(finally_entry, for_finally);
}
}
// Generate code for the finally block if one exists.
if ((finally_block != NULL) && is_open()) {
EffectGraphVisitor for_finally_block(owner(), temp_index());

View file

@ -790,10 +790,9 @@ void FlowGraphCompiler::EmitTrySync(Instruction* instr, intptr_t try_index) {
}
// Process locals. Skip exception_var and stacktrace_var.
CatchEntryInstr* catch_entry = catch_block->next()->AsCatchEntry();
intptr_t local_base = kFirstLocalSlotFromFp + num_non_copied_params;
intptr_t ex_idx = local_base - catch_entry->exception_var().index();
intptr_t st_idx = local_base - catch_entry->stacktrace_var().index();
intptr_t ex_idx = local_base - catch_block->exception_var().index();
intptr_t st_idx = local_base - catch_block->stacktrace_var().index();
for (; i < flow_graph().variable_count(); ++i) {
if (i == ex_idx || i == st_idx) continue;
if ((*idefs)[i]->IsConstant()) continue;

View file

@ -810,10 +810,9 @@ void FlowGraphCompiler::EmitTrySync(Instruction* instr, intptr_t try_index) {
}
// Process locals. Skip exception_var and stacktrace_var.
CatchEntryInstr* catch_entry = catch_block->next()->AsCatchEntry();
intptr_t local_base = kFirstLocalSlotFromFp + num_non_copied_params;
intptr_t ex_idx = local_base - catch_entry->exception_var().index();
intptr_t st_idx = local_base - catch_entry->stacktrace_var().index();
intptr_t ex_idx = local_base - catch_block->exception_var().index();
intptr_t st_idx = local_base - catch_block->stacktrace_var().index();
for (; i < flow_graph().variable_count(); ++i) {
if (i == ex_idx || i == st_idx) continue;
if ((*idefs)[i]->IsConstant()) continue;

View file

@ -817,10 +817,9 @@ void FlowGraphCompiler::EmitTrySync(Instruction* instr, intptr_t try_index) {
}
// Process locals. Skip exception_var and stacktrace_var.
CatchEntryInstr* catch_entry = catch_block->next()->AsCatchEntry();
intptr_t local_base = kFirstLocalSlotFromFp + num_non_copied_params;
intptr_t ex_idx = local_base - catch_entry->exception_var().index();
intptr_t st_idx = local_base - catch_entry->stacktrace_var().index();
intptr_t ex_idx = local_base - catch_block->exception_var().index();
intptr_t st_idx = local_base - catch_block->stacktrace_var().index();
for (; i < flow_graph().variable_count(); ++i) {
if (i == ex_idx || i == st_idx) continue;
if ((*idefs)[i]->IsConstant()) continue;

View file

@ -803,10 +803,9 @@ void FlowGraphCompiler::EmitTrySync(Instruction* instr, intptr_t try_index) {
}
// Process locals. Skip exception_var and stacktrace_var.
CatchEntryInstr* catch_entry = catch_block->next()->AsCatchEntry();
intptr_t local_base = kFirstLocalSlotFromFp + num_non_copied_params;
intptr_t ex_idx = local_base - catch_entry->exception_var().index();
intptr_t st_idx = local_base - catch_entry->stacktrace_var().index();
intptr_t ex_idx = local_base - catch_block->exception_var().index();
intptr_t st_idx = local_base - catch_block->stacktrace_var().index();
for (; i < flow_graph().variable_count(); ++i) {
if (i == ex_idx || i == st_idx) continue;
if ((*idefs)[i]->IsConstant()) continue;

View file

@ -3631,15 +3631,14 @@ void TryCatchAnalyzer::Optimize(FlowGraph* flow_graph) {
for (intptr_t catch_idx = 0;
catch_idx < catch_entries.length();
++catch_idx) {
CatchBlockEntryInstr* cb = catch_entries[catch_idx];
CatchEntryInstr* catch_entry = cb->next()->AsCatchEntry();
CatchBlockEntryInstr* catch_entry = catch_entries[catch_idx];
// Initialize cdefs with the original initial definitions (ParameterInstr).
// The following representation is used:
// ParameterInstr => unknown
// ConstantInstr => known constant
// NULL => non-constant
GrowableArray<Definition*>* idefs = cb->initial_definitions();
GrowableArray<Definition*>* idefs = catch_entry->initial_definitions();
GrowableArray<Definition*> cdefs(idefs->length());
cdefs.AddArray(*idefs);
@ -3652,7 +3651,7 @@ void TryCatchAnalyzer::Optimize(FlowGraph* flow_graph) {
!block_it.Done();
block_it.Advance()) {
BlockEntryInstr* block = block_it.Current();
if (block->try_index() == cb->catch_try_index()) {
if (block->try_index() == catch_entry->catch_try_index()) {
for (ForwardInstructionIterator instr_it(block);
!instr_it.Done();
instr_it.Advance()) {
@ -5726,9 +5725,6 @@ void ConstantPropagator::VisitBranch(BranchInstr* instr) {
void ConstantPropagator::VisitStoreContext(StoreContextInstr* instr) { }
void ConstantPropagator::VisitCatchEntry(CatchEntryInstr* instr) { }
void ConstantPropagator::VisitCheckStackOverflow(
CheckStackOverflowInstr* instr) { }

View file

@ -578,13 +578,6 @@ void AllocateContextInstr::PrintOperandsTo(BufferFormatter* f) const {
}
void CatchEntryInstr::PrintOperandsTo(BufferFormatter* f) const {
f->Print("%s, %s",
exception_var().name().ToCString(),
stacktrace_var().name().ToCString());
}
void BinarySmiOpInstr::PrintTo(BufferFormatter* f) const {
Definition::PrintTo(f);
f->Print(" %co", overflow_ ? '+' : '-');
@ -899,7 +892,7 @@ void CatchBlockEntryInstr::PrintTo(BufferFormatter* f) const {
f->Print("B%"Pd"[target catch try_idx %"Pd" catch_try_idx %"Pd"]",
block_id(), try_index(), catch_try_index());
if (HasParallelMove()) {
f->Print(" ");
f->Print("\n");
parallel_move()->PrintTo(f);
}

View file

@ -1601,24 +1601,6 @@ void TargetEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
LocationSummary* CatchBlockEntryInstr::MakeLocationSummary() const {
UNREACHABLE();
return NULL;
}
void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ Bind(compiler->GetJumpLabel(this));
compiler->AddExceptionHandler(catch_try_index(),
try_index(),
compiler->assembler()->CodeSize(),
catch_handler_types_);
if (HasParallelMove()) {
compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
}
}
LocationSummary* PhiInstr::MakeLocationSummary() const {
UNREACHABLE();
return NULL;

View file

@ -577,7 +577,6 @@ class EmbeddedArray<T, 0> {
M(ExtractConstructorInstantiator) \
M(AllocateContext) \
M(CloneContext) \
M(CatchEntry) \
M(BinarySmiOp) \
M(UnarySmiOp) \
M(CheckStackOverflow) \
@ -1489,11 +1488,15 @@ class CatchBlockEntryInstr : public BlockEntryInstr {
CatchBlockEntryInstr(intptr_t block_id,
intptr_t try_index,
const Array& handler_types,
intptr_t catch_try_index)
intptr_t catch_try_index,
const LocalVariable& exception_var,
const LocalVariable& stacktrace_var)
: BlockEntryInstr(block_id, try_index),
predecessor_(NULL),
catch_handler_types_(Array::ZoneHandle(handler_types.raw())),
catch_try_index_(catch_try_index) { }
catch_try_index_(catch_try_index),
exception_var_(exception_var),
stacktrace_var_(stacktrace_var) { }
DECLARE_INSTRUCTION(CatchBlockEntry)
@ -1505,6 +1508,9 @@ class CatchBlockEntryInstr : public BlockEntryInstr {
return predecessor_;
}
const LocalVariable& exception_var() const { return exception_var_; }
const LocalVariable& stacktrace_var() const { return stacktrace_var_; }
// Returns try index for the try block to which this catch handler
// corresponds.
intptr_t catch_try_index() const {
@ -1529,6 +1535,8 @@ class CatchBlockEntryInstr : public BlockEntryInstr {
const Array& catch_handler_types_;
const intptr_t catch_try_index_;
GrowableArray<Definition*> initial_definitions_;
const LocalVariable& exception_var_;
const LocalVariable& stacktrace_var_;
DISALLOW_COPY_AND_ASSIGN(CatchBlockEntryInstr);
};
@ -4295,35 +4303,6 @@ class CloneContextInstr : public TemplateDefinition<1> {
};
class CatchEntryInstr : public TemplateInstruction<0> {
public:
CatchEntryInstr(const LocalVariable& exception_var,
const LocalVariable& stacktrace_var)
: exception_var_(exception_var), stacktrace_var_(stacktrace_var) {}
const LocalVariable& exception_var() const { return exception_var_; }
const LocalVariable& stacktrace_var() const { return stacktrace_var_; }
DECLARE_INSTRUCTION(CatchEntry)
virtual intptr_t ArgumentCount() const { return 0; }
virtual void PrintOperandsTo(BufferFormatter* f) const;
virtual bool CanDeoptimize() const { return false; }
virtual EffectSet Effects() const { return EffectSet::All(); }
virtual bool MayThrow() const { return false; }
private:
const LocalVariable& exception_var_;
const LocalVariable& stacktrace_var_;
DISALLOW_COPY_AND_ASSIGN(CatchEntryInstr);
};
class CheckEitherNonSmiInstr : public TemplateInstruction<2> {
public:
CheckEitherNonSmiInstr(Value* left,

View file

@ -2043,16 +2043,26 @@ void CloneContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
LocationSummary* CatchEntryInstr::MakeLocationSummary() const {
return LocationSummary::Make(0,
Location::NoLocation(),
LocationSummary::kNoCall);
LocationSummary* CatchBlockEntryInstr::MakeLocationSummary() const {
UNREACHABLE();
return NULL;
}
// Restore stack and initialize the two exception variables:
// exception and stack trace variables.
void CatchEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ Bind(compiler->GetJumpLabel(this));
compiler->AddExceptionHandler(catch_try_index(),
try_index(),
compiler->assembler()->CodeSize(),
catch_handler_types_);
// Restore the pool pointer.
__ LoadPoolPointer();
if (HasParallelMove()) {
compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
}
// Restore SP from FP as we are coming from a throw and the code for
// popping arguments has not been run.
const intptr_t fp_sp_dist =
@ -2060,15 +2070,12 @@ void CatchEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
ASSERT(fp_sp_dist <= 0);
__ AddImmediate(SP, FP, fp_sp_dist);
ASSERT(!exception_var().is_captured());
ASSERT(!stacktrace_var().is_captured());
// Restore stack and initialize the two exception variables:
// exception and stack trace variables.
__ StoreToOffset(kWord, kExceptionObjectReg,
FP, exception_var().index() * kWordSize);
__ StoreToOffset(kWord, kStackTraceObjectReg,
FP, stacktrace_var().index() * kWordSize);
// Restore the pool pointer.
__ LoadPoolPointer();
}

View file

@ -2102,16 +2102,22 @@ void CloneContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
LocationSummary* CatchEntryInstr::MakeLocationSummary() const {
return LocationSummary::Make(0,
Location::NoLocation(),
LocationSummary::kNoCall);
LocationSummary* CatchBlockEntryInstr::MakeLocationSummary() const {
UNREACHABLE();
return NULL;
}
// Restore stack and initialize the two exception variables:
// exception and stack trace variables.
void CatchEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ Bind(compiler->GetJumpLabel(this));
compiler->AddExceptionHandler(catch_try_index(),
try_index(),
compiler->assembler()->CodeSize(),
catch_handler_types_);
if (HasParallelMove()) {
compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
}
// Restore ESP from EBP as we are coming from a throw and the code for
// popping arguments has not been run.
const intptr_t fp_sp_dist =
@ -2119,8 +2125,8 @@ void CatchEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
ASSERT(fp_sp_dist <= 0);
__ leal(ESP, Address(EBP, fp_sp_dist));
ASSERT(!exception_var().is_captured());
ASSERT(!stacktrace_var().is_captured());
// Restore stack and initialize the two exception variables:
// exception and stack trace variables.
__ movl(Address(EBP, exception_var().index() * kWordSize),
kExceptionObjectReg);
__ movl(Address(EBP, stacktrace_var().index() * kWordSize),

View file

@ -2126,16 +2126,29 @@ void CloneContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
LocationSummary* CatchEntryInstr::MakeLocationSummary() const {
return LocationSummary::Make(0,
Location::NoLocation(),
LocationSummary::kNoCall);
LocationSummary* CatchBlockEntryInstr::MakeLocationSummary() const {
UNREACHABLE();
return NULL;
}
// Restore stack and initialize the two exception variables:
// exception and stack trace variables.
void CatchEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ Bind(compiler->GetJumpLabel(this));
compiler->AddExceptionHandler(catch_try_index(),
try_index(),
compiler->assembler()->CodeSize(),
catch_handler_types_);
// Restore pool pointer.
__ GetNextPC(CMPRES, TMP);
const intptr_t object_pool_pc_dist =
Instructions::HeaderSize() - Instructions::object_pool_offset() +
compiler->assembler()->CodeSize() - 1 * Instr::kInstrSize;
__ LoadFromOffset(PP, CMPRES, -object_pool_pc_dist);
if (HasParallelMove()) {
compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
}
// Restore SP from FP as we are coming from a throw and the code for
// popping arguments has not been run.
const intptr_t fp_sp_dist =
@ -2143,22 +2156,12 @@ void CatchEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
ASSERT(fp_sp_dist <= 0);
__ AddImmediate(SP, FP, fp_sp_dist);
ASSERT(!exception_var().is_captured());
ASSERT(!stacktrace_var().is_captured());
// Restore stack and initialize the two exception variables:
// exception and stack trace variables.
__ sw(kExceptionObjectReg,
Address(FP, exception_var().index() * kWordSize));
__ sw(kStackTraceObjectReg,
Address(FP, stacktrace_var().index() * kWordSize));
__ GetNextPC(CMPRES, TMP);
// Calculate offset of pool pointer from the PC.
const intptr_t object_pool_pc_dist =
Instructions::HeaderSize() - Instructions::object_pool_offset() +
compiler->assembler()->CodeSize() - 1 * Instr::kInstrSize;
__ LoadFromOffset(PP, CMPRES, -object_pool_pc_dist);
}

View file

@ -2057,16 +2057,22 @@ void CloneContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
LocationSummary* CatchEntryInstr::MakeLocationSummary() const {
return LocationSummary::Make(0,
Location::NoLocation(),
LocationSummary::kNoCall);
LocationSummary* CatchBlockEntryInstr::MakeLocationSummary() const {
UNREACHABLE();
return NULL;
}
// Restore stack and initialize the two exception variables:
// exception and stack trace variables.
void CatchEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ Bind(compiler->GetJumpLabel(this));
compiler->AddExceptionHandler(catch_try_index(),
try_index(),
compiler->assembler()->CodeSize(),
catch_handler_types_);
if (HasParallelMove()) {
compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
}
// Restore RSP from RBP as we are coming from a throw and the code for
// popping arguments has not been run.
const intptr_t fp_sp_dist =
@ -2074,8 +2080,8 @@ void CatchEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
ASSERT(fp_sp_dist <= 0);
__ leaq(RSP, Address(RBP, fp_sp_dist));
ASSERT(!exception_var().is_captured());
ASSERT(!stacktrace_var().is_captured());
// Restore stack and initialize the two exception variables:
// exception and stack trace variables.
__ movq(Address(RBP, exception_var().index() * kWordSize),
kExceptionObjectReg);
__ movq(Address(RBP, stacktrace_var().index() * kWordSize),

View file

@ -301,13 +301,5 @@ prefix22_test: Pass
invocation_mirror_test: Fail, OK # hardcoded names.
super_call4_test: Fail, OK # hardcoded names.
[ $arch == simarm || $arch == arm ]
try_catch4_test: Crash, Fail
[ $arch == mips ]
*: Skip
[ $arch == simmips ]
try_catch4_test: Fail