[vm/nnbd] Use separate runtime entry for throwing _CastError in null check operator

Previously, the NullError runtime entry was used to throw both
NoSuchMethodError and _CastError: _CastError was thrown when the
selector was null. However, with --dwarf-stack-traces the CodeSourceMap
is not preserved and the selector is not available, so the wrong
exception type was thrown.

This is fixed by introducing a separate NullCastError runtime entry
along with corresponding shared stubs.

NullErrorSlowPath is extended to support 3 exception types
(NoSuchMethodError, ArgumentError and CastError).
NullArgErrorSlowPath is removed as it duplicates NullErrorSlowPath.

Fixes vm/dart/null_checks_with_dwarf_stack_traces_test
Fixes https://github.com/dart-lang/sdk/issues/42149

Change-Id: I4f2b34ad2b161459491de60d8737bb4f87117336
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/153040
Reviewed-by: Ryan Macnak <rmacnak@google.com>
Commit-Queue: Alexander Markov <alexmarkov@google.com>
This commit is contained in:
Alexander Markov 2020-07-01 20:52:34 +00:00 committed by commit-bot@chromium.org
parent 51ac86c753
commit 06a8cc8dea
22 changed files with 1112 additions and 970 deletions

View file

@@ -2576,6 +2576,64 @@ void ThrowErrorSlowPathCode::EmitNativeCode(FlowGraphCompiler* compiler) {
}
}
const char* NullErrorSlowPath::name() {
switch (exception_type()) {
case CheckNullInstr::kNoSuchMethod:
return "check null (nsm)";
case CheckNullInstr::kArgumentError:
return "check null (arg)";
case CheckNullInstr::kCastError:
return "check null (cast)";
}
UNREACHABLE();
}
const RuntimeEntry& NullErrorSlowPath::GetRuntimeEntry(
CheckNullInstr::ExceptionType exception_type) {
switch (exception_type) {
case CheckNullInstr::kNoSuchMethod:
return kNullErrorRuntimeEntry;
case CheckNullInstr::kArgumentError:
return kArgumentNullErrorRuntimeEntry;
case CheckNullInstr::kCastError:
return kNullCastErrorRuntimeEntry;
}
UNREACHABLE();
}
CodePtr NullErrorSlowPath::GetStub(FlowGraphCompiler* compiler,
CheckNullInstr::ExceptionType exception_type,
bool save_fpu_registers) {
auto object_store = compiler->isolate()->object_store();
switch (exception_type) {
case CheckNullInstr::kNoSuchMethod:
return save_fpu_registers
? object_store->null_error_stub_with_fpu_regs_stub()
: object_store->null_error_stub_without_fpu_regs_stub();
case CheckNullInstr::kArgumentError:
return save_fpu_registers
? object_store->null_arg_error_stub_with_fpu_regs_stub()
: object_store->null_arg_error_stub_without_fpu_regs_stub();
case CheckNullInstr::kCastError:
return save_fpu_registers
? object_store->null_cast_error_stub_with_fpu_regs_stub()
: object_store->null_cast_error_stub_without_fpu_regs_stub();
}
UNREACHABLE();
}
void NullErrorSlowPath::EmitSharedStubCall(FlowGraphCompiler* compiler,
bool save_fpu_registers) {
#if defined(TARGET_ARCH_IA32)
UNREACHABLE();
#else
const auto& stub =
Code::ZoneHandle(compiler->zone(),
GetStub(compiler, exception_type(), save_fpu_registers));
compiler->EmitCallToStub(stub);
#endif
}
void FlowGraphCompiler::EmitNativeMove(
const compiler::ffi::NativeLocation& destination,
const compiler::ffi::NativeLocation& source,

View file

@@ -316,11 +316,15 @@ class NullErrorSlowPath : public ThrowErrorSlowPathCode {
NullErrorSlowPath(CheckNullInstr* instruction, intptr_t try_index)
: ThrowErrorSlowPathCode(instruction,
kNullErrorRuntimeEntry,
GetRuntimeEntry(instruction->exception_type()),
kNumberOfArguments,
try_index) {}
const char* name() override { return "check null (nsm)"; }
CheckNullInstr::ExceptionType exception_type() const {
return instruction()->AsCheckNull()->exception_type();
}
const char* name() override;
void EmitSharedStubCall(FlowGraphCompiler* compiler,
bool save_fpu_registers) override;
@@ -329,27 +333,14 @@ class NullErrorSlowPath : public ThrowErrorSlowPathCode {
CheckNullInstr::AddMetadataForRuntimeCall(instruction()->AsCheckNull(),
compiler);
}
};
class NullArgErrorSlowPath : public ThrowErrorSlowPathCode {
public:
static const intptr_t kNumberOfArguments = 0;
static CodePtr GetStub(FlowGraphCompiler* compiler,
CheckNullInstr::ExceptionType exception_type,
bool save_fpu_registers);
NullArgErrorSlowPath(CheckNullInstr* instruction, intptr_t try_index)
: ThrowErrorSlowPathCode(instruction,
kArgumentNullErrorRuntimeEntry,
kNumberOfArguments,
try_index) {}
const char* name() override { return "check null (arg)"; }
void EmitSharedStubCall(FlowGraphCompiler* compiler,
bool save_fpu_registers) override;
void AddMetadataForRuntimeCall(FlowGraphCompiler* compiler) override {
CheckNullInstr::AddMetadataForRuntimeCall(instruction()->AsCheckNull(),
compiler);
}
private:
static const RuntimeEntry& GetRuntimeEntry(
CheckNullInstr::ExceptionType exception_type);
};
class RangeErrorSlowPath : public ThrowErrorSlowPathCode {
@@ -960,7 +951,6 @@ class FlowGraphCompiler : public ValueObject {
friend class BoxInt64Instr; // For AddPcRelativeCallStubTarget().
friend class CheckNullInstr; // For AddPcRelativeCallStubTarget().
friend class NullErrorSlowPath; // For AddPcRelativeCallStubTarget().
friend class NullArgErrorSlowPath; // For AddPcRelativeCallStubTarget().
friend class CheckStackOverflowInstr; // For AddPcRelativeCallStubTarget().
friend class StoreIndexedInstr; // For AddPcRelativeCallStubTarget().
friend class StoreInstanceFieldInstr; // For AddPcRelativeCallStubTarget().

View file

@@ -5195,36 +5195,6 @@ void CheckNullInstr::AddMetadataForRuntimeCall(CheckNullInstr* check_null,
compiler->AddNullCheck(check_null->token_pos(), check_null->function_name());
}
void NullErrorSlowPath::EmitSharedStubCall(FlowGraphCompiler* compiler,
bool save_fpu_registers) {
#if defined(TARGET_ARCH_IA32)
UNREACHABLE();
#else
auto object_store = compiler->isolate()->object_store();
const auto& stub = Code::ZoneHandle(
compiler->zone(),
save_fpu_registers
? object_store->null_error_stub_with_fpu_regs_stub()
: object_store->null_error_stub_without_fpu_regs_stub());
compiler->EmitCallToStub(stub);
#endif
}
void NullArgErrorSlowPath::EmitSharedStubCall(FlowGraphCompiler* compiler,
bool save_fpu_registers) {
#if defined(TARGET_ARCH_IA32)
UNREACHABLE();
#else
auto object_store = compiler->isolate()->object_store();
const auto& stub = Code::ZoneHandle(
compiler->zone(),
save_fpu_registers
? object_store->null_arg_error_stub_with_fpu_regs_stub()
: object_store->null_arg_error_stub_without_fpu_regs_stub());
compiler->EmitCallToStub(stub);
#endif
}
void RangeErrorSlowPath::EmitSharedStubCall(FlowGraphCompiler* compiler,
bool save_fpu_registers) {
#if defined(TARGET_ARCH_IA32)

View file

@@ -8538,6 +8538,7 @@ class CheckNullInstr : public TemplateDefinition<1, Throws, Pure> {
enum ExceptionType {
kNoSuchMethod,
kArgumentError,
kCastError,
};
CheckNullInstr(Value* value,
@@ -8556,7 +8557,6 @@ class CheckNullInstr : public TemplateDefinition<1, Throws, Pure> {
Value* value() const { return inputs_[0]; }
virtual TokenPosition token_pos() const { return token_pos_; }
const String& function_name() const { return function_name_; }
bool IsArgumentCheck() const { return exception_type_ == kArgumentError; }
ExceptionType exception_type() const { return exception_type_; }
virtual bool UseSharedSlowPathStub(bool is_optimizing) const {

View file

@@ -6560,17 +6560,10 @@ void CheckNullInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
// in order to be able to allocate it on register.
__ CompareObject(value_reg, Object::null_object());
auto object_store = compiler->isolate()->object_store();
const bool live_fpu_regs = locs()->live_registers()->FpuRegisterCount() > 0;
const Code& stub = Code::ZoneHandle(
Code& stub = Code::ZoneHandle(
compiler->zone(),
IsArgumentCheck()
? (live_fpu_regs
? object_store->null_arg_error_stub_with_fpu_regs_stub()
: object_store->null_arg_error_stub_without_fpu_regs_stub())
: (live_fpu_regs
? object_store->null_error_stub_with_fpu_regs_stub()
: object_store->null_error_stub_without_fpu_regs_stub()));
NullErrorSlowPath::GetStub(compiler, exception_type(), live_fpu_regs));
const bool using_shared_stub = locs()->call_on_shared_slow_path();
if (FLAG_precompiled_mode && FLAG_use_bare_instructions &&
@@ -6589,12 +6582,8 @@ void CheckNullInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
return;
}
ThrowErrorSlowPathCode* slow_path = nullptr;
if (IsArgumentCheck()) {
slow_path = new NullArgErrorSlowPath(this, compiler->CurrentTryIndex());
} else {
slow_path = new NullErrorSlowPath(this, compiler->CurrentTryIndex());
}
ThrowErrorSlowPathCode* slow_path =
new NullErrorSlowPath(this, compiler->CurrentTryIndex());
compiler->AddSlowPathCode(slow_path);
__ BranchIf(EQUAL, slow_path->entry_label());

View file

@@ -5552,12 +5552,8 @@ void CheckSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
void CheckNullInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
ThrowErrorSlowPathCode* slow_path = nullptr;
if (IsArgumentCheck()) {
slow_path = new NullArgErrorSlowPath(this, compiler->CurrentTryIndex());
} else {
slow_path = new NullErrorSlowPath(this, compiler->CurrentTryIndex());
}
ThrowErrorSlowPathCode* slow_path =
new NullErrorSlowPath(this, compiler->CurrentTryIndex());
compiler->AddSlowPathCode(slow_path);
Register value_reg = locs()->in(0).reg();

View file

@@ -5506,12 +5506,8 @@ void CheckSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
void CheckNullInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
ThrowErrorSlowPathCode* slow_path = nullptr;
if (IsArgumentCheck()) {
slow_path = new NullArgErrorSlowPath(this, compiler->CurrentTryIndex());
} else {
slow_path = new NullErrorSlowPath(this, compiler->CurrentTryIndex());
}
ThrowErrorSlowPathCode* slow_path =
new NullErrorSlowPath(this, compiler->CurrentTryIndex());
compiler->AddSlowPathCode(slow_path);
Register value_reg = locs()->in(0).reg();

View file

@@ -307,7 +307,17 @@ void Definition::PrintTo(BufferFormatter* f) const {
void CheckNullInstr::PrintOperandsTo(BufferFormatter* f) const {
Definition::PrintOperandsTo(f);
f->Print(IsArgumentCheck() ? ", ArgumentError" : ", NoSuchMethodError");
switch (exception_type()) {
case kNoSuchMethod:
f->Print(", NoSuchMethodError");
break;
case kArgumentError:
f->Print(", ArgumentError");
break;
case kCastError:
f->Print(", CastError");
break;
}
}
void Definition::PrintOperandsTo(BufferFormatter* f) const {

View file

@@ -5771,12 +5771,8 @@ void CheckSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
void CheckNullInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
ThrowErrorSlowPathCode* slow_path = nullptr;
if (IsArgumentCheck()) {
slow_path = new NullArgErrorSlowPath(this, compiler->CurrentTryIndex());
} else {
slow_path = new NullErrorSlowPath(this, compiler->CurrentTryIndex());
}
ThrowErrorSlowPathCode* slow_path =
new NullErrorSlowPath(this, compiler->CurrentTryIndex());
compiler->AddSlowPathCode(slow_path);
Register value_reg = locs()->in(0).reg();

View file

@@ -1011,8 +1011,10 @@ Fragment BaseFlowGraphBuilder::CheckNull(TokenPosition position,
bool clear_the_temp /* = true */) {
Fragment instructions = LoadLocal(receiver);
CheckNullInstr* check_null =
new (Z) CheckNullInstr(Pop(), function_name, GetNextDeoptId(), position);
CheckNullInstr* check_null = new (Z)
CheckNullInstr(Pop(), function_name, GetNextDeoptId(), position,
function_name.IsNull() ? CheckNullInstr::kCastError
: CheckNullInstr::kNoSuchMethod);
// Does not use the redefinition, no `Push(check_null)`.
instructions <<= check_null;

View file

@@ -1028,6 +1028,8 @@ class Thread : public AllStatic {
static word null_error_shared_with_fpu_regs_stub_offset();
static word null_arg_error_shared_without_fpu_regs_stub_offset();
static word null_arg_error_shared_with_fpu_regs_stub_offset();
static word null_cast_error_shared_without_fpu_regs_stub_offset();
static word null_cast_error_shared_with_fpu_regs_stub_offset();
static word range_error_shared_without_fpu_regs_stub_offset();
static word range_error_shared_with_fpu_regs_stub_offset();
static word stack_overflow_shared_without_fpu_regs_entry_point_offset();

File diff suppressed because it is too large Load diff

View file

@@ -204,12 +204,14 @@
FIELD(Thread, switchable_call_miss_stub_offset) \
FIELD(Thread, no_scope_native_wrapper_entry_point_offset) \
FIELD(Thread, null_error_shared_with_fpu_regs_stub_offset) \
FIELD(Thread, null_arg_error_shared_with_fpu_regs_stub_offset) \
FIELD(Thread, range_error_shared_with_fpu_regs_stub_offset) \
\
FIELD(Thread, null_error_shared_without_fpu_regs_stub_offset) \
FIELD(Thread, null_arg_error_shared_with_fpu_regs_stub_offset) \
FIELD(Thread, null_arg_error_shared_without_fpu_regs_stub_offset) \
FIELD(Thread, null_cast_error_shared_with_fpu_regs_stub_offset) \
FIELD(Thread, null_cast_error_shared_without_fpu_regs_stub_offset) \
FIELD(Thread, range_error_shared_with_fpu_regs_stub_offset) \
FIELD(Thread, range_error_shared_without_fpu_regs_stub_offset) \
\
FIELD(Thread, object_null_offset) \
FIELD(Thread, predefined_symbols_address_offset) \
FIELD(Thread, resume_pc_offset) \

View file

@@ -523,6 +523,22 @@ void StubCodeCompiler::GenerateNullArgErrorSharedWithFPURegsStub(
/*allow_return=*/false);
}
void StubCodeCompiler::GenerateNullCastErrorSharedWithoutFPURegsStub(
Assembler* assembler) {
GenerateSharedStub(
assembler, /*save_fpu_registers=*/false, &kNullCastErrorRuntimeEntry,
target::Thread::null_cast_error_shared_without_fpu_regs_stub_offset(),
/*allow_return=*/false);
}
void StubCodeCompiler::GenerateNullCastErrorSharedWithFPURegsStub(
Assembler* assembler) {
GenerateSharedStub(
assembler, /*save_fpu_registers=*/true, &kNullCastErrorRuntimeEntry,
target::Thread::null_cast_error_shared_with_fpu_regs_stub_offset(),
/*allow_return=*/false);
}
static void GenerateRangeError(Assembler* assembler, bool with_fpu_regs) {
auto perform_runtime_call = [&]() {
ASSERT(!GenericCheckBoundInstr::UseUnboxedRepresentation());

View file

@@ -580,6 +580,22 @@ void StubCodeCompiler::GenerateNullArgErrorSharedWithFPURegsStub(
/*allow_return=*/false);
}
void StubCodeCompiler::GenerateNullCastErrorSharedWithoutFPURegsStub(
Assembler* assembler) {
GenerateSharedStub(
assembler, /*save_fpu_registers=*/false, &kNullCastErrorRuntimeEntry,
target::Thread::null_cast_error_shared_without_fpu_regs_stub_offset(),
/*allow_return=*/false);
}
void StubCodeCompiler::GenerateNullCastErrorSharedWithFPURegsStub(
Assembler* assembler) {
GenerateSharedStub(
assembler, /*save_fpu_registers=*/true, &kNullCastErrorRuntimeEntry,
target::Thread::null_cast_error_shared_with_fpu_regs_stub_offset(),
/*allow_return=*/false);
}
static void GenerateRangeError(Assembler* assembler, bool with_fpu_regs) {
auto perform_runtime_call = [&]() {
// If the generated code has unboxed index/length we need to box them before

View file

@@ -323,6 +323,17 @@ void StubCodeCompiler::GenerateNullArgErrorSharedWithFPURegsStub(
Assembler* assembler) {
__ Breakpoint();
}
void StubCodeCompiler::GenerateNullCastErrorSharedWithoutFPURegsStub(
Assembler* assembler) {
__ Breakpoint();
}
void StubCodeCompiler::GenerateNullCastErrorSharedWithFPURegsStub(
Assembler* assembler) {
__ Breakpoint();
}
void StubCodeCompiler::GenerateRangeErrorSharedWithoutFPURegsStub(
Assembler* assembler) {
__ Breakpoint();

View file

@@ -516,6 +516,22 @@ void StubCodeCompiler::GenerateNullArgErrorSharedWithFPURegsStub(
/*allow_return=*/false);
}
void StubCodeCompiler::GenerateNullCastErrorSharedWithoutFPURegsStub(
Assembler* assembler) {
GenerateSharedStub(
assembler, /*save_fpu_registers=*/false, &kNullCastErrorRuntimeEntry,
target::Thread::null_cast_error_shared_without_fpu_regs_stub_offset(),
/*allow_return=*/false);
}
void StubCodeCompiler::GenerateNullCastErrorSharedWithFPURegsStub(
Assembler* assembler) {
GenerateSharedStub(
assembler, /*save_fpu_registers=*/true, &kNullCastErrorRuntimeEntry,
target::Thread::null_cast_error_shared_with_fpu_regs_stub_offset(),
/*allow_return=*/false);
}
static void GenerateRangeError(Assembler* assembler, bool with_fpu_regs) {
auto perform_runtime_call = [&]() {
// If the generated code has unboxed index/length we need to box them before

View file

@@ -180,6 +180,8 @@ class ObjectPointerVisitor;
RW(Code, null_error_stub_without_fpu_regs_stub) \
RW(Code, null_arg_error_stub_with_fpu_regs_stub) \
RW(Code, null_arg_error_stub_without_fpu_regs_stub) \
RW(Code, null_cast_error_stub_with_fpu_regs_stub) \
RW(Code, null_cast_error_stub_without_fpu_regs_stub) \
RW(Code, range_error_stub_with_fpu_regs_stub) \
RW(Code, range_error_stub_without_fpu_regs_stub) \
RW(Code, allocate_mint_with_fpu_regs_stub) \
@@ -224,6 +226,9 @@ class ObjectPointerVisitor;
DO(null_arg_error_stub_with_fpu_regs_stub, NullArgErrorSharedWithFPURegs) \
DO(null_arg_error_stub_without_fpu_regs_stub, \
NullArgErrorSharedWithoutFPURegs) \
DO(null_cast_error_stub_with_fpu_regs_stub, NullCastErrorSharedWithFPURegs) \
DO(null_cast_error_stub_without_fpu_regs_stub, \
NullCastErrorSharedWithoutFPURegs) \
DO(range_error_stub_with_fpu_regs_stub, RangeErrorSharedWithFPURegs) \
DO(range_error_stub_without_fpu_regs_stub, RangeErrorSharedWithoutFPURegs) \
DO(allocate_mint_with_fpu_regs_stub, AllocateMintSharedWithFPURegs) \

View file

@@ -227,6 +227,10 @@ DEFINE_RUNTIME_ENTRY(NullErrorWithSelector, 1) {
NullErrorHelper(zone, selector);
}
DEFINE_RUNTIME_ENTRY(NullCastError, 0) {
NullErrorHelper(zone, String::null_string());
}
DEFINE_RUNTIME_ENTRY(ArgumentNullError, 0) {
const String& error = String::Handle(String::New("argument value is null"));
Exceptions::ThrowArgumentError(error);

View file

@@ -40,6 +40,7 @@ namespace dart {
V(RangeError) \
V(NullError) \
V(NullErrorWithSelector) \
V(NullCastError) \
V(ArgumentNullError) \
V(ArgumentError) \
V(ArgumentErrorUnboxedInt64) \

View file

@@ -81,6 +81,8 @@ namespace dart {
V(NullErrorSharedWithoutFPURegs) \
V(NullArgErrorSharedWithFPURegs) \
V(NullArgErrorSharedWithoutFPURegs) \
V(NullCastErrorSharedWithFPURegs) \
V(NullCastErrorSharedWithoutFPURegs) \
V(RangeErrorSharedWithFPURegs) \
V(RangeErrorSharedWithoutFPURegs) \
V(StackOverflowSharedWithFPURegs) \

View file

@@ -109,6 +109,10 @@ class Thread;
StubCode::NullArgErrorSharedWithoutFPURegs().raw(), nullptr) \
V(CodePtr, null_arg_error_shared_with_fpu_regs_stub_, \
StubCode::NullArgErrorSharedWithFPURegs().raw(), nullptr) \
V(CodePtr, null_cast_error_shared_without_fpu_regs_stub_, \
StubCode::NullCastErrorSharedWithoutFPURegs().raw(), nullptr) \
V(CodePtr, null_cast_error_shared_with_fpu_regs_stub_, \
StubCode::NullCastErrorSharedWithFPURegs().raw(), nullptr) \
V(CodePtr, range_error_shared_without_fpu_regs_stub_, \
StubCode::RangeErrorSharedWithoutFPURegs().raw(), nullptr) \
V(CodePtr, range_error_shared_with_fpu_regs_stub_, \