Remove the debugger_ field from Isolate in a PRODUCT build.

This is the first in a series of CLs in which I am removing fields from
classes that aren't used in PRODUCT mode. This CL removes the
debugger_ field from Isolate.
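
The mechanical shape of the change is the same in most files below: a
runtime flag test around debugger-only code becomes a compile-time
preprocessor guard, so the guarded code is simply not compiled into a
PRODUCT build. A minimal before/after sketch (illustrative only, not
verbatim from any one file):

  // Before: debugger support is always compiled in, selected at runtime.
  if (FLAG_support_debugger) {
    isolate->debugger()->NotifyCompilation(function);
  }

  // After: debugger support is compiled out of PRODUCT builds entirely.
  #if !defined(PRODUCT)
  isolate->debugger()->NotifyCompilation(function);
  #endif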

R=rmacnak@google.com

Review-Url: https://codereview.chromium.org/2981173002 .
Zachary Anderson 2017-07-18 13:29:38 -07:00
parent 84f079f24c
commit 952345ff84
22 changed files with 345 additions and 321 deletions

View file

@@ -12,13 +12,13 @@
 namespace dart {

 DEFINE_NATIVE_ENTRY(AsyncStarMoveNext_debuggerStepCheck, 1) {
+#if !defined(PRODUCT)
   GET_NON_NULL_NATIVE_ARGUMENT(Closure, async_op, arguments->NativeArgAt(0));
-  if (FLAG_support_debugger) {
-    Debugger* debugger = isolate->debugger();
-    if (debugger != NULL) {
-      debugger->MaybeAsyncStepInto(async_op);
-    }
+  Debugger* debugger = isolate->debugger();
+  if (debugger != NULL) {
+    debugger->MaybeAsyncStepInto(async_op);
   }
+#endif
   return Object::null();
 }

View file

@@ -21,14 +21,16 @@ namespace dart {
 // Native implementations for the dart:developer library.

 DEFINE_NATIVE_ENTRY(Developer_debugger, 2) {
   GET_NON_NULL_NATIVE_ARGUMENT(Bool, when, arguments->NativeArgAt(0));
+#if !defined(PRODUCT)
   GET_NATIVE_ARGUMENT(String, msg, arguments->NativeArgAt(1));
   Debugger* debugger = isolate->debugger();
-  if (!FLAG_support_debugger || !debugger) {
+  if (!debugger) {
     return when.raw();
   }
   if (when.value()) {
     debugger->PauseDeveloper(msg);
   }
+#endif
   return when.raw();
 }

View file

@@ -110,14 +110,13 @@ DEFINE_NATIVE_ENTRY(StackTrace_asyncStackTraceHelper, 1) {
   if (!FLAG_causal_async_stacks) {
     return Object::null();
   }
+#if !defined(PRODUCT)
   GET_NATIVE_ARGUMENT(Closure, async_op, arguments->NativeArgAt(0));
-  if (FLAG_support_debugger) {
-    Debugger* debugger = isolate->debugger();
-    if (debugger != NULL) {
-      debugger->MaybeAsyncStepInto(async_op);
-    }
+  Debugger* debugger = isolate->debugger();
+  if (debugger != NULL) {
+    debugger->MaybeAsyncStepInto(async_op);
   }
+#endif
   return CurrentStackTrace(thread, true);
 }

View file

@@ -232,16 +232,16 @@ DEFINE_RUNTIME_ENTRY(CompileFunction, 1) {
 }

 bool Compiler::CanOptimizeFunction(Thread* thread, const Function& function) {
-  if (FLAG_support_debugger) {
-    Isolate* isolate = thread->isolate();
-    if (isolate->debugger()->IsStepping() ||
-        isolate->debugger()->HasBreakpoint(function, thread->zone())) {
-      // We cannot set breakpoints and single step in optimized code,
-      // so do not optimize the function.
-      function.set_usage_counter(0);
-      return false;
-    }
+#if !defined(PRODUCT)
+  Isolate* isolate = thread->isolate();
+  if (isolate->debugger()->IsStepping() ||
+      isolate->debugger()->HasBreakpoint(function, thread->zone())) {
+    // We cannot set breakpoints and single step in optimized code,
+    // so do not optimize the function.
+    function.set_usage_counter(0);
+    return false;
   }
+#endif
   if (function.deoptimization_counter() >=
       FLAG_max_deoptimization_counter_threshold) {
     if (FLAG_trace_failed_optimization_attempts ||
@@ -554,29 +554,26 @@ RawCode* CompileParsedFunctionHelper::FinalizeCompilation(
   code.set_is_optimized(optimized());
   code.set_owner(function);
 #if !defined(PRODUCT)
-  if (FLAG_support_debugger) {
-    ZoneGrowableArray<TokenPosition>* await_token_positions =
-        flow_graph->await_token_positions();
-    if (await_token_positions != NULL) {
-      Smi& token_pos_value = Smi::Handle(zone);
-      if (await_token_positions->length() > 0) {
-        const Array& await_to_token_map = Array::Handle(
-            zone, Array::New(await_token_positions->length(), Heap::kOld));
-        ASSERT(!await_to_token_map.IsNull());
-        for (intptr_t i = 0; i < await_token_positions->length(); i++) {
-          TokenPosition token_pos =
-              await_token_positions->At(i).FromSynthetic();
-          if (!token_pos.IsReal()) {
-            // Some async machinery uses sentinel values. Map them to
-            // no source position.
-            token_pos_value = Smi::New(TokenPosition::kNoSourcePos);
-          } else {
-            token_pos_value = Smi::New(token_pos.value());
-          }
-          await_to_token_map.SetAt(i, token_pos_value);
+  ZoneGrowableArray<TokenPosition>* await_token_positions =
+      flow_graph->await_token_positions();
+  if (await_token_positions != NULL) {
+    Smi& token_pos_value = Smi::Handle(zone);
+    if (await_token_positions->length() > 0) {
+      const Array& await_to_token_map = Array::Handle(
+          zone, Array::New(await_token_positions->length(), Heap::kOld));
+      ASSERT(!await_to_token_map.IsNull());
+      for (intptr_t i = 0; i < await_token_positions->length(); i++) {
+        TokenPosition token_pos = await_token_positions->At(i).FromSynthetic();
+        if (!token_pos.IsReal()) {
+          // Some async machinery uses sentinel values. Map them to
+          // no source position.
+          token_pos_value = Smi::New(TokenPosition::kNoSourcePos);
+        } else {
+          token_pos_value = Smi::New(token_pos.value());
         }
-        code.set_await_token_positions(await_to_token_map);
+        await_to_token_map.SetAt(i, token_pos_value);
       }
+      code.set_await_token_positions(await_to_token_map);
     }
   }
 #endif  // !defined(PRODUCT)
@@ -1350,9 +1347,9 @@ static RawObject* CompileFunctionHelper(CompilationPipeline* pipeline,
                 per_compile_timer.TotalElapsedTime());
   }

-  if (FLAG_support_debugger) {
-    isolate->debugger()->NotifyCompilation(function);
-  }
+#if !defined(PRODUCT)
+  isolate->debugger()->NotifyCompilation(function);
+#endif

   if (FLAG_disassemble && FlowGraphPrinter::ShouldPrint(function)) {
     Disassembler::DisassembleCode(function, result, optimized);

View file

@@ -617,9 +617,10 @@ RawError* Dart::InitializeIsolate(const uint8_t* snapshot_data,
   }

   ServiceIsolate::SendIsolateStartupMessage();
-  if (FLAG_support_debugger) {
-    I->debugger()->NotifyIsolateCreated();
-  }
+#if !defined(PRODUCT)
+  I->debugger()->NotifyIsolateCreated();
+#endif

   // Create tag table.
   I->set_tag_table(GrowableObjectArray::Handle(GrowableObjectArray::New()));
   // Set up default UserTag.
@@ -646,10 +647,6 @@ const char* Dart::FeaturesString(Isolate* isolate, Snapshot::Kind kind) {
 #endif

   if (Snapshot::IncludesCode(kind)) {
-    if (FLAG_support_debugger) {
-      buffer.AddString(" support-debugger");
-    }
-
     // Checked mode affects deopt ids.
 #define ADD_FLAG(name, isolate_flag, flag)                                     \
   do {                                                                         \

View file

@@ -5633,14 +5633,14 @@ DART_EXPORT Dart_Handle Dart_FinalizeLoading(bool complete_futures) {
   I->DoneFinalizing();

 #if !defined(PRODUCT)
   // Now that the newly loaded classes are finalized, notify the debugger
   // that new code has been loaded. If there are latent breakpoints in
   // the new code, the debugger converts them to unresolved source breakpoints.
   // The code that completes the futures (invoked below) may call into the
   // newly loaded code and trigger one of these breakpoints.
-  if (FLAG_support_debugger) {
-    I->debugger()->NotifyDoneLoading();
-  }
+  I->debugger()->NotifyDoneLoading();
 #endif

 #if !defined(DART_PRECOMPILED_RUNTIME)
   if (FLAG_enable_mirrors) {

View file

@@ -561,12 +561,14 @@ RawObject* DartLibraryCalls::HandleMessage(const Object& handler,
   const Array& args = Array::Handle(zone, Array::New(kNumArguments));
   args.SetAt(0, handler);
   args.SetAt(1, message);
-  if (FLAG_support_debugger && isolate->debugger()->IsStepping()) {
+#if !defined(PRODUCT)
+  if (isolate->debugger()->IsStepping()) {
     // If the isolate is being debugged and the debugger was stepping
     // through code, enable single stepping so debugger will stop
     // at the first location the user is interested in.
     isolate->debugger()->SetResumeAction(Debugger::kStepInto);
   }
+#endif
   const Object& result =
       Object::Handle(zone, DartEntry::InvokeFunction(function, args));
   ASSERT(result.IsNull() || result.IsError());

View file

@@ -763,13 +763,13 @@ void Exceptions::Throw(Thread* thread, const Instance& exception) {
   // Do not notify debugger on stack overflow and out of memory exceptions.
   // The VM would crash when the debugger calls back into the VM to
   // get values of variables.
-  if (FLAG_support_debugger) {
-    Isolate* isolate = thread->isolate();
-    if (exception.raw() != isolate->object_store()->out_of_memory() &&
-        exception.raw() != isolate->object_store()->stack_overflow()) {
-      isolate->debugger()->PauseException(exception);
-    }
+#if !defined(PRODUCT)
+  Isolate* isolate = thread->isolate();
+  if (exception.raw() != isolate->object_store()->out_of_memory() &&
+      exception.raw() != isolate->object_store()->stack_overflow()) {
+    isolate->debugger()->PauseException(exception);
   }
+#endif
   // Null object is a valid exception object.
   ThrowExceptionHelper(thread, exception, StackTrace::Handle(thread->zone()),
                        false);

View file

@@ -147,7 +147,6 @@
     "Debugger support async functions.")                                      \
   R(support_ast_printer, false, bool, true, "Support the AST printer.")       \
   R(support_compiler_stats, false, bool, true, "Support compiler stats.")     \
-  C(support_debugger, false, false, bool, true, "Support the debugger.")      \
   R(support_disassembler, false, bool, true, "Support the disassembler.")     \
   R(support_il_printer, false, bool, true, "Support the IL printer.")         \
   C(support_reload, false, false, bool, true, "Support isolate reload.")      \
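
Removing the C(...) row retires the flag entirely. Each row in this table
expands through the R/C macros into a flag definition, and for a C() row the
PRODUCT build gets a compile-time constant, so in PRODUCT a runtime
"if (FLAG_support_debugger)" was already dead code; the preprocessor guards
added in this CL just make that explicit. A simplified sketch of how a
constant flag of this kind is typically expanded (illustrative; not the VM's
exact macros):

  #if defined(PRODUCT)
  #define C(name, precompiled_value, product_value, type, default_value, doc) \
    const type FLAG_##name = product_value;
  #else
  #define C(name, precompiled_value, product_value, type, default_value, doc) \
    type FLAG_##name = default_value;
  #endif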

View file

@@ -1055,12 +1055,13 @@ void EffectGraphVisitor::VisitReturnNode(ReturnNode* node) {
   // No debugger check is done in native functions or for return
   // statements for which there is no associated source position.
   const Function& function = owner()->function();
-  if (FLAG_support_debugger && node->token_pos().IsDebugPause() &&
-      !function.is_native()) {
+#if !defined(PRODUCT)
+  if (node->token_pos().IsDebugPause() && !function.is_native()) {
     AddInstruction(new (Z) DebugStepCheckInstr(node->token_pos(),
                                                RawPcDescriptors::kRuntimeCall,
                                                owner()->GetNextDeoptId()));
   }
+#endif

   NestedContextAdjustment context_adjustment(owner(), owner()->context_level());
@@ -2037,11 +2038,13 @@ void EffectGraphVisitor::VisitForNode(ForNode* node) {
 }

 void EffectGraphVisitor::VisitJumpNode(JumpNode* node) {
-  if (FLAG_support_debugger && owner()->function().is_debuggable()) {
+#if !defined(PRODUCT)
+  if (owner()->function().is_debuggable()) {
     AddInstruction(new (Z) DebugStepCheckInstr(node->token_pos(),
                                                RawPcDescriptors::kRuntimeCall,
                                                owner()->GetNextDeoptId()));
   }
+#endif

   NestedContextAdjustment context_adjustment(owner(), owner()->context_level());
@@ -3327,25 +3330,25 @@ void ValueGraphVisitor::VisitLoadLocalNode(LoadLocalNode* node) {
 // <Expression> ::= StoreLocal { local: LocalVariable
 //                               value: <Expression> }
 void EffectGraphVisitor::VisitStoreLocalNode(StoreLocalNode* node) {
+#if !defined(PRODUCT)
   // If the right hand side is an expression that does not contain
   // a safe point for the debugger to stop, add an explicit stub
   // call. Exception: don't do this when assigning to or from internal
   // variables, or for generated code that has no source position.
-  if (FLAG_support_debugger) {
-    AstNode* rhs = node->value();
-    if (rhs->IsAssignableNode()) {
-      rhs = rhs->AsAssignableNode()->expr();
-    }
-    if ((rhs->IsLiteralNode() || rhs->IsLoadStaticFieldNode() ||
-         (rhs->IsLoadLocalNode() &&
-          !rhs->AsLoadLocalNode()->local().IsInternal()) ||
-         rhs->IsClosureNode()) &&
-        !node->local().IsInternal() && node->token_pos().IsDebugPause()) {
-      AddInstruction(new (Z) DebugStepCheckInstr(node->token_pos(),
-                                                 RawPcDescriptors::kRuntimeCall,
-                                                 owner()->GetNextDeoptId()));
-    }
+  AstNode* rhs = node->value();
+  if (rhs->IsAssignableNode()) {
+    rhs = rhs->AsAssignableNode()->expr();
+  }
+  if ((rhs->IsLiteralNode() || rhs->IsLoadStaticFieldNode() ||
+       (rhs->IsLoadLocalNode() &&
+        !rhs->AsLoadLocalNode()->local().IsInternal()) ||
+       rhs->IsClosureNode()) &&
+      !node->local().IsInternal() && node->token_pos().IsDebugPause()) {
+    AddInstruction(new (Z) DebugStepCheckInstr(node->token_pos(),
+                                               RawPcDescriptors::kRuntimeCall,
+                                               owner()->GetNextDeoptId()));
   }
+#endif

   ValueGraphVisitor for_value(owner());
   node->value()->Visit(&for_value);
@@ -3430,22 +3433,22 @@ Definition* EffectGraphVisitor::BuildStoreStaticField(
     StoreStaticFieldNode* node,
     bool result_is_needed,
     TokenPosition token_pos) {
-  if (FLAG_support_debugger) {
-    // If the right hand side is an expression that does not contain
-    // a safe point for the debugger to stop, add an explicit stub
-    // call.
-    AstNode* rhs = node->value();
-    if (rhs->IsAssignableNode()) {
-      rhs = rhs->AsAssignableNode()->expr();
-    }
-    if ((rhs->IsLiteralNode() || rhs->IsLoadLocalNode() ||
-         rhs->IsLoadStaticFieldNode() || rhs->IsClosureNode()) &&
-        node->token_pos().IsDebugPause()) {
-      AddInstruction(new (Z) DebugStepCheckInstr(node->token_pos(),
-                                                 RawPcDescriptors::kRuntimeCall,
-                                                 owner()->GetNextDeoptId()));
-    }
+#if !defined(PRODUCT)
+  // If the right hand side is an expression that does not contain
+  // a safe point for the debugger to stop, add an explicit stub
+  // call.
+  AstNode* rhs = node->value();
+  if (rhs->IsAssignableNode()) {
+    rhs = rhs->AsAssignableNode()->expr();
+  }
+  if ((rhs->IsLiteralNode() || rhs->IsLoadLocalNode() ||
+       rhs->IsLoadStaticFieldNode() || rhs->IsClosureNode()) &&
+      node->token_pos().IsDebugPause()) {
+    AddInstruction(new (Z) DebugStepCheckInstr(node->token_pos(),
+                                               RawPcDescriptors::kRuntimeCall,
+                                               owner()->GetNextDeoptId()));
   }
+#endif

   ValueGraphVisitor for_value(owner());
   node->value()->Visit(&for_value);
@@ -3846,8 +3849,8 @@ void EffectGraphVisitor::VisitSequenceNode(SequenceNode* node) {
     Do(call_async_set_thread_stack_trace);
   }

-  if (FLAG_support_debugger && is_top_level_sequence &&
-      function.is_debuggable()) {
+#if !defined(PRODUCT)
+  if (is_top_level_sequence && function.is_debuggable()) {
     // Place a debug check at method entry to ensure breaking on a method always
     // happens, even if there are no assignments/calls/runtimecalls in the first
     // basic block. Place this check at the last parameter to ensure parameters
@@ -3867,6 +3870,7 @@ void EffectGraphVisitor::VisitSequenceNode(SequenceNode* node) {
     AddInstruction(new (Z) DebugStepCheckInstr(
         check_pos, RawPcDescriptors::kRuntimeCall, owner()->GetNextDeoptId()));
   }
+#endif

   // This check may be deleted if the generated code is leaf.
   // Native functions don't need a stack check at entry.
@@ -4266,16 +4270,16 @@ StaticCallInstr* EffectGraphVisitor::BuildThrowNoSuchMethodError(
 }

 void EffectGraphVisitor::BuildThrowNode(ThrowNode* node) {
-  if (FLAG_support_debugger) {
-    if (node->exception()->IsLiteralNode() ||
-        node->exception()->IsLoadLocalNode() ||
-        node->exception()->IsLoadStaticFieldNode() ||
-        node->exception()->IsClosureNode()) {
-      AddInstruction(new (Z) DebugStepCheckInstr(node->token_pos(),
-                                                 RawPcDescriptors::kRuntimeCall,
-                                                 owner()->GetNextDeoptId()));
-    }
+#if !defined(PRODUCT)
+  if (node->exception()->IsLiteralNode() ||
+      node->exception()->IsLoadLocalNode() ||
+      node->exception()->IsLoadStaticFieldNode() ||
+      node->exception()->IsClosureNode()) {
+    AddInstruction(new (Z) DebugStepCheckInstr(node->token_pos(),
+                                               RawPcDescriptors::kRuntimeCall,
+                                               owner()->GetNextDeoptId()));
   }
+#endif
   ValueGraphVisitor for_exception(owner());
   node->exception()->Visit(&for_exception);
   Append(for_exception);

View file

@@ -85,7 +85,6 @@ static void PrecompilationModeHandler(bool value) {
     // Set flags affecting runtime accordingly for dart_bootstrap.
     // These flags are constants with PRODUCT and DART_PRECOMPILED_RUNTIME.
     FLAG_collect_code = false;
-    FLAG_support_debugger = false;
    FLAG_deoptimize_alot = false;  // Used in some tests.
    FLAG_deoptimize_every = 0;     // Used in some tests.
    FLAG_load_deferred_eagerly = true;

View file

@@ -344,10 +344,12 @@ RawError* IsolateMessageHandler::HandleLibMessage(const Array& message) {
       Object& obj = Object::Handle(zone, message.At(2));
       if (!I->VerifyPauseCapability(obj)) return Error::null();

+#if !defined(PRODUCT)
       // If we are already paused, don't pause again.
-      if (FLAG_support_debugger && (I->debugger()->PauseEvent() == NULL)) {
+      if (I->debugger()->PauseEvent() == NULL) {
         return I->debugger()->PauseInterrupted();
       }
+#endif
       break;
     }
@@ -662,6 +664,7 @@ MessageHandler::MessageStatus IsolateMessageHandler::ProcessUnhandledException(
     } else {
       T->set_sticky_error(result);
     }
+#if !defined(PRODUCT)
     // Notify the debugger about specific unhandled exceptions which are
     // withheld when being thrown. Do this after setting the sticky error
     // so the isolate has an error set when paused with the unhandled
@@ -672,11 +675,10 @@ MessageHandler::MessageStatus IsolateMessageHandler::ProcessUnhandledException(
       if ((exception == I->object_store()->out_of_memory()) ||
           (exception == I->object_store()->stack_overflow())) {
         // We didn't notify the debugger when the stack was full. Do it now.
-        if (FLAG_support_debugger) {
-          I->debugger()->PauseException(Instance::Handle(exception));
-        }
+        I->debugger()->PauseException(Instance::Handle(exception));
       }
     }
+#endif  // !defined(PRODUCT)
     return kError;
   }
 }
@@ -755,7 +757,9 @@ Isolate::Isolate(const Dart_IsolateFlags& api_flags)
       environment_callback_(NULL),
       library_tag_handler_(NULL),
      api_state_(NULL),
+#if !defined(PRODUCT)
      debugger_(NULL),
+#endif
      resume_request_(false),
      last_resume_timestamp_(OS::GetCurrentTimeMillis()),
      random_(),
@@ -825,9 +829,7 @@ Isolate::~Isolate() {
   delete object_store_;
   delete api_state_;
 #ifndef PRODUCT
-  if (FLAG_support_debugger) {
-    delete debugger_;
-  }
+  delete debugger_;
 #endif  // !PRODUCT
 #if defined(USING_SIMULATOR)
   delete simulator_;
@@ -920,10 +922,10 @@ Isolate* Isolate::Init(const char* name_prefix,
   result->set_terminate_capability(result->random()->NextUInt64());

   result->BuildName(name_prefix);
-  if (FLAG_support_debugger) {
-    result->debugger_ = new Debugger();
-    result->debugger_->Initialize(result);
-  }
+#if !defined(PRODUCT)
+  result->debugger_ = new Debugger();
+  result->debugger_->Initialize(result);
+#endif
   if (FLAG_trace_isolates) {
     if (name_prefix == NULL || strcmp(name_prefix, "vm-isolate") != 0) {
       OS::Print(
@@ -981,13 +983,15 @@ int64_t Isolate::UptimeMicros() const {
 }

 bool Isolate::IsPaused() const {
+#if defined(PRODUCT)
+  return false;
+#else
   return (debugger_ != NULL) && (debugger_->PauseEvent() != NULL);
+#endif
 }

 RawError* Isolate::PausePostRequest() {
-  if (!FLAG_support_debugger) {
-    return Error::null();
-  }
+#if !defined(PRODUCT)
   if (debugger_ == NULL) {
     return Error::null();
   }
@@ -1001,6 +1005,7 @@ RawError* Isolate::PausePostRequest() {
       UNREACHABLE();
     }
   }
+#endif
   return Error::null();
 }
@@ -1092,12 +1097,10 @@ bool Isolate::MakeRunnable() {
   ASSERT(object_store()->root_library() != Library::null());
   set_is_runnable(true);
 #ifndef PRODUCT
-  if (FLAG_support_debugger) {
-    if (!ServiceIsolate::IsServiceIsolate(this)) {
-      debugger()->OnIsolateRunnable();
-      if (FLAG_pause_isolates_on_unhandled_exceptions) {
-        debugger()->SetExceptionPauseInfo(kPauseOnUnhandledExceptions);
-      }
+  if (!ServiceIsolate::IsServiceIsolate(this)) {
+    debugger()->OnIsolateRunnable();
+    if (FLAG_pause_isolates_on_unhandled_exceptions) {
+      debugger()->SetExceptionPauseInfo(kPauseOnUnhandledExceptions);
     }
   }
 #endif  // !PRODUCT
@@ -1384,9 +1387,11 @@ static MessageHandler::MessageStatus RunIsolate(uword parameter) {
     // way to debug. Set the breakpoint on the static function instead
    // of its implicit closure function because the latter is merely
    // a dispatcher that is marked as undebuggable.
-    if (FLAG_support_debugger && FLAG_break_at_isolate_spawn) {
+#if !defined(PRODUCT)
+    if (FLAG_break_at_isolate_spawn) {
       isolate->debugger()->OneTimeBreakAtEntry(func);
     }
+#endif

     func = func.ImplicitClosureFunction();
@@ -1563,10 +1568,10 @@ void Isolate::LowLevelShutdown() {
     }
   }

 #if !defined(PRODUCT)
   // Clean up debugger resources.
-  if (FLAG_support_debugger) {
-    debugger()->Shutdown();
-  }
+  debugger()->Shutdown();
 #endif

   // Close all the ports owned by this isolate.
   PortMap::ClosePorts(message_handler());
@@ -1783,12 +1788,10 @@ void Isolate::VisitObjectPointers(ObjectPointerVisitor* visitor,
   // when at safepoint or the field_list_mutex_ lock has been taken.
   visitor->VisitPointer(reinterpret_cast<RawObject**>(&boxed_field_list_));

-  // Visit objects in the debugger.
-  if (FLAG_support_debugger) {
-    debugger()->VisitObjectPointers(visitor);
-  }
-
 #if !defined(PRODUCT)
+  // Visit objects in the debugger.
+  debugger()->VisitObjectPointers(visitor);
+
   // Visit objects that are being used for isolate reload.
   if (reload_context() != NULL) {
     reload_context()->VisitObjectPointers(visitor);

View file

@@ -319,13 +319,12 @@ class Isolate : public BaseIsolate {
   }
   Mutex* megamorphic_lookup_mutex() const { return megamorphic_lookup_mutex_; }

+#if !defined(PRODUCT)
   Debugger* debugger() const {
-    if (!FLAG_support_debugger) {
-      return NULL;
-    }
     ASSERT(debugger_ != NULL);
     return debugger_;
   }
+#endif

   void set_single_step(bool value) { single_step_ = value; }
   bool single_step() const { return single_step_; }
@@ -766,7 +765,7 @@ class Isolate : public BaseIsolate {
   Dart_EnvironmentCallback environment_callback_;
   Dart_LibraryTagHandler library_tag_handler_;
   ApiState* api_state_;
-  Debugger* debugger_;
+  NOT_IN_PRODUCT(Debugger* debugger_);
   bool resume_request_;
   int64_t last_resume_timestamp_;
   Random random_;
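
NOT_IN_PRODUCT is the VM's usual way to declare something only in non-PRODUCT
builds: it expands to its argument normally and to nothing when PRODUCT is
defined, roughly like this (a sketch; the real definition lives in the VM's
globals header):

  #if defined(PRODUCT)
  #define NOT_IN_PRODUCT(code)
  #else
  #define NOT_IN_PRODUCT(code) code
  #endif

With that, NOT_IN_PRODUCT(Debugger* debugger_); drops the field, and one
pointer per Isolate, from PRODUCT builds.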

View file

@@ -1850,15 +1850,19 @@ Fragment FlowGraphBuilder::CheckVariableTypeInCheckedMode(

 bool FlowGraphBuilder::NeedsDebugStepCheck(const Function& function,
                                            TokenPosition position) {
-  return FLAG_support_debugger && position.IsDebugPause() &&
-         !function.is_native() && function.is_debuggable();
+  return position.IsDebugPause() && !function.is_native() &&
+         function.is_debuggable();
 }

 bool FlowGraphBuilder::NeedsDebugStepCheck(Value* value,
                                            TokenPosition position) {
-  if (!FLAG_support_debugger || !position.IsDebugPause()) return false;
+  if (!position.IsDebugPause()) {
+    return false;
+  }
   Definition* definition = value->definition();
-  if (definition->IsConstant() || definition->IsLoadStaticField()) return true;
+  if (definition->IsConstant() || definition->IsLoadStaticField()) {
+    return true;
+  }
   if (definition->IsAllocateObject()) {
     return !definition->AsAllocateObject()->closure_function().IsNull();
   }

View file

@@ -5173,11 +5173,12 @@ intptr_t Function::Hash() const {
 }

 bool Function::HasBreakpoint() const {
-  if (!FLAG_support_debugger) {
-    return false;
-  }
+#if defined(PRODUCT)
+  return false;
+#else
   Thread* thread = Thread::Current();
   return thread->isolate()->debugger()->HasBreakpoint(*this, thread->zone());
+#endif
 }

 void Function::InstallOptimizedCode(const Code& code) const {
@@ -5958,10 +5959,13 @@ void Function::SetIsOptimizable(bool value) const {
 }

 bool Function::CanBeInlined() const {
+#if defined(PRODUCT)
+  return is_inlinable() && !is_external() && !is_generated_body();
+#else
   Thread* thread = Thread::Current();
   return is_inlinable() && !is_external() && !is_generated_body() &&
-         (!FLAG_support_debugger ||
-          !thread->isolate()->debugger()->HasBreakpoint(*this, thread->zone()));
+         !thread->isolate()->debugger()->HasBreakpoint(*this, thread->zone());
+#endif
 }

 intptr_t Function::NumParameters() const {
@@ -13588,10 +13592,11 @@ void Code::set_static_calls_target_table(const Array& value) const {
 }

 bool Code::HasBreakpoint() const {
-  if (!FLAG_support_debugger) {
-    return false;
-  }
+#if defined(PRODUCT)
+  return false;
+#else
   return Isolate::Current()->debugger()->HasBreakpoint(*this);
+#endif
 }

 RawTypedData* Code::GetDeoptInfoAtPc(uword pc,

View file

@@ -3975,10 +3975,9 @@ TEST_CASE(FunctionSourceFingerprint) {
   EXPECT_EQ(a_test6.SourceFingerprint(), b_test6.SourceFingerprint());
 }

+#ifndef PRODUCT
 TEST_CASE(FunctionWithBreakpointNotInlined) {
-  if (!FLAG_support_debugger) {
-    return;
-  }
   const char* kScriptChars =
       "class A {\n"
       "  a() {\n"
@@ -4043,8 +4042,6 @@ ISOLATE_UNIT_TEST_CASE(SpecialClassesHaveEmptyArrays) {
   EXPECT(array.IsArray());
 }

-#ifndef PRODUCT
 class ObjectAccumulator : public ObjectVisitor {
  public:
   explicit ObjectAccumulator(GrowableArray<Object*>* objects)

View file

@@ -9176,21 +9176,21 @@ AstNode* Parser::ParseAwaitForStatement(String* label_name) {
       stream_expr_pos, new (Z) LoadLocalNode(stream_expr_pos, iterator_var),
       Symbols::MoveNext(), no_args);
   OpenBlock();
-  if (FLAG_support_debugger) {
-    // Call '_asyncStarMoveNextHelper' so that the debugger can intercept and
-    // handle single stepping into an async* generator.
-    const Function& async_star_move_next_helper = Function::ZoneHandle(
-        Z, isolate()->object_store()->async_star_move_next_helper());
-    ASSERT(!async_star_move_next_helper.IsNull());
-    ArgumentListNode* async_star_move_next_helper_args =
-        new (Z) ArgumentListNode(stream_expr_pos);
-    async_star_move_next_helper_args->Add(
-        new (Z) LoadLocalNode(stream_expr_pos, stream_var));
-    StaticCallNode* async_star_move_next_helper_call =
-        new (Z) StaticCallNode(stream_expr_pos, async_star_move_next_helper,
-                               async_star_move_next_helper_args);
-    current_block_->statements->Add(async_star_move_next_helper_call);
-  }
+#if !defined(PRODUCT)
+  // Call '_asyncStarMoveNextHelper' so that the debugger can intercept and
+  // handle single stepping into an async* generator.
+  const Function& async_star_move_next_helper = Function::ZoneHandle(
+      Z, isolate()->object_store()->async_star_move_next_helper());
+  ASSERT(!async_star_move_next_helper.IsNull());
+  ArgumentListNode* async_star_move_next_helper_args =
+      new (Z) ArgumentListNode(stream_expr_pos);
+  async_star_move_next_helper_args->Add(
+      new (Z) LoadLocalNode(stream_expr_pos, stream_var));
+  StaticCallNode* async_star_move_next_helper_call =
+      new (Z) StaticCallNode(stream_expr_pos, async_star_move_next_helper,
+                             async_star_move_next_helper_args);
+  current_block_->statements->Add(async_star_move_next_helper_call);
+#endif
   AstNode* await_moveNext = new (Z) AwaitNode(
       stream_expr_pos, iterator_moveNext, saved_try_ctx, async_saved_try_ctx,
       outer_saved_try_ctx, outer_async_saved_try_ctx, current_block_->scope);

View file

@@ -770,14 +770,15 @@ static void CheckResultError(const Object& result) {
   }
 }

-#if !defined(TARGET_ARCH_DBC)
+#if defined(PRODUCT)
+DEFINE_RUNTIME_ENTRY(BreakpointRuntimeHandler, 0) {
+  UNREACHABLE();
+  return;
+}
+#elif !defined(TARGET_ARCH_DBC)
 // Gets called from debug stub when code reaches a breakpoint
 // set on a runtime stub call.
 DEFINE_RUNTIME_ENTRY(BreakpointRuntimeHandler, 0) {
-  if (!FLAG_support_debugger) {
-    UNREACHABLE();
-    return;
-  }
   DartFrameIterator iterator(thread,
                              StackFrameIterator::kNoCrossThreadIteration);
   StackFrame* caller_frame = iterator.NextFrame();
@@ -795,10 +796,6 @@ DEFINE_RUNTIME_ENTRY(BreakpointRuntimeHandler, 0) {
 #else
 // Gets called from the simulator when the breakpoint is reached.
 DEFINE_RUNTIME_ENTRY(BreakpointRuntimeHandler, 0) {
-  if (!FLAG_support_debugger) {
-    UNREACHABLE();
-    return;
-  }
   const Error& error = Error::Handle(isolate->debugger()->PauseBreakpoint());
   if (!error.IsNull()) {
     Exceptions::PropagateError(error);
@@ -808,16 +805,17 @@
 #endif  // !defined(TARGET_ARCH_DBC)

 DEFINE_RUNTIME_ENTRY(SingleStepHandler, 0) {
-  if (!FLAG_support_debugger) {
-    UNREACHABLE();
-    return;
-  }
+#if defined(PRODUCT)
+  UNREACHABLE();
+  return;
+#else
   const Error& error =
       Error::Handle(zone, isolate->debugger()->PauseStepping());
   if (!error.IsNull()) {
     Exceptions::PropagateError(error);
     UNREACHABLE();
   }
+#endif
 }

 // An instance call of the form o.f(...) could not be resolved. Check if
@@ -1691,7 +1689,8 @@ DEFINE_RUNTIME_ENTRY(StackOverflow, 0) {
     }
 #endif
   }

-  if (FLAG_support_debugger && do_stacktrace) {
+#if !defined(PRODUCT)
+  if (do_stacktrace) {
     String& var_name = String::Handle();
     Instance& var_value = Instance::Handle();
     // Collecting the stack trace and accessing local variables
@@ -1723,6 +1722,7 @@
     }
     FLAG_stacktrace_every = saved_stacktrace_every;
   }
+#endif  // !defined(PRODUCT)

   const Error& error = Error::Handle(thread->HandleInterrupts());
   if (!error.IsNull()) {

View file

@@ -1379,8 +1379,9 @@ void StubCode::GenerateNArgsCheckInlineCacheStub(
   }
 #endif  // DEBUG

+#if !defined(PRODUCT)
   Label stepping, done_stepping;
-  if (FLAG_support_debugger && !optimized) {
+  if (!optimized) {
     __ Comment("Check single stepping");
     __ LoadIsolate(R8);
     __ ldrb(R8, Address(R8, Isolate::single_step_offset()));
@@ -1388,6 +1389,7 @@
     __ b(&stepping, NE);
     __ Bind(&done_stepping);
   }
+#endif

   Label not_smi_or_overflow;
   if (kind != Token::kILLEGAL) {
@@ -1508,7 +1510,8 @@
   __ ldr(CODE_REG, FieldAddress(R0, Function::code_offset()));
   __ bx(R2);

-  if (FLAG_support_debugger && !optimized) {
+#if !defined(PRODUCT)
+  if (!optimized) {
     __ Bind(&stepping);
     __ EnterStubFrame();
     __ Push(R9);  // Preserve IC data.
@@ -1518,6 +1521,7 @@
     __ LeaveStubFrame();
     __ b(&done_stepping);
   }
+#endif
 }

 // Use inline cache data array to invoke the target or continue in inline
@@ -1597,15 +1601,15 @@ void StubCode::GenerateZeroArgsUnoptimizedStaticCallStub(Assembler* assembler) {
   }
 #endif  // DEBUG

+#if !defined(PRODUCT)
   // Check single stepping.
   Label stepping, done_stepping;
-  if (FLAG_support_debugger) {
-    __ LoadIsolate(R8);
-    __ ldrb(R8, Address(R8, Isolate::single_step_offset()));
-    __ CompareImmediate(R8, 0);
-    __ b(&stepping, NE);
-    __ Bind(&done_stepping);
-  }
+  __ LoadIsolate(R8);
+  __ ldrb(R8, Address(R8, Isolate::single_step_offset()));
+  __ CompareImmediate(R8, 0);
+  __ b(&stepping, NE);
+  __ Bind(&done_stepping);
+#endif

   // R9: IC data object (preserved).
   __ ldr(R8, FieldAddress(R9, ICData::ic_data_offset()));
@@ -1631,16 +1635,16 @@
   __ ldr(R2, FieldAddress(R0, Function::entry_point_offset()));
   __ bx(R2);

-  if (FLAG_support_debugger) {
-    __ Bind(&stepping);
-    __ EnterStubFrame();
-    __ Push(R9);  // Preserve IC data.
-    __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
-    __ Pop(R9);
-    __ RestoreCodePointer();
-    __ LeaveStubFrame();
-    __ b(&done_stepping);
-  }
+#if !defined(PRODUCT)
+  __ Bind(&stepping);
+  __ EnterStubFrame();
+  __ Push(R9);  // Preserve IC data.
+  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
+  __ Pop(R9);
+  __ RestoreCodePointer();
+  __ LeaveStubFrame();
+  __ b(&done_stepping);
+#endif
 }

 void StubCode::GenerateOneArgUnoptimizedStaticCallStub(Assembler* assembler) {
@@ -2001,15 +2005,15 @@ static void GenerateIdenticalWithNumberCheckStub(Assembler* assembler,
 // Return Zero condition flag set if equal.
 void StubCode::GenerateUnoptimizedIdenticalWithNumberCheckStub(
     Assembler* assembler) {
+#if !defined(PRODUCT)
   // Check single stepping.
   Label stepping, done_stepping;
-  if (FLAG_support_debugger) {
-    __ LoadIsolate(R1);
-    __ ldrb(R1, Address(R1, Isolate::single_step_offset()));
-    __ CompareImmediate(R1, 0);
-    __ b(&stepping, NE);
-    __ Bind(&done_stepping);
-  }
+  __ LoadIsolate(R1);
+  __ ldrb(R1, Address(R1, Isolate::single_step_offset()));
+  __ CompareImmediate(R1, 0);
+  __ b(&stepping, NE);
+  __ Bind(&done_stepping);
+#endif

   const Register temp = R2;
   const Register left = R1;
@@ -2019,14 +2023,14 @@
   GenerateIdenticalWithNumberCheckStub(assembler, left, right, temp);
   __ Ret();

-  if (FLAG_support_debugger) {
-    __ Bind(&stepping);
-    __ EnterStubFrame();
-    __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
-    __ RestoreCodePointer();
-    __ LeaveStubFrame();
-    __ b(&done_stepping);
-  }
+#if !defined(PRODUCT)
+  __ Bind(&stepping);
+  __ EnterStubFrame();
+  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
+  __ RestoreCodePointer();
+  __ LeaveStubFrame();
+  __ b(&done_stepping);
+#endif
 }

 // Called from optimized code only.

View file

@@ -1422,8 +1422,9 @@ void StubCode::GenerateNArgsCheckInlineCacheStub(
   }
 #endif  // DEBUG

+#if !defined(PRODUCT)
   Label stepping, done_stepping;
-  if (FLAG_support_debugger && !optimized) {
+  if (!optimized) {
     __ Comment("Check single stepping");
     __ LoadIsolate(R6);
     __ LoadFromOffset(R6, R6, Isolate::single_step_offset(), kUnsignedByte);
@@ -1431,6 +1432,7 @@
     __ b(&stepping, NE);
     __ Bind(&done_stepping);
   }
+#endif

   Label not_smi_or_overflow;
   if (kind != Token::kILLEGAL) {
@@ -1558,7 +1560,8 @@
   __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset());
   __ br(R2);

-  if (FLAG_support_debugger && !optimized) {
+#if !defined(PRODUCT)
+  if (!optimized) {
     __ Bind(&stepping);
     __ EnterStubFrame();
     __ Push(R5);  // Preserve IC data.
@@ -1568,6 +1571,7 @@
     __ LeaveStubFrame();
     __ b(&done_stepping);
   }
+#endif
 }

 // Use inline cache data array to invoke the target or continue in inline
@@ -1646,14 +1650,14 @@ void StubCode::GenerateZeroArgsUnoptimizedStaticCallStub(Assembler* assembler) {
 #endif  // DEBUG

   // Check single stepping.
+#if !defined(PRODUCT)
   Label stepping, done_stepping;
-  if (FLAG_support_debugger) {
-    __ LoadIsolate(R6);
-    __ LoadFromOffset(R6, R6, Isolate::single_step_offset(), kUnsignedByte);
-    __ CompareImmediate(R6, 0);
-    __ b(&stepping, NE);
-    __ Bind(&done_stepping);
-  }
+  __ LoadIsolate(R6);
+  __ LoadFromOffset(R6, R6, Isolate::single_step_offset(), kUnsignedByte);
+  __ CompareImmediate(R6, 0);
+  __ b(&stepping, NE);
+  __ Bind(&done_stepping);
+#endif

   // R5: IC data object (preserved).
   __ LoadFieldFromOffset(R6, R5, ICData::ic_data_offset());
@@ -1679,16 +1683,16 @@
   __ LoadFieldFromOffset(R2, R0, Function::entry_point_offset());
   __ br(R2);

-  if (FLAG_support_debugger) {
-    __ Bind(&stepping);
-    __ EnterStubFrame();
-    __ Push(R5);  // Preserve IC data.
-    __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
-    __ Pop(R5);
-    __ RestoreCodePointer();
-    __ LeaveStubFrame();
-    __ b(&done_stepping);
-  }
+#if !defined(PRODUCT)
+  __ Bind(&stepping);
+  __ EnterStubFrame();
+  __ Push(R5);  // Preserve IC data.
+  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
+  __ Pop(R5);
+  __ RestoreCodePointer();
+  __ LeaveStubFrame();
+  __ b(&done_stepping);
+#endif
 }

 void StubCode::GenerateOneArgUnoptimizedStaticCallStub(Assembler* assembler) {
@@ -2037,15 +2041,15 @@ static void GenerateIdenticalWithNumberCheckStub(Assembler* assembler,
 // Return Zero condition flag set if equal.
 void StubCode::GenerateUnoptimizedIdenticalWithNumberCheckStub(
     Assembler* assembler) {
+#if !defined(PRODUCT)
   // Check single stepping.
   Label stepping, done_stepping;
-  if (FLAG_support_debugger) {
-    __ LoadIsolate(R1);
-    __ LoadFromOffset(R1, R1, Isolate::single_step_offset(), kUnsignedByte);
-    __ CompareImmediate(R1, 0);
-    __ b(&stepping, NE);
-    __ Bind(&done_stepping);
-  }
+  __ LoadIsolate(R1);
+  __ LoadFromOffset(R1, R1, Isolate::single_step_offset(), kUnsignedByte);
+  __ CompareImmediate(R1, 0);
+  __ b(&stepping, NE);
+  __ Bind(&done_stepping);
+#endif

   const Register left = R1;
   const Register right = R0;
@@ -2054,14 +2058,14 @@ void StubCode::GenerateUnoptimizedIdenticalWithNumberCheckStub(
   GenerateIdenticalWithNumberCheckStub(assembler, left, right);
   __ ret();

-  if (FLAG_support_debugger) {
-    __ Bind(&stepping);
-    __ EnterStubFrame();
-    __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
-    __ RestoreCodePointer();
-    __ LeaveStubFrame();
-    __ b(&done_stepping);
-  }
+#if !defined(PRODUCT)
+  __ Bind(&stepping);
+  __ EnterStubFrame();
+  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
+  __ RestoreCodePointer();
+  __ LeaveStubFrame();
+  __ b(&done_stepping);
+#endif
 }

 // Called from optimized code only.

View file

@@ -1299,14 +1299,16 @@ void StubCode::GenerateNArgsCheckInlineCacheStub(
   }
 #endif  // DEBUG

+#if !defined(PRODUCT)
   Label stepping, done_stepping;
-  if (FLAG_support_debugger && !optimized) {
+  if (!optimized) {
     __ Comment("Check single stepping");
     __ LoadIsolate(EAX);
     __ cmpb(Address(EAX, Isolate::single_step_offset()), Immediate(0));
     __ j(NOT_EQUAL, &stepping);
     __ Bind(&done_stepping);
   }
+#endif

   Label not_smi_or_overflow;
   if (kind != Token::kILLEGAL) {
     EmitFastSmiOp(assembler, kind, num_args, &not_smi_or_overflow);
@@ -1432,7 +1434,8 @@
   __ movl(EBX, FieldAddress(EAX, Function::entry_point_offset()));
   __ jmp(EBX);

-  if (FLAG_support_debugger && !optimized) {
+#if !defined(PRODUCT)
+  if (!optimized) {
     __ Bind(&stepping);
     __ EnterStubFrame();
     __ pushl(ECX);
@@ -1441,6 +1444,7 @@
     __ LeaveFrame();
     __ jmp(&done_stepping);
   }
+#endif
 }

 // Use inline cache data array to invoke the target or continue in inline
@@ -1531,14 +1535,15 @@ void StubCode::GenerateZeroArgsUnoptimizedStaticCallStub(Assembler* assembler) {
     __ Bind(&ok);
   }
 #endif  // DEBUG

+#if !defined(PRODUCT)
   // Check single stepping.
   Label stepping, done_stepping;
-  if (FLAG_support_debugger) {
-    __ LoadIsolate(EAX);
-    __ cmpb(Address(EAX, Isolate::single_step_offset()), Immediate(0));
-    __ j(NOT_EQUAL, &stepping, Assembler::kNearJump);
-    __ Bind(&done_stepping);
-  }
+  __ LoadIsolate(EAX);
+  __ cmpb(Address(EAX, Isolate::single_step_offset()), Immediate(0));
+  __ j(NOT_EQUAL, &stepping, Assembler::kNearJump);
+  __ Bind(&done_stepping);
+#endif

   // ECX: IC data object (preserved).
   __ movl(EBX, FieldAddress(ECX, ICData::ic_data_offset()));
@@ -1561,15 +1566,15 @@
   __ movl(EBX, FieldAddress(EAX, Function::entry_point_offset()));
   __ jmp(EBX);

-  if (FLAG_support_debugger) {
-    __ Bind(&stepping);
-    __ EnterStubFrame();
-    __ pushl(ECX);
-    __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
-    __ popl(ECX);
-    __ LeaveFrame();
-    __ jmp(&done_stepping, Assembler::kNearJump);
-  }
+#if !defined(PRODUCT)
+  __ Bind(&stepping);
+  __ EnterStubFrame();
+  __ pushl(ECX);
+  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
+  __ popl(ECX);
+  __ LeaveFrame();
+  __ jmp(&done_stepping, Assembler::kNearJump);
+#endif
 }

 void StubCode::GenerateOneArgUnoptimizedStaticCallStub(Assembler* assembler) {
@@ -1928,15 +1933,15 @@ static void GenerateIdenticalWithNumberCheckStub(Assembler* assembler,
 // Returns ZF set.
 void StubCode::GenerateUnoptimizedIdenticalWithNumberCheckStub(
     Assembler* assembler) {
+#if !defined(PRODUCT)
   // Check single stepping.
   Label stepping, done_stepping;
-  if (FLAG_support_debugger) {
-    __ LoadIsolate(EAX);
-    __ movzxb(EAX, Address(EAX, Isolate::single_step_offset()));
-    __ cmpl(EAX, Immediate(0));
-    __ j(NOT_EQUAL, &stepping);
-    __ Bind(&done_stepping);
-  }
+  __ LoadIsolate(EAX);
+  __ movzxb(EAX, Address(EAX, Isolate::single_step_offset()));
+  __ cmpl(EAX, Immediate(0));
+  __ j(NOT_EQUAL, &stepping);
+  __ Bind(&done_stepping);
+#endif

   const Register left = EAX;
   const Register right = EDX;
@@ -1946,13 +1951,13 @@ void StubCode::GenerateUnoptimizedIdenticalWithNumberCheckStub(
   GenerateIdenticalWithNumberCheckStub(assembler, left, right, temp);
   __ ret();

-  if (FLAG_support_debugger) {
-    __ Bind(&stepping);
-    __ EnterStubFrame();
-    __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
-    __ LeaveFrame();
-    __ jmp(&done_stepping);
-  }
+#if !defined(PRODUCT)
+  __ Bind(&stepping);
+  __ EnterStubFrame();
+  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
+  __ LeaveFrame();
+  __ jmp(&done_stepping);
+#endif
 }

 // Called from optimized code only.

View file

@@ -1355,14 +1355,16 @@ void StubCode::GenerateNArgsCheckInlineCacheStub(
   }
 #endif  // DEBUG

+#if !defined(PRODUCT)
   Label stepping, done_stepping;
-  if (FLAG_support_debugger && !optimized) {
+  if (!optimized) {
     __ Comment("Check single stepping");
     __ LoadIsolate(RAX);
     __ cmpb(Address(RAX, Isolate::single_step_offset()), Immediate(0));
     __ j(NOT_EQUAL, &stepping);
     __ Bind(&done_stepping);
   }
+#endif

   Label not_smi_or_overflow;
   if (kind != Token::kILLEGAL) {
@@ -1478,7 +1480,8 @@
   __ movq(RCX, FieldAddress(RAX, Function::entry_point_offset()));
   __ jmp(RCX);

-  if (FLAG_support_debugger && !optimized) {
+#if !defined(PRODUCT)
+  if (!optimized) {
     __ Bind(&stepping);
     __ EnterStubFrame();
     __ pushq(RBX);
@@ -1488,6 +1491,7 @@
     __ LeaveStubFrame();
     __ jmp(&done_stepping);
   }
+#endif
 }

 // Use inline cache data array to invoke the target or continue in inline
@@ -1578,20 +1582,20 @@ void StubCode::GenerateZeroArgsUnoptimizedStaticCallStub(Assembler* assembler) {
   }
 #endif  // DEBUG

+#if !defined(PRODUCT)
   // Check single stepping.
   Label stepping, done_stepping;
-  if (FLAG_support_debugger) {
-    __ LoadIsolate(RAX);
-    __ movzxb(RAX, Address(RAX, Isolate::single_step_offset()));
-    __ cmpq(RAX, Immediate(0));
+  __ LoadIsolate(RAX);
+  __ movzxb(RAX, Address(RAX, Isolate::single_step_offset()));
+  __ cmpq(RAX, Immediate(0));
 #if defined(DEBUG)
-    static const bool kJumpLength = Assembler::kFarJump;
+  static const bool kJumpLength = Assembler::kFarJump;
 #else
-    static const bool kJumpLength = Assembler::kNearJump;
+  static const bool kJumpLength = Assembler::kNearJump;
 #endif  // DEBUG
-    __ j(NOT_EQUAL, &stepping, kJumpLength);
-    __ Bind(&done_stepping);
-  }
+  __ j(NOT_EQUAL, &stepping, kJumpLength);
+  __ Bind(&done_stepping);
+#endif

   // RBX: IC data object (preserved).
   __ movq(R12, FieldAddress(RBX, ICData::ic_data_offset()));
@@ -1615,16 +1619,16 @@
   __ movq(RCX, FieldAddress(RAX, Function::entry_point_offset()));
   __ jmp(RCX);

-  if (FLAG_support_debugger) {
-    __ Bind(&stepping);
-    __ EnterStubFrame();
-    __ pushq(RBX);  // Preserve IC data object.
-    __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
-    __ popq(RBX);
-    __ RestoreCodePointer();
-    __ LeaveStubFrame();
-    __ jmp(&done_stepping, Assembler::kNearJump);
-  }
+#if !defined(PRODUCT)
+  __ Bind(&stepping);
+  __ EnterStubFrame();
+  __ pushq(RBX);  // Preserve IC data object.
+  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
+  __ popq(RBX);
+  __ RestoreCodePointer();
+  __ LeaveStubFrame();
+  __ jmp(&done_stepping, Assembler::kNearJump);
+#endif
 }

 void StubCode::GenerateOneArgUnoptimizedStaticCallStub(Assembler* assembler) {
@@ -1981,15 +1985,15 @@ static void GenerateIdenticalWithNumberCheckStub(Assembler* assembler,
 // Returns ZF set.
 void StubCode::GenerateUnoptimizedIdenticalWithNumberCheckStub(
     Assembler* assembler) {
+#if !defined(PRODUCT)
   // Check single stepping.
   Label stepping, done_stepping;
-  if (FLAG_support_debugger) {
-    __ LoadIsolate(RAX);
-    __ movzxb(RAX, Address(RAX, Isolate::single_step_offset()));
-    __ cmpq(RAX, Immediate(0));
-    __ j(NOT_EQUAL, &stepping);
-    __ Bind(&done_stepping);
-  }
+  __ LoadIsolate(RAX);
+  __ movzxb(RAX, Address(RAX, Isolate::single_step_offset()));
+  __ cmpq(RAX, Immediate(0));
+  __ j(NOT_EQUAL, &stepping);
+  __ Bind(&done_stepping);
+#endif

   const Register left = RAX;
   const Register right = RDX;
@@ -1999,14 +2003,14 @@ void StubCode::GenerateUnoptimizedIdenticalWithNumberCheckStub(
   GenerateIdenticalWithNumberCheckStub(assembler, left, right);
   __ ret();

-  if (FLAG_support_debugger) {
-    __ Bind(&stepping);
-    __ EnterStubFrame();
-    __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
-    __ RestoreCodePointer();
-    __ LeaveStubFrame();
-    __ jmp(&done_stepping);
-  }
+#if !defined(PRODUCT)
+  __ Bind(&stepping);
+  __ EnterStubFrame();
+  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
+  __ RestoreCodePointer();
+  __ LeaveStubFrame();
+  __ jmp(&done_stepping);
+#endif
 }

 // Called from optimized code only.