[vm/concurrency] Move HasAttemptedReload() bool from Isolate to IsolateGroup

As part of making the compiler, as well as hot-reload, independent of
a particular isolate, we move this boolean to `IsolateGroup`.

For hot-reloading isolate groups, further changes are needed; those
will come in future CLs. This CL is preparation for making the compiler
independent of the current isolate.

Issue https://github.com/dart-lang/sdk/issues/36097

TEST=Refactoring of existing code.

Change-Id: I434cf1180bc963462e2901dfa83e915df9dc1712
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/182561
Commit-Queue: Martin Kustermann <kustermann@google.com>
Reviewed-by: Alexander Aprelev <aam@google.com>
Reviewed-by: Ryan Macnak <rmacnak@google.com>
This commit is contained in:
Martin Kustermann 2021-02-04 17:45:57 +00:00 committed by commit-bot@chromium.org
parent be7bea72dc
commit 376bf1826a
8 changed files with 38 additions and 33 deletions

View file

@ -63,7 +63,7 @@ void BlockScheduler::AssignEdgeWeights(FlowGraph* flow_graph) {
DeoptId::kNone, "BlockScheduler: ICData array cleared");
}
if (ic_data_array.IsNull()) {
DEBUG_ASSERT(Isolate::Current()->HasAttemptedReload() ||
DEBUG_ASSERT(IsolateGroup::Current()->HasAttemptedReload() ||
function.ForceOptimize());
return;
}

View file

@ -1463,7 +1463,7 @@ CompileType LoadStaticFieldInstr::ComputeType() const {
}
if (field.needs_load_guard()) {
// Should be kept in sync with Slot::Get.
DEBUG_ASSERT(Isolate::Current()->HasAttemptedReload());
DEBUG_ASSERT(IsolateGroup::Current()->HasAttemptedReload());
return CompileType::Dynamic();
}
return CompileType(is_nullable, cid, abstract_type);

View file

@ -3422,7 +3422,7 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfFieldAccessor(
body += NullConstant();
} else if (is_getter && is_method) {
ASSERT(!field.needs_load_guard()
NOT_IN_PRODUCT(|| I->HasAttemptedReload()));
NOT_IN_PRODUCT(|| IG->HasAttemptedReload()));
body += LoadLocal(parsed_function_->ParameterVariable(0));
body += LoadField(
field, /*calls_initializer=*/field.NeedsInitializationCheckOnLoad());
@ -3455,7 +3455,7 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfFieldAccessor(
#if defined(PRODUCT)
UNREACHABLE();
#else
ASSERT(Isolate::Current()->HasAttemptedReload());
ASSERT(IsolateGroup::Current()->HasAttemptedReload());
body += CheckAssignable(AbstractType::Handle(Z, field.type()),
Symbols::FunctionResult());
#endif

View file

@ -526,7 +526,7 @@ const String& TranslationHelper::DartFactoryName(NameIndex factory) {
static void CheckStaticLookup(const Object& target) {
if (target.IsNull()) {
#ifndef PRODUCT
ASSERT(Isolate::Current()->HasAttemptedReload());
ASSERT(IsolateGroup::Current()->HasAttemptedReload());
Report::LongJump(LanguageError::Handle(LanguageError::New(String::Handle(
String::New("Unimplemented handling of missing static target")))));
#else

View file

@ -961,9 +961,7 @@ void Isolate::ValidateConstants() {
// TODO(27003)
return;
}
if (HasAttemptedReload()) {
return;
}
// Verify that all canonical instances are correctly setup in the
// corresponding canonical tables.
BackgroundCompiler::Stop(this);
@ -1993,8 +1991,8 @@ bool IsolateGroup::ReloadSources(JSONStream* js,
new IsolateGroupReloadContext(this, shared_class_table, js));
group_reload_context_ = group_reload_context;
SetHasAttemptedReload(true);
ForEachIsolate([&](Isolate* isolate) {
isolate->SetHasAttemptedReload(true);
isolate->program_reload_context_ =
new ProgramReloadContext(group_reload_context_, isolate);
});
@ -2026,8 +2024,8 @@ bool IsolateGroup::ReloadKernel(JSONStream* js,
new IsolateGroupReloadContext(this, shared_class_table, js));
group_reload_context_ = group_reload_context;
SetHasAttemptedReload(true);
ForEachIsolate([&](Isolate* isolate) {
isolate->SetHasAttemptedReload(true);
isolate->program_reload_context_ =
new ProgramReloadContext(group_reload_context_, isolate);
});
@ -2448,7 +2446,7 @@ void Isolate::Shutdown() {
#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
if (FLAG_check_reloaded && is_runnable() && !Isolate::IsSystemIsolate(this)) {
if (!HasAttemptedReload()) {
if (!group()->HasAttemptedReload()) {
FATAL(
"Isolate did not reload before exiting and "
"--check-reloaded is enabled.\n");

View file

@ -474,6 +474,20 @@ class IsolateGroup : public IntrusiveDListEntry<IsolateGroup> {
return null_safety() || FLAG_strict_null_safety_checks;
}
#if !defined(PRODUCT)
#if !defined(DART_PRECOMPILED_RUNTIME)
bool HasAttemptedReload() const {
return HasAttemptedReloadBit::decode(isolate_group_flags_);
}
void SetHasAttemptedReload(bool value) {
isolate_group_flags_ =
HasAttemptedReloadBit::update(value, isolate_group_flags_);
}
#else
bool HasAttemptedReload() const { return false; }
#endif // !defined(DART_PRECOMPILED_RUNTIME)
#endif // !defined(PRODUCT)
#if defined(PRODUCT)
void set_use_osr(bool use_osr) { ASSERT(!use_osr); }
#else // defined(PRODUCT)
@ -741,6 +755,7 @@ class IsolateGroup : public IntrusiveDListEntry<IsolateGroup> {
#define ISOLATE_GROUP_FLAG_BITS(V) \
V(CompactionInProgress) \
V(EnableAsserts) \
V(HasAttemptedReload) \
V(NullSafety) \
V(NullSafetySet) \
V(Obfuscate) \
@ -1239,13 +1254,6 @@ class Isolate : public BaseIsolate, public IntrusiveDListEntry<Isolate> {
void DeleteReloadContext();
bool HasAttemptedReload() const {
return HasAttemptedReloadBit::decode(isolate_flags_);
}
void SetHasAttemptedReload(bool value) {
isolate_flags_ = HasAttemptedReloadBit::update(value, isolate_flags_);
}
bool CanReload() const;
#else
bool IsReloading() const { return false; }
@ -1567,7 +1575,6 @@ class Isolate : public BaseIsolate, public IntrusiveDListEntry<Isolate> {
V(AllClassesFinalized) \
V(RemappingCids) \
V(ResumeRequest) \
V(HasAttemptedReload) \
V(HasAttemptedStepping) \
V(ShouldPausePostServiceRequest) \
V(CopyParentCode) \

View file

@ -5812,7 +5812,7 @@ void Class::RehashConstants(Zone* zone) const {
// Shape changes lose the canonical bit because they may result/ in merging
// constants. E.g., [x1, y1], [x1, y2] -> [x1].
DEBUG_ASSERT(constant.IsCanonical() ||
Isolate::Current()->HasAttemptedReload());
IsolateGroup::Current()->HasAttemptedReload());
InsertCanonicalConstant(zone, constant);
}
set.Release();
@ -8976,7 +8976,7 @@ FunctionPtr Function::ImplicitClosureTarget(Zone* zone) const {
Function::Handle(zone, Resolver::ResolveFunction(zone, owner, func_name));
if (!target.IsNull() && (target.ptr() != parent.ptr())) {
DEBUG_ASSERT(Isolate::Current()->HasAttemptedReload());
DEBUG_ASSERT(IsolateGroup::Current()->HasAttemptedReload());
if ((target.is_static() != parent.is_static()) ||
(target.kind() != parent.kind())) {
target = Function::null();
@ -10299,8 +10299,8 @@ void Field::InitializeNew(const Field& result,
FLAG_precompiled_mode || isolate_group->use_field_guards();
#else
const bool use_guarded_cid =
FLAG_precompiled_mode ||
(isolate_group->use_field_guards() && !isolate->HasAttemptedReload());
FLAG_precompiled_mode || (isolate_group->use_field_guards() &&
!isolate_group->HasAttemptedReload());
#endif // !defined(PRODUCT)
result.set_guarded_cid_unsafe(use_guarded_cid ? kIllegalCid : kDynamicCid);
result.set_is_nullable_unsafe(use_guarded_cid ? false : true);
@ -10594,7 +10594,7 @@ FunctionPtr Field::EnsureInitializerFunction() const {
UNREACHABLE();
#else
SafepointMutexLocker ml(
thread->isolate()->group()->initializer_functions_mutex());
thread->isolate_group()->initializer_functions_mutex());
// Double check after grabbing the lock.
initializer = InitializerFunction();
if (initializer.IsNull()) {
@ -18390,7 +18390,7 @@ void Instance::CanonicalizeFieldsLocked(Thread* thread) const {
const intptr_t instance_size = SizeFromClass();
ASSERT(instance_size != 0);
const auto unboxed_fields_bitmap =
thread->isolate()->group()->shared_class_table()->GetUnboxedFieldsMapAt(
thread->isolate_group()->shared_class_table()->GetUnboxedFieldsMapAt(
class_id);
for (intptr_t offset = Instance::NextFieldOffset(); offset < instance_size;
offset += kWordSize) {

View file

@ -451,26 +451,26 @@ class Thread : public ThreadState {
}
HierarchyInfo* hierarchy_info() const {
ASSERT(isolate_ != NULL);
ASSERT(isolate_group_ != nullptr);
return hierarchy_info_;
}
void set_hierarchy_info(HierarchyInfo* value) {
ASSERT(isolate_ != NULL);
ASSERT((hierarchy_info_ == NULL && value != NULL) ||
(hierarchy_info_ != NULL && value == NULL));
ASSERT(isolate_group_ != nullptr);
ASSERT((hierarchy_info_ == nullptr && value != nullptr) ||
(hierarchy_info_ != nullptr && value == nullptr));
hierarchy_info_ = value;
}
TypeUsageInfo* type_usage_info() const {
ASSERT(isolate_ != NULL);
ASSERT(isolate_group_ != nullptr);
return type_usage_info_;
}
void set_type_usage_info(TypeUsageInfo* value) {
ASSERT(isolate_ != NULL);
ASSERT((type_usage_info_ == NULL && value != NULL) ||
(type_usage_info_ != NULL && value == NULL));
ASSERT(isolate_group_ != nullptr);
ASSERT((type_usage_info_ == nullptr && value != nullptr) ||
(type_usage_info_ != nullptr && value == nullptr));
type_usage_info_ = value;
}