// Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/object.h"

#include "platform/unaligned.h"

#include "vm/code_patcher.h"
#include "vm/dart_entry.h"
#include "vm/hash_table.h"
#include "vm/isolate_reload.h"
#include "vm/log.h"
#include "vm/object_store.h"
#include "vm/resolver.h"
#include "vm/stub_code.h"
#include "vm/symbols.h"

namespace dart {

#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)

DECLARE_FLAG(bool, trace_reload);
DECLARE_FLAG(bool, trace_reload_verbose);
DECLARE_FLAG(bool, two_args_smi_icd);

void CallSiteResetter::ZeroEdgeCounters(const Function& function) {
  ic_data_array_ = function.ic_data_array();
  if (ic_data_array_.IsNull()) {
    return;
  }
  ASSERT(ic_data_array_.Length() > 0);
  edge_counters_ ^=
      ic_data_array_.At(Function::ICDataArrayIndices::kEdgeCounters);
  if (edge_counters_.IsNull()) {
    return;
  }
  // Fill edge counters array with zeros.
  for (intptr_t i = 0; i < edge_counters_.Length(); i++) {
    edge_counters_.SetAt(i, Object::smi_zero());
  }
}

CallSiteResetter::CallSiteResetter(Zone* zone)
    : zone_(zone),
      instrs_(Instructions::Handle(zone)),
      pool_(ObjectPool::Handle(zone)),
      object_(Object::Handle(zone)),
      name_(String::Handle(zone)),
      new_cls_(Class::Handle(zone)),
      new_lib_(Library::Handle(zone)),
      new_function_(Function::Handle(zone)),
      new_field_(Field::Handle(zone)),
      entries_(Array::Handle(zone)),
      old_target_(Function::Handle(zone)),
      new_target_(Function::Handle(zone)),
      caller_(Function::Handle(zone)),
      args_desc_array_(Array::Handle(zone)),
      ic_data_array_(Array::Handle(zone)),
      edge_counters_(Array::Handle(zone)),
      descriptors_(PcDescriptors::Handle(zone)),
      ic_data_(ICData::Handle(zone)) {}

void CallSiteResetter::ResetCaches(const Code& code) {
  // Iterate over the Code's object pool and reset all ICDatas.
  // SubtypeTestCaches are reset during the same heap traversal as type
  // testing stub deoptimization.
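  // (Resetting here means dropping collected type feedback: after a reload,
  // the class ids and targets recorded in an ICData may refer to replaced
  // classes or functions, so stale entries must not survive the reload.)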
#ifdef TARGET_ARCH_IA32
  // IA32 does not have an object pool, but we can iterate over all
  // embedded objects by using the variable length data section.
  if (!code.is_alive()) {
    return;
  }
  instrs_ = code.instructions();
  ASSERT(!instrs_.IsNull());
  uword base_address = instrs_.PayloadStart();
  intptr_t offsets_length = code.pointer_offsets_length();
  const int32_t* offsets = code.untag()->data();
  for (intptr_t i = 0; i < offsets_length; i++) {
    int32_t offset = offsets[i];
    ObjectPtr* object_ptr = reinterpret_cast<ObjectPtr*>(base_address + offset);
    ObjectPtr raw_object = LoadUnaligned(object_ptr);
    if (!raw_object->IsHeapObject()) {
      continue;
    }
    object_ = raw_object;
    if (object_.IsICData()) {
      Reset(ICData::Cast(object_));
    }
  }
#else
  pool_ = code.object_pool();
  ASSERT(!pool_.IsNull());
  ResetCaches(pool_);
#endif
}

static void FindICData(const Array& ic_data_array,
                       intptr_t deopt_id,
                       ICData* ic_data) {
  // ic_data_array is sorted by deopt id because of how it is constructed in
  // Function::SaveICDataMap.
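  // For illustration only (hypothetical layout for a function with IC call
  // sites at deopt ids 3 and 7):
  //   [kEdgeCounters]     edge counters array
  //   [kFirstICData]      ICData(deopt_id: 3)
  //   [kFirstICData + 1]  ICData(deopt_id: 7)
  // The tail of the array is ordered by deopt id, which is what makes the
  // binary search below valid.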
  intptr_t lo = Function::ICDataArrayIndices::kFirstICData;
  intptr_t hi = ic_data_array.Length() - 1;
  while (lo <= hi) {
    intptr_t mid = (hi - lo + 1) / 2 + lo;
    ASSERT(mid >= lo);
    ASSERT(mid <= hi);
    *ic_data ^= ic_data_array.At(mid);
    if (ic_data->deopt_id() == deopt_id) {
      return;
    } else if (ic_data->deopt_id() > deopt_id) {
      hi = mid - 1;
    } else {
      lo = mid + 1;
    }
  }
  FATAL1("Missing deopt id %" Pd "\n", deopt_id);
}

void CallSiteResetter::ResetSwitchableCalls(const Code& code) {
  if (code.is_optimized()) {
    return;  // No switchable calls in optimized code.
  }

  object_ = code.owner();
  if (!object_.IsFunction()) {
    return;  // No switchable calls in stub code.
  }
  const Function& function = Function::Cast(object_);

  if (function.kind() == UntaggedFunction::kIrregexpFunction) {
    // Regex matchers do not support breakpoints or stepping, and they only
    // call core library functions that cannot change due to reload. As a
    // performance optimization, avoid this matching of ICData to PCs for
    // these functions' large number of instance calls.
    ASSERT(!function.is_debuggable());
    return;
  }

  ic_data_array_ = function.ic_data_array();
  if (ic_data_array_.IsNull()) {
    // The megamorphic miss stub and some recognized functions do not
    // populate their ic_data_array. Check that this only happens for
    // functions without IC calls.
#if defined(DEBUG)
    descriptors_ = code.pc_descriptors();
    PcDescriptors::Iterator iter(descriptors_, UntaggedPcDescriptors::kIcCall);
    while (iter.MoveNext()) {
      FATAL1("%s has IC calls but no ic_data_array\n",
             function.ToFullyQualifiedCString());
    }
#endif
    return;
  }

  descriptors_ = code.pc_descriptors();
  PcDescriptors::Iterator iter(descriptors_, UntaggedPcDescriptors::kIcCall);
  while (iter.MoveNext()) {
    uword pc = code.PayloadStart() + iter.PcOffset();
    CodePatcher::GetInstanceCallAt(pc, code, &object_);
    // This check both avoids unnecessary patching to reduce log spam and
    // prevents patching over breakpoint stubs.
    if (!object_.IsICData()) {
      FindICData(ic_data_array_, iter.DeoptId(), &ic_data_);
      ASSERT(ic_data_.rebind_rule() == ICData::kInstance);
      ASSERT(ic_data_.NumArgsTested() == 1);
      const Code& stub =
          ic_data_.is_tracking_exactness()
              ? StubCode::OneArgCheckInlineCacheWithExactnessCheck()
              : StubCode::OneArgCheckInlineCache();
      CodePatcher::PatchInstanceCallAt(pc, code, ic_data_, stub);
      if (FLAG_trace_ic) {
        OS::PrintErr("Instance call at %" Px
                     " resetting to polymorphic dispatch, %s\n",
                     pc, ic_data_.ToCString());
      }
    }
  }
}

void CallSiteResetter::ResetCaches(const ObjectPool& pool) {
  for (intptr_t i = 0; i < pool.Length(); i++) {
    ObjectPool::EntryType entry_type = pool.TypeAt(i);
    if (entry_type != ObjectPool::EntryType::kTaggedObject) {
      continue;
    }
    object_ = pool.ObjectAt(i);
    if (object_.IsICData()) {
      Reset(ICData::Cast(object_));
    }
  }
}

void Class::CopyStaticFieldValues(ProgramReloadContext* reload_context,
                                  const Class& old_cls) const {
  const Array& old_field_list = Array::Handle(old_cls.fields());
  Field& old_field = Field::Handle();
  String& old_name = String::Handle();

  const Array& field_list = Array::Handle(fields());
  Field& field = Field::Handle();
  String& name = String::Handle();

  for (intptr_t i = 0; i < field_list.Length(); i++) {
    field = Field::RawCast(field_list.At(i));
    name = field.name();
    // Find the corresponding old field, if it exists, and migrate
    // over the field value.
    for (intptr_t j = 0; j < old_field_list.Length(); j++) {
      old_field = Field::RawCast(old_field_list.At(j));
      old_name = old_field.name();
      if (name.Equals(old_name)) {
        if (field.is_static()) {
          // We only copy values if the field is not a const field; const
          // fields are updated by the reload itself.
          if (!field.is_const()) {
            // Make the new field point to the old field value so that both
            // old and new code see and update the same value.
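            // For illustration (hypothetical field, not from this source):
            // if `static int counter = 0;` survives a reload, the new Field
            // reuses the old field id, so a value written through old code
            // is read back unchanged through new code.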
            reload_context->isolate_group()->FreeStaticField(field);
            field.set_field_id_unsafe(old_field.field_id());
          }
          reload_context->AddStaticFieldMapping(old_field, field);
        } else {
          if (old_field.needs_load_guard()) {
            ASSERT(!old_field.is_unboxing_candidate());
            field.set_needs_load_guard(true);
            field.set_is_unboxing_candidate_unsafe(false);
          }
        }
      }
    }
  }
}

void Class::CopyCanonicalConstants(const Class& old_cls) const {
  if (is_enum_class()) {
    // We do not copy an enum class's canonical constants because we
    // explicitly forward ('become') the old enum values to the new enum
    // values.
    return;
  }
#if defined(DEBUG)
  {
    // Class has no canonical constants allocated.
    const Array& my_constants = Array::Handle(constants());
    ASSERT(my_constants.IsNull() || my_constants.Length() == 0);
  }
#endif  // defined(DEBUG).
  // Copy old constants into the new class.
  const Array& old_constants = Array::Handle(old_cls.constants());
  if (old_constants.IsNull() || old_constants.Length() == 0) {
    return;
  }
  TIR_Print("Copied %" Pd " canonical constants for class `%s`\n",
            old_constants.Length(), ToCString());
  set_constants(old_constants);
}

void Class::CopyDeclarationType(const Class& old_cls) const {
  const Type& old_declaration_type = Type::Handle(old_cls.declaration_type());
  if (old_declaration_type.IsNull()) {
    return;
  }
  set_declaration_type(old_declaration_type);
}

class EnumMapTraits {
 public:
  static bool ReportStats() { return false; }
  static const char* Name() { return "EnumMapTraits"; }

  static bool IsMatch(const Object& a, const Object& b) {
    return a.ptr() == b.ptr();
  }

  static uword Hash(const Object& obj) {
    ASSERT(obj.IsString());
    return String::Cast(obj).Hash();
  }
};

// Given an old enum class, add become mappings from old values to new values.
// Some notes about how we reload enums below:
//
// When an enum is reloaded, the following three things can happen, possibly
// simultaneously.
//
// 1) A new enum value is added.
//    This case is handled automatically.
// 2) Enum values are reordered.
//    We pair old and new enums and the old enums 'become' the new ones so
//    the ordering is always correct (i.e. enum indices match slots in the
//    values array).
// 3) An existing enum value is removed.
//    Each enum class has a canonical 'deleted' enum sentinel instance.
//    When an enum value is deleted, we forward ('become') all references to
//    it to the 'deleted' sentinel value, whose index value is -1.
//
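// As an illustrative sketch (hypothetical program, not taken from a test):
// reloading
//
//   enum E { a, b, c }   =>   enum E { a, c }
//
// pairs the old E.a with the new E.a, pairs the old E.c (index 2) with the
// new E.c (index 1) so stale references observe the new index, and forwards
// the old E.b to E's deleted enum sentinel.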
void Class::ReplaceEnum(ProgramReloadContext* reload_context,
                        const Class& old_enum) const {
  // We only do this for finalized enum classes.
  ASSERT(is_enum_class());
  ASSERT(old_enum.is_enum_class());
  ASSERT(is_finalized());
  ASSERT(old_enum.is_finalized());

  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ObjectStore* object_store = thread->isolate_group()->object_store();

  Field& field = Field::Handle(zone);
  String& enum_ident = String::Handle();
  Instance& old_enum_value = Instance::Handle(zone);
  Instance& enum_value = Instance::Handle(zone);
  // The old E.values array.
  Array& old_enum_values = Array::Handle(zone);
  // The new E.values array.
  Array& enum_values = Array::Handle(zone);
  // The old E._deleted_enum_sentinel instance.
  Instance& old_deleted_enum_sentinel = Instance::Handle(zone);
  // The new E._deleted_enum_sentinel instance.
  Instance& deleted_enum_sentinel = Instance::Handle(zone);
  Array& enum_map_storage =
      Array::Handle(zone, HashTables::New<UnorderedHashMap<EnumMapTraits> >(4));
  ASSERT(!enum_map_storage.IsNull());

  TIR_Print("Replacing enum `%s`\n", String::Handle(Name()).ToCString());

  {
    field = old_enum.LookupStaticField(Symbols::Values());
    if (!field.IsNull()) {
      ASSERT(field.is_static() && field.is_const());
      old_enum_values ^= field.StaticConstFieldValue();
      ASSERT(!old_enum_values.IsNull());
    } else {
      old_enum_values = Array::empty_array().ptr();
    }

    field = old_enum.LookupStaticField(Symbols::_DeletedEnumSentinel());
    ASSERT(!field.IsNull() && field.is_static() && field.is_const());
    old_deleted_enum_sentinel ^= field.StaticConstFieldValue();
    ASSERT(!old_deleted_enum_sentinel.IsNull());

    field = object_store->enum_name_field();
    ASSERT(!field.IsNull());

    UnorderedHashMap<EnumMapTraits> enum_map(enum_map_storage.ptr());
    // Build a map of all enum name -> old enum instance.
    for (intptr_t i = 0, n = old_enum_values.Length(); i < n; ++i) {
      old_enum_value ^= old_enum_values.At(i);
      ASSERT(!old_enum_value.IsNull());
      enum_ident ^= old_enum_value.GetField(field);
      VTIR_Print("Element %s being added to mapping\n", enum_ident.ToCString());
      bool update = enum_map.UpdateOrInsert(enum_ident, old_enum_value);
      VTIR_Print("Element %s added to mapping\n", enum_ident.ToCString());
      ASSERT(!update);
    }
    // The storage given to the map may have been reallocated, remember the new
    // address.
    enum_map_storage = enum_map.Release().ptr();
  }

  bool enums_deleted = false;
  {
    field = LookupStaticField(Symbols::Values());
    if (!field.IsNull()) {
      ASSERT(field.is_static() && field.is_const());
      enum_values ^= field.StaticConstFieldValue();
      ASSERT(!enum_values.IsNull());
    } else {
      enum_values = Array::empty_array().ptr();
    }

    field = LookupStaticField(Symbols::_DeletedEnumSentinel());
    ASSERT(!field.IsNull() && field.is_static() && field.is_const());
    deleted_enum_sentinel ^= field.StaticConstFieldValue();
    ASSERT(!deleted_enum_sentinel.IsNull());

    field = object_store->enum_name_field();
    ASSERT(!field.IsNull());

    UnorderedHashMap<EnumMapTraits> enum_map(enum_map_storage.ptr());
    // Add a become mapping from the old instances to the new instances.
    for (intptr_t i = 0, n = enum_values.Length(); i < n; ++i) {
      enum_value ^= enum_values.At(i);
      ASSERT(!enum_value.IsNull());
      enum_ident ^= enum_value.GetField(field);

      old_enum_value ^= enum_map.GetOrNull(enum_ident);
      if (old_enum_value.IsNull()) {
        VTIR_Print("New element %s was not found in mapping\n",
                   enum_ident.ToCString());
      } else {
        VTIR_Print("Adding element `%s` to become mapping\n",
                   enum_ident.ToCString());
        bool removed = enum_map.Remove(enum_ident);
        ASSERT(removed);
        reload_context->AddBecomeMapping(old_enum_value, enum_value);
      }
    }
    enums_deleted = enum_map.NumOccupied() > 0;
    // The storage given to the map may have been reallocated, remember the new
    // address.
    enum_map_storage = enum_map.Release().ptr();
  }

  // Map the old E.values array to the new E.values array.
  reload_context->AddBecomeMapping(old_enum_values, enum_values);

  // Map the old E._deleted_enum_sentinel to the new E._deleted_enum_sentinel.
  reload_context->AddBecomeMapping(old_deleted_enum_sentinel,
                                   deleted_enum_sentinel);

  if (enums_deleted) {
    // Map all deleted enums to the deleted enum sentinel value.
    // TODO(johnmccutchan): Add this to the reload 'notices' list.
    VTIR_Print(
        "The following enum values were deleted from %s and will become the "
        "deleted enum sentinel:\n",
        old_enum.ToCString());
    UnorderedHashMap<EnumMapTraits> enum_map(enum_map_storage.ptr());
    UnorderedHashMap<EnumMapTraits>::Iterator it(&enum_map);
    while (it.MoveNext()) {
      const intptr_t entry = it.Current();
      enum_ident = String::RawCast(enum_map.GetKey(entry));
      ASSERT(!enum_ident.IsNull());
      old_enum_value ^= enum_map.GetOrNull(enum_ident);
      VTIR_Print("Element `%s` was deleted\n", enum_ident.ToCString());
      reload_context->AddBecomeMapping(old_enum_value, deleted_enum_sentinel);
    }
    enum_map.Release();
  }
}

void Class::PatchFieldsAndFunctions() const {
  // Move all old functions and fields to a patch class so that they
  // still refer to their original script.
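  // (Rationale sketch, an assumption rather than a guarantee stated in this
  // file: old functions can still be running or on the stack after a reload,
  // so their token positions must keep resolving against the old script.)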
  const PatchClass& patch =
      PatchClass::Handle(PatchClass::New(*this, Script::Handle(script())));
  ASSERT(!patch.IsNull());
  const Library& lib = Library::Handle(library());
  patch.set_library_kernel_data(ExternalTypedData::Handle(lib.kernel_data()));
  patch.set_library_kernel_offset(lib.kernel_offset());

  const Array& funcs = Array::Handle(current_functions());
  Function& func = Function::Handle();
  Object& owner = Object::Handle();
  for (intptr_t i = 0; i < funcs.Length(); i++) {
    func = Function::RawCast(funcs.At(i));
    if ((func.token_pos() == TokenPosition::kMinSource) ||
        func.IsClosureFunction()) {
      // Eval functions do not need to have their script updated.
      //
      // Closure functions refer to the parent's script, which we can
      // rely on being updated for us, if necessary.
      continue;
    }

    // If the source for this function is already patched, leave it alone.
    owner = func.RawOwner();
    ASSERT(!owner.IsNull());
    if (!owner.IsPatchClass()) {
      ASSERT(owner.ptr() == this->ptr());
      func.set_owner(patch);
    }
  }

  Thread* thread = Thread::Current();
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
  const Array& field_list = Array::Handle(fields());
  Field& field = Field::Handle();
  for (intptr_t i = 0; i < field_list.Length(); i++) {
    field = Field::RawCast(field_list.At(i));
    owner = field.RawOwner();
    ASSERT(!owner.IsNull());
    if (!owner.IsPatchClass()) {
      ASSERT(owner.ptr() == this->ptr());
      field.set_owner(patch);
    }
    field.ForceDynamicGuardedCidAndLength();
  }
}

void Class::MigrateImplicitStaticClosures(ProgramReloadContext* irc,
                                          const Class& new_cls) const {
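  // (Context, as a sketch: a static tear-off such as `var f = C.m;` is backed
  // by a cached implicit static closure. After a reload, the old closure
  // object is forwarded to the closure of the replacement function so that
  // existing references keep working; the canonical bit is carried over
  // below so the identity of a canonicalized tear-off is preserved.)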
  const Array& funcs = Array::Handle(current_functions());
  Thread* thread = Thread::Current();
  Function& old_func = Function::Handle();
  String& selector = String::Handle();
  Function& new_func = Function::Handle();
  Closure& old_closure = Closure::Handle();
  Closure& new_closure = Closure::Handle();
  for (intptr_t i = 0; i < funcs.Length(); i++) {
    old_func ^= funcs.At(i);
    if (old_func.is_static() && old_func.HasImplicitClosureFunction()) {
      selector = old_func.name();
      new_func = Resolver::ResolveFunction(thread->zone(), new_cls, selector);
      if (!new_func.IsNull() && new_func.is_static()) {
        old_func = old_func.ImplicitClosureFunction();
        old_closure = old_func.ImplicitStaticClosure();
        new_func = new_func.ImplicitClosureFunction();
        new_closure = new_func.ImplicitStaticClosure();
        if (old_closure.IsCanonical()) {
          new_closure.SetCanonical();
        }
        irc->AddBecomeMapping(old_closure, new_closure);
      }
    }
  }
}

class EnumClassConflict : public ClassReasonForCancelling {
 public:
  EnumClassConflict(Zone* zone, const Class& from, const Class& to)
      : ClassReasonForCancelling(zone, from, to) {}

  StringPtr ToString() {
    return String::NewFormatted(
        from_.is_enum_class()
            ? "Enum class cannot be redefined to be a non-enum class: %s"
            : "Class cannot be redefined to be an enum class: %s",
        from_.ToCString());
  }
};

class EnsureFinalizedError : public ClassReasonForCancelling {
 public:
  EnsureFinalizedError(Zone* zone,
                       const Class& from,
                       const Class& to,
                       const Error& error)
      : ClassReasonForCancelling(zone, from, to), error_(error) {}

 private:
  const Error& error_;

  ErrorPtr ToError() { return error_.ptr(); }

  StringPtr ToString() { return String::New(error_.ToErrorCString()); }
};

class ConstToNonConstClass : public ClassReasonForCancelling {
 public:
  ConstToNonConstClass(Zone* zone, const Class& from, const Class& to)
      : ClassReasonForCancelling(zone, from, to) {}

 private:
  StringPtr ToString() {
    return String::NewFormatted("Const class cannot become non-const: %s",
                                from_.ToCString());
  }
};

class ConstClassFieldRemoved : public ClassReasonForCancelling {
 public:
  ConstClassFieldRemoved(Zone* zone, const Class& from, const Class& to)
      : ClassReasonForCancelling(zone, from, to) {}

 private:
  StringPtr ToString() {
    return String::NewFormatted("Const class cannot remove fields: %s",
                                from_.ToCString());
  }
};

class NativeFieldsConflict : public ClassReasonForCancelling {
 public:
  NativeFieldsConflict(Zone* zone, const Class& from, const Class& to)
      : ClassReasonForCancelling(zone, from, to) {}

 private:
  StringPtr ToString() {
    return String::NewFormatted("Number of native fields changed in %s",
                                from_.ToCString());
  }
};

class TypeParametersChanged : public ClassReasonForCancelling {
 public:
  TypeParametersChanged(Zone* zone, const Class& from, const Class& to)
      : ClassReasonForCancelling(zone, from, to) {}

  StringPtr ToString() {
    return String::NewFormatted(
        "Limitation: type parameters have changed for %s", from_.ToCString());
  }

  void AppendTo(JSONArray* array) {
    JSONObject jsobj(array);
    jsobj.AddProperty("type", "ReasonForCancellingReload");
    jsobj.AddProperty("kind", "TypeParametersChanged");
    jsobj.AddProperty("class", to_);
    jsobj.AddProperty("message",
                      "Limitation: changing type parameters "
                      "does not work with hot reload.");
  }
};

class PreFinalizedConflict : public ClassReasonForCancelling {
 public:
  PreFinalizedConflict(Zone* zone, const Class& from, const Class& to)
      : ClassReasonForCancelling(zone, from, to) {}

 private:
  StringPtr ToString() {
    return String::NewFormatted(
        "Original class ('%s') is prefinalized and replacement class "
        "('%s') is not",
        from_.ToCString(), to_.ToCString());
  }
};

class InstanceSizeConflict : public ClassReasonForCancelling {
 public:
  InstanceSizeConflict(Zone* zone, const Class& from, const Class& to)
      : ClassReasonForCancelling(zone, from, to) {}

 private:
  StringPtr ToString() {
    return String::NewFormatted("Instance size mismatch between '%s' (%" Pd
                                ") and replacement "
                                "'%s' (%" Pd ")",
                                from_.ToCString(), from_.host_instance_size(),
                                to_.ToCString(), to_.host_instance_size());
  }
};

// This is executed before iterating over the instances.
void Class::CheckReload(const Class& replacement,
                        ProgramReloadContext* context) const {
  ASSERT(ProgramReloadContext::IsSameClass(*this, replacement));

  if (!is_declaration_loaded()) {
    // The old class hasn't been used in any meaningful way, so the VM is okay
    // with any change.
    return;
  }

  // Ensure is_enum_class etc. have been set.
  replacement.EnsureDeclarationLoaded();

  // A class cannot change its enum property.
  if (is_enum_class() != replacement.is_enum_class()) {
    context->group_reload_context()->AddReasonForCancelling(
        new (context->zone())
            EnumClassConflict(context->zone(), *this, replacement));
    return;
  }

  if (is_finalized()) {
    // Make sure the declaration type's parameter count matches for the two
    // classes, e.g. class A<int,B> {} cannot be replaced with class A<B> {}.
    auto group_context = context->group_reload_context();
    if (NumTypeParameters() != replacement.NumTypeParameters()) {
      group_context->AddReasonForCancelling(
          new (context->zone())
              TypeParametersChanged(context->zone(), *this, replacement));
      return;
    }
  }

  if (is_finalized() || is_allocate_finalized()) {
    auto thread = Thread::Current();

    // Ensure the replacement class is also finalized.
    const Error& error = Error::Handle(
        is_allocate_finalized() ? replacement.EnsureIsAllocateFinalized(thread)
                                : replacement.EnsureIsFinalized(thread));
    if (!error.IsNull()) {
      context->group_reload_context()->AddReasonForCancelling(
          new (context->zone())
              EnsureFinalizedError(context->zone(), *this, replacement, error));
      return;  // No reason to check other properties.
    }
    ASSERT(replacement.is_finalized());
    TIR_Print("Finalized replacement class for %s\n", ToCString());
  }

  if (is_finalized() && is_const() && (constants() != Array::null()) &&
      (Array::LengthOf(constants()) > 0)) {
    // Consts can't become non-consts.
    if (!replacement.is_const()) {
      context->group_reload_context()->AddReasonForCancelling(
          new (context->zone())
              ConstToNonConstClass(context->zone(), *this, replacement));
      return;
    }

    // Consts can't lose fields.
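    // (Hypothetical example: removing `final int y;` from a `class C` with a
    // const constructor would leave already-canonicalized `const C` instances
    // with a field slot the new class no longer describes, so the reload is
    // rejected rather than attempting to morph the constants.)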
    bool field_removed = false;
    const Array& old_fields =
        Array::Handle(OffsetToFieldMap(true /* original classes */));
    const Array& new_fields = Array::Handle(replacement.OffsetToFieldMap());
    if (new_fields.Length() < old_fields.Length()) {
      field_removed = true;
    } else {
      Field& old_field = Field::Handle();
      Field& new_field = Field::Handle();
      String& old_name = String::Handle();
      String& new_name = String::Handle();
      for (intptr_t i = 0, n = old_fields.Length(); i < n; i++) {
        old_field ^= old_fields.At(i);
        new_field ^= new_fields.At(i);
        if (old_field.IsNull() != new_field.IsNull()) {
          field_removed = true;
          break;
        }
        if (!old_field.IsNull()) {
          old_name = old_field.name();
          new_name = new_field.name();
          if (!old_name.Equals(new_name)) {
            field_removed = true;
            break;
          }
        }
      }
    }
    if (field_removed) {
      context->group_reload_context()->AddReasonForCancelling(
          new (context->zone())
              ConstClassFieldRemoved(context->zone(), *this, replacement));
      return;
    }
  }

  // Native field count cannot change.
  if (num_native_fields() != replacement.num_native_fields()) {
    context->group_reload_context()->AddReasonForCancelling(
        new (context->zone())
            NativeFieldsConflict(context->zone(), *this, replacement));
    return;
  }

  // Just checking.
  ASSERT(is_enum_class() == replacement.is_enum_class());
  ASSERT(num_native_fields() == replacement.num_native_fields());

  if (is_finalized()) {
    if (!CanReloadFinalized(replacement, context)) return;
  }
  if (is_prefinalized()) {
    if (!CanReloadPreFinalized(replacement, context)) return;
  }
  TIR_Print("Class `%s` can be reloaded (%" Pd " and %" Pd ")\n", ToCString(),
            id(), replacement.id());
}

bool Class::RequiresInstanceMorphing(const Class& replacement) const {
  // Get the field maps for both classes. These field maps walk the class
  // hierarchy.
  const Array& fields =
      Array::Handle(OffsetToFieldMap(true /* original classes */));
  const Array& replacement_fields =
      Array::Handle(replacement.OffsetToFieldMap());

  // Check that the size of the instance is the same.
  if (fields.Length() != replacement_fields.Length()) return true;

  // Check that we have the same next field offset. This check is not
  // redundant with the one above because the instance OffsetToFieldMap
  // array length is based on the instance size (which may be aligned up).
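  // (Illustrative numbers only, assuming an 8-byte header and 16-byte object
  // alignment on a 64-bit target: classes with 2 and 3 word-sized fields both
  // round up to a 32-byte instance, and thus the same field map length, yet
  // their next field offsets differ: 24 vs. 32.)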
  if (host_next_field_offset() != replacement.host_next_field_offset()) {
    return true;
  }

  // Verify that field names / offsets match across the entire hierarchy.
  Field& field = Field::Handle();
  String& field_name = String::Handle();
  Field& replacement_field = Field::Handle();
  String& replacement_field_name = String::Handle();

  for (intptr_t i = 0; i < fields.Length(); i++) {
    if (fields.At(i) == Field::null()) {
      ASSERT(replacement_fields.At(i) == Field::null());
      continue;
    }
    field = Field::RawCast(fields.At(i));
    replacement_field = Field::RawCast(replacement_fields.At(i));
    field_name = field.name();
    replacement_field_name = replacement_field.name();
    if (!field_name.Equals(replacement_field_name)) return true;
  }
  return false;
}

bool Class::CanReloadFinalized(const Class& replacement,
                               ProgramReloadContext* context) const {
  // Make sure the declaration type's argument count matches for the two
  // classes, e.g. class A<int,B> {} cannot be replaced with class A<B> {}.
  auto group_context = context->group_reload_context();
  auto shared_class_table =
      group_context->isolate_group()->shared_class_table();
  if (NumTypeArguments() != replacement.NumTypeArguments()) {
    group_context->AddReasonForCancelling(
        new (context->zone())
            TypeParametersChanged(context->zone(), *this, replacement));
    return false;
  }
  if (RequiresInstanceMorphing(replacement)) {
    ASSERT(id() == replacement.id());
    const classid_t cid = id();
    // We unconditionally create an instance morpher. As a side effect of
    // building the morpher, we will mark all new fields as late.
    auto instance_morpher = InstanceMorpher::CreateFromClassDescriptors(
        context->zone(), shared_class_table, *this, replacement);
    group_context->EnsureHasInstanceMorpherFor(cid, instance_morpher);
  }
  return true;
}

bool Class::CanReloadPreFinalized(const Class& replacement,
                                  ProgramReloadContext* context) const {
  // The replacement class must also be prefinalized.
  if (!replacement.is_prefinalized()) {
    context->group_reload_context()->AddReasonForCancelling(
        new (context->zone())
            PreFinalizedConflict(context->zone(), *this, replacement));
    return false;
  }
  // Check that the instance sizes are equal.
  if (host_instance_size() != replacement.host_instance_size()) {
    context->group_reload_context()->AddReasonForCancelling(
        new (context->zone())
            InstanceSizeConflict(context->zone(), *this, replacement));
    return false;
  }
  return true;
}

void Library::CheckReload(const Library& replacement,
                          ProgramReloadContext* context) const {
  // Carry over the loaded bit of any deferred prefixes.
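  // (Hypothetical example: for `import 'a.dart' deferred as a;`, if `a` was
  // already loaded via `a.loadLibrary()` before the reload, the replacement
  // prefix must still report itself as loaded afterwards.)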
  Object& object = Object::Handle();
  LibraryPrefix& prefix = LibraryPrefix::Handle();
  LibraryPrefix& original_prefix = LibraryPrefix::Handle();
  String& name = String::Handle();
  String& original_name = String::Handle();
  DictionaryIterator it(replacement);
  while (it.HasNext()) {
    object = it.GetNext();
    if (!object.IsLibraryPrefix()) continue;
    prefix ^= object.ptr();
    if (!prefix.is_deferred_load()) continue;

    name = prefix.name();
    DictionaryIterator original_it(*this);
    while (original_it.HasNext()) {
      object = original_it.GetNext();
      if (!object.IsLibraryPrefix()) continue;
      original_prefix ^= object.ptr();
      if (!original_prefix.is_deferred_load()) continue;
      original_name = original_prefix.name();
      if (!name.Equals(original_name)) continue;

      // The replacement of the old prefix with the new prefix
      // in Isolate::loaded_prefixes_set_ implicitly carries
      // the loaded state over to the new prefix.
      context->AddBecomeMapping(original_prefix, prefix);
    }
  }
}

void CallSiteResetter::Reset(const ICData& ic) {
  ICData::RebindRule rule = ic.rebind_rule();
  if (rule == ICData::kInstance) {
    const intptr_t num_args = ic.NumArgsTested();
    const intptr_t len = ic.Length();
    // We need at least one non-sentinel entry to require a check
    // for the smi fast path case.
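    // (Hypothetical example: an `a + b` call site that has only ever observed
    // (Smi, Smi) keeps that single entry, with its count reset below, so the
    // Smi fast path stays valid across the reload.)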
    if (num_args == 2 && len >= 2) {
      if (ic.IsImmutable()) {
        return;
      }
      name_ = ic.target_name();
      const Class& smi_class = Class::Handle(zone_, Smi::Class());
      const Function& smi_op_target = Function::Handle(
          zone_, Resolver::ResolveDynamicAnyArgs(zone_, smi_class, name_));
      GrowableArray<intptr_t> class_ids(2);
      Function& target = Function::Handle(zone_);
      ic.GetCheckAt(0, &class_ids, &target);
      if ((target.ptr() == smi_op_target.ptr()) && (class_ids[0] == kSmiCid) &&
          (class_ids[1] == kSmiCid)) {
        // The smi fast path case: preserve the initial entry but reset the
        // count.
        ic.ClearCountAt(0, *this);
        ic.TruncateTo(/*num_checks=*/1, *this);
        return;
      }
      // Fall back to the normal behavior with cached empty ICData arrays.
    }
    ic.Clear(*this);
    ic.set_is_megamorphic(false);
    return;
  } else if (rule == ICData::kNoRebind || rule == ICData::kNSMDispatch) {
    // TODO(30877): we should account for addition/removal of NSM.
    // Don't rebind dispatchers.
    return;
  } else if (rule == ICData::kStatic || rule == ICData::kSuper) {
    old_target_ = ic.GetTargetAt(0);
    if (old_target_.IsNull()) {
      FATAL("old_target is NULL.\n");
    }
    name_ = old_target_.name();

    if (rule == ICData::kStatic) {
      ASSERT(old_target_.is_static() ||
             old_target_.kind() == UntaggedFunction::kConstructor);
      // This can be incorrect if the call site was an unqualified invocation.
      new_cls_ = old_target_.Owner();
      new_target_ = Resolver::ResolveFunction(zone_, new_cls_, name_);
      if (new_target_.kind() != old_target_.kind()) {
        new_target_ = Function::null();
      }
    } else {
      // Super call.
      caller_ = ic.Owner();
      ASSERT(!caller_.is_static());
      new_cls_ = caller_.Owner();
      new_cls_ = new_cls_.SuperClass();
      new_target_ = Resolver::ResolveDynamicAnyArgs(zone_, new_cls_, name_,
                                                    /*allow_add=*/true);
    }
    args_desc_array_ = ic.arguments_descriptor();
    ArgumentsDescriptor args_desc(args_desc_array_);
    if (new_target_.IsNull() ||
        !new_target_.AreValidArguments(args_desc, NULL)) {
      // TODO(rmacnak): Patch to an NSME stub.
      VTIR_Print("Cannot rebind static call to %s from %s\n",
                 old_target_.ToCString(),
                 Object::Handle(zone_, ic.Owner()).ToCString());
      return;
    }
    ic.ClearAndSetStaticTarget(new_target_, *this);
  } else {
    FATAL("Unexpected rebind rule.");
  }
}

#endif  // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)

}  // namespace dart