[vm, gc] Avoid unnecessary intermediate maps, handles and arrays in setting up a become operation.

TEST=ci
Change-Id: Ib014fc544dcab54258d2102ee87d8f35f609594a
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/211940
Commit-Queue: Ryan Macnak <rmacnak@google.com>
Reviewed-by: Alexander Aprelev <aam@google.com>
This commit is contained in:
Ryan Macnak 2021-09-28 18:30:52 +00:00 committed by commit-bot@chromium.org
parent fad52e3388
commit 8e17ac781b
11 changed files with 173 additions and 238 deletions

View file

@ -92,7 +92,7 @@ void LoadIndexedUnsafeInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
// No addressing mode will ignore the upper bits. Cannot use the shorter `orl`
// to clear the upper bits as this instructions uses negative indices as part
// of FP-relative loads.
// TODO(compressed-pointers): Can we guarentee the index is already
// TODO(compressed-pointers): Can we guarantee the index is already
// sign-extended if it always comes from an args-descriptor load?
__ movsxd(index, index);
#endif
@ -123,7 +123,7 @@ DEFINE_BACKEND(StoreIndexedUnsafe,
// No addressing mode will ignore the upper bits. Cannot use the shorter `orl`
// to clear the upper bits as this instructions uses negative indices as part
// of FP-relative stores.
// TODO(compressed-pointers): Can we guarentee the index is already
// TODO(compressed-pointers): Can we guarantee the index is already
// sign-extended if it always comes from an args-descriptor load?
__ movsxd(index, index);
#endif

View file

@ -217,6 +217,27 @@ class WritableCodeLiteralsScope : public ValueObject {
};
#endif
// Registers this become operation with the current isolate group so the GC
// can keep the pending before/after pointer pairs up to date.
Become::Become() {
  Thread* thread = Thread::Current();
  IsolateGroup* isolate_group = thread->isolate_group();
  // Only one outstanding become operation is allowed at a time.
  ASSERT(isolate_group->become() == nullptr);
  isolate_group->set_become(this);
}
// Unregisters this become operation from the current isolate group.
Become::~Become() {
  IsolateGroup* isolate_group = Thread::Current()->isolate_group();
  isolate_group->set_become(nullptr);
}
void Become::Add(const Object& before, const Object& after) {
pointers_.Add(before.ptr());
pointers_.Add(after.ptr());
}
// Exposes the pending before/after pairs to the GC so the raw pointers in
// pointers_ stay valid if the referenced objects are moved.
void Become::VisitObjectPointers(ObjectPointerVisitor* visitor) {
  const intptr_t length = pointers_.length();
  if (length == 0) {
    return;  // &pointers_[0] would be invalid on an empty array.
  }
  visitor->VisitPointers(&pointers_[0], length);
}
void Become::MakeDummyObject(const Instance& instance) {
// Make the forward pointer point to itself.
// This is needed to distinguish it from a real forward object.
@ -228,7 +249,7 @@ static bool IsDummyObject(ObjectPtr object) {
return GetForwardedObject(object) == object;
}
void Become::CrashDump(ObjectPtr before_obj, ObjectPtr after_obj) {
static void CrashDump(ObjectPtr before_obj, ObjectPtr after_obj) {
OS::PrintErr("DETECTED FATAL ISSUE IN BECOME MAPPINGS\n");
OS::PrintErr("BEFORE ADDRESS: %#" Px "\n", static_cast<uword>(before_obj));
@ -256,7 +277,7 @@ void Become::CrashDump(ObjectPtr before_obj, ObjectPtr after_obj) {
}
}
void Become::ElementsForwardIdentity(const Array& before, const Array& after) {
void Become::Forward() {
Thread* thread = Thread::Current();
auto heap = thread->isolate_group()->heap();
@ -264,10 +285,9 @@ void Become::ElementsForwardIdentity(const Array& before, const Array& after) {
HeapIterationScope his(thread);
// Setup forwarding pointers.
ASSERT(before.Length() == after.Length());
for (intptr_t i = 0; i < before.Length(); i++) {
ObjectPtr before_obj = before.At(i);
ObjectPtr after_obj = after.At(i);
for (intptr_t i = 0; i < pointers_.length(); i += 2) {
ObjectPtr before_obj = pointers_[i];
ObjectPtr after_obj = pointers_[i + 1];
if (before_obj == after_obj) {
FATAL("become: Cannot self-forward");
@ -304,10 +324,11 @@ void Become::ElementsForwardIdentity(const Array& before, const Array& after) {
FollowForwardingPointers(thread);
#if defined(DEBUG)
for (intptr_t i = 0; i < before.Length(); i++) {
ASSERT(before.At(i) == after.At(i));
for (intptr_t i = 0; i < pointers_.length(); i += 2) {
ASSERT(pointers_[i] == pointers_[i + 1]);
}
#endif
pointers_.Clear();
}
void Become::FollowForwardingPointers(Thread* thread) {

View file

@ -6,6 +6,7 @@
#define RUNTIME_VM_HEAP_BECOME_H_
#include "platform/atomic.h"
#include "platform/growable_array.h"
#include "vm/allocation.h"
#include "vm/raw_object.h"
@ -68,15 +69,39 @@ class ForwardingCorpse {
DISALLOW_IMPLICIT_CONSTRUCTORS(ForwardingCorpse);
};
// TODO(johnmccutchan): Refactor this class so that it is not all static and
// provides utility methods for building the mapping of before and after.
class Become : public AllStatic {
// Forward/exchange object identity within pairs of objects.
//
// Forward: Redirects all pointers to each 'before' object to the corresponding
// 'after' object. Every 'before' object is guaranteed to be unreachable after
// the operation. The identity hash of the 'before' object is retained.
//
// This is useful for atomically applying behavior and schema changes, which can
// be done by allocating fresh objects with the new schema and forwarding the
// identity of the old objects to the new objects.
//
// Exchange: Redirect all pointers to each 'before' object to the corresponding
// 'after' object and vice versa. Both objects remain reachable after the
// operation.
//
// This is useful for implementing certain types of proxies. For example, an
// infrequently accessed object may be written to disk and swapped with a
// so-called "husk", and swapped back when it is later accessed.
//
// This operation is named 'become' after its original in Smalltalk:
// x become: y "exchange identity for one pair"
// x becomeForward: y "forward identity for one pair"
// #(x ...) elementsExchangeIdentityWith: #(y ...)
// #(x ...) elementsForwardIdentityTo: #(y ...)
class Become {
public:
// Smalltalk's one-way bulk become (Array>>#elementsForwardIdentityTo:).
// Redirects all pointers to elements of 'before' to the corresponding element
// in 'after'. Every element in 'before' is guaranteed to be not reachable.
// Useful for atomically applying behavior and schema changes.
static void ElementsForwardIdentity(const Array& before, const Array& after);
Become();
~Become();
void Add(const Object& before, const Object& after);
void Forward();
void Exchange() { UNIMPLEMENTED(); }
void VisitObjectPointers(ObjectPointerVisitor* visitor);
// Convert an instance object into a dummy object,
// making the instance independent of its class.
@ -88,7 +113,8 @@ class Become : public AllStatic {
static void FollowForwardingPointers(Thread* thread);
private:
static void CrashDump(ObjectPtr before_obj, ObjectPtr after_obj);
MallocGrowableArray<ObjectPtr> pointers_;
DISALLOW_COPY_AND_ASSIGN(Become);
};
} // namespace dart

View file

@ -13,24 +13,25 @@
namespace dart {
void TestBecomeForward(Heap::Space before_space, Heap::Space after_space) {
// Allocate the container in old space to test the remembered set.
const Array& container = Array::Handle(Array::New(1, Heap::kOld));
const String& before_obj = String::Handle(String::New("old", before_space));
const String& after_obj = String::Handle(String::New("new", after_space));
container.SetAt(0, before_obj);
EXPECT(before_obj.ptr() != after_obj.ptr());
// Allocate the arrays in old space to test the remembered set.
const Array& before = Array::Handle(Array::New(1, Heap::kOld));
before.SetAt(0, before_obj);
const Array& after = Array::Handle(Array::New(1, Heap::kOld));
after.SetAt(0, after_obj);
Become::ElementsForwardIdentity(before, after);
Become become;
become.Add(before_obj, after_obj);
become.Forward();
EXPECT(before_obj.ptr() == after_obj.ptr());
EXPECT(container.At(0) == after_obj.ptr());
GCTestHelper::CollectAllGarbage();
EXPECT(before_obj.ptr() == after_obj.ptr());
EXPECT(container.At(0) == after_obj.ptr());
}
ISOLATE_UNIT_TEST_CASE(BecomeFowardOldToOld) {
@ -62,11 +63,9 @@ ISOLATE_UNIT_TEST_CASE(BecomeForwardPeer) {
EXPECT_EQ(peer, heap->GetPeer(before_obj.ptr()));
EXPECT_EQ(no_peer, heap->GetPeer(after_obj.ptr()));
const Array& before = Array::Handle(Array::New(1, Heap::kOld));
before.SetAt(0, before_obj);
const Array& after = Array::Handle(Array::New(1, Heap::kOld));
after.SetAt(0, after_obj);
Become::ElementsForwardIdentity(before, after);
Become become;
become.Add(before_obj, after_obj);
become.Forward();
EXPECT(before_obj.ptr() == after_obj.ptr());
EXPECT_EQ(peer, heap->GetPeer(before_obj.ptr()));
@ -86,11 +85,9 @@ ISOLATE_UNIT_TEST_CASE(BecomeForwardObjectId) {
EXPECT_EQ(id, heap->GetObjectId(before_obj.ptr()));
EXPECT_EQ(no_id, heap->GetObjectId(after_obj.ptr()));
const Array& before = Array::Handle(Array::New(1, Heap::kOld));
before.SetAt(0, before_obj);
const Array& after = Array::Handle(Array::New(1, Heap::kOld));
after.SetAt(0, after_obj);
Become::ElementsForwardIdentity(before, after);
Become become;
become.Add(before_obj, after_obj);
become.Forward();
EXPECT(before_obj.ptr() == after_obj.ptr());
EXPECT_EQ(id, heap->GetObjectId(before_obj.ptr()));
@ -114,11 +111,9 @@ ISOLATE_UNIT_TEST_CASE(BecomeForwardMessageId) {
EXPECT_EQ(no_id,
isolate->forward_table_old()->GetValueExclusive(after_obj.ptr()));
const Array& before = Array::Handle(Array::New(1, Heap::kOld));
before.SetAt(0, before_obj);
const Array& after = Array::Handle(Array::New(1, Heap::kOld));
after.SetAt(0, after_obj);
Become::ElementsForwardIdentity(before, after);
Become become;
become.Add(before_obj, after_obj);
become.Forward();
EXPECT(before_obj.ptr() == after_obj.ptr());
EXPECT_EQ(id,
@ -142,12 +137,9 @@ ISOLATE_UNIT_TEST_CASE(BecomeForwardRememberedObject) {
EXPECT(before_obj.ptr() != after_obj.ptr());
const Array& before = Array::Handle(Array::New(1, Heap::kOld));
before.SetAt(0, before_obj);
const Array& after = Array::Handle(Array::New(1, Heap::kOld));
after.SetAt(0, after_obj);
Become::ElementsForwardIdentity(before, after);
Become become;
become.Add(before_obj, after_obj);
become.Forward();
EXPECT(before_obj.ptr() == after_obj.ptr());
EXPECT(!after_obj.ptr()->untag()->IsRemembered());
@ -174,11 +166,9 @@ ISOLATE_UNIT_TEST_CASE(BecomeForwardRememberedCards) {
Object::Handle(card_remembered_array.At(0)).ToCString());
}
const Array& before = Array::Handle(Array::New(1, Heap::kOld));
before.SetAt(0, old_element);
const Array& after = Array::Handle(Array::New(1, Heap::kOld));
after.SetAt(0, new_element);
Become::ElementsForwardIdentity(before, after);
Become become;
become.Add(old_element, new_element);
become.Forward();
EXPECT(old_element.ptr() == new_element.ptr());
EXPECT(old_element.ptr()->IsNewObject());

View file

@ -187,7 +187,7 @@ class MarkingVisitorBase : public ObjectPointerVisitor {
// processing here is idempotent, so repeated visits only hurt performance
// but not correctness. Duplication is expected to be low.
// By the absence of a special case, we are treating WeakProperties as
// strong references here. This guarentees a WeakProperty will only be
// strong references here. This guarantees a WeakProperty will only be
// added to the delayed_weak_properties_ list of the worker that
// encounters it during ordinary marking. This is in the same spirit as
// the eliminated write barrier, which would have added the newly written

View file

@ -2899,6 +2899,10 @@ void IsolateGroup::VisitSharedPointers(ObjectPointerVisitor* visitor) {
visitor->VisitPointer(
reinterpret_cast<ObjectPtr*>(&(source()->loaded_blobs_)));
}
if (become() != nullptr) {
become()->VisitObjectPointers(visitor);
}
}
void IsolateGroup::VisitStackPointers(ObjectPointerVisitor* visitor,

View file

@ -46,6 +46,7 @@ namespace dart {
// Forward declarations.
class ApiState;
class BackgroundCompiler;
class Become;
class Capability;
class CodeIndexTable;
class Debugger;
@ -687,6 +688,9 @@ class IsolateGroup : public IntrusiveDListEntry<IsolateGroup> {
#endif
}
Become* become() const { return become_; }
void set_become(Become* become) { become_ = become; }
uint64_t id() const { return id_; }
static void Init();
@ -829,6 +833,7 @@ class IsolateGroup : public IntrusiveDListEntry<IsolateGroup> {
RelaxedAtomic<intptr_t> reload_every_n_stack_overflow_checks_;
ProgramReloadContext* program_reload_context_ = nullptr;
#endif
Become* become_ = nullptr;
#define ISOLATE_METRIC_VARIABLE(type, variable, name, unit) \
type metric_##variable##_;

View file

@ -178,8 +178,7 @@ InstanceMorpher::InstanceMorpher(
shared_class_table_(shared_class_table),
mapping_(mapping),
new_fields_offsets_(new_fields_offsets),
before_(zone, 16),
after_(zone, 16) {}
before_(zone, 16) {}
void InstanceMorpher::AddObject(ObjectPtr object) {
ASSERT(object->GetClassId() == cid_);
@ -187,65 +186,65 @@ void InstanceMorpher::AddObject(ObjectPtr object) {
before_.Add(&instance);
}
InstancePtr InstanceMorpher::Morph(const Instance& instance) const {
// Code can reference constants / canonical objects either directly in the
// instruction stream (ia32) or via an object pool.
//
// We have the following invariants:
//
// a) Those canonical objects don't change state (i.e. are not mutable):
// our optimizer can e.g. execute loads of such constants at
// compile-time.
//
// => We ensure that const-classes with live constants cannot be
// reloaded to become non-const classes (see Class::CheckReload).
//
// b) Those canonical objects live in old space: e.g. on ia32 the scavenger
// does not make the RX pages writable and therefore cannot update
// pointers embedded in the instruction stream.
//
// In order to maintain these invariants we ensure to always morph canonical
// objects to old space.
const bool is_canonical = instance.IsCanonical();
const Heap::Space space = is_canonical ? Heap::kOld : Heap::kNew;
const auto& result = Instance::Handle(
Z, Instance::NewFromCidAndSize(shared_class_table_, cid_, space));
void InstanceMorpher::CreateMorphedCopies(Become* become) {
Instance& after = Instance::Handle(Z);
Object& value = Object::Handle(Z);
for (intptr_t i = 0; i < before_.length(); i++) {
const Instance& before = *before_.At(i);
// We preserve the canonical bit of the object, since this object is present
// in the class's constants.
if (is_canonical) {
result.SetCanonical();
}
// Code can reference constants / canonical objects either directly in the
// instruction stream (ia32) or via an object pool.
//
// We have the following invariants:
//
// a) Those canonical objects don't change state (i.e. are not mutable):
// our optimizer can e.g. execute loads of such constants at
// compile-time.
//
// => We ensure that const-classes with live constants cannot be
// reloaded to become non-const classes (see Class::CheckReload).
//
// b) Those canonical objects live in old space: e.g. on ia32 the
// scavenger does not make the RX pages writable and therefore cannot
// update pointers embedded in the instruction stream.
//
// In order to maintain these invariants we ensure to always morph canonical
// objects to old space.
const bool is_canonical = before.IsCanonical();
const Heap::Space space = is_canonical ? Heap::kOld : Heap::kNew;
after = Instance::NewFromCidAndSize(shared_class_table_, cid_, space);
// We preserve the canonical bit of the object, since this object is present
// in the class's constants.
if (is_canonical) {
after.SetCanonical();
}
#if defined(HASH_IN_OBJECT_HEADER)
const uint32_t hash = Object::GetCachedHash(instance.ptr());
Object::SetCachedHashIfNotSet(result.ptr(), hash);
const uint32_t hash = Object::GetCachedHash(before.ptr());
Object::SetCachedHashIfNotSet(after.ptr(), hash);
#endif
// Morph the context from instance to result using mapping_.
Object& value = Object::Handle(Z);
for (intptr_t i = 0; i < mapping_->length(); i += 2) {
intptr_t from_offset = mapping_->At(i);
intptr_t to_offset = mapping_->At(i + 1);
ASSERT(from_offset > 0);
ASSERT(to_offset > 0);
value = instance.RawGetFieldAtOffset(from_offset);
result.RawSetFieldAtOffset(to_offset, value);
}
// Morph the context from [before] to [after] using mapping_.
for (intptr_t i = 0; i < mapping_->length(); i += 2) {
intptr_t from_offset = mapping_->At(i);
intptr_t to_offset = mapping_->At(i + 1);
ASSERT(from_offset > 0);
ASSERT(to_offset > 0);
value = before.RawGetFieldAtOffset(from_offset);
after.RawSetFieldAtOffset(to_offset, value);
}
for (intptr_t i = 0; i < new_fields_offsets_->length(); i++) {
const intptr_t field_offset = new_fields_offsets_->At(i);
result.RawSetFieldAtOffset(field_offset, Object::sentinel());
}
for (intptr_t i = 0; i < new_fields_offsets_->length(); i++) {
const intptr_t field_offset = new_fields_offsets_->At(i);
after.RawSetFieldAtOffset(field_offset, Object::sentinel());
}
// Convert the instance into a filler object.
Become::MakeDummyObject(instance);
return result.ptr();
}
// Convert the old instance into a filler object. We will switch to the new
// class table before the next heap walk, so there must be no instances of
// any class with the old size.
Become::MakeDummyObject(before);
void InstanceMorpher::CreateMorphedCopies() {
for (intptr_t i = 0; i < before_.length(); i++) {
const Instance& copy = Instance::Handle(Z, Morph(*before_.At(i)));
after_.Add(&copy);
become->Add(before, after);
}
}
@ -372,34 +371,6 @@ class LibraryMapTraits {
static uword Hash(const Object& obj) { return Library::Cast(obj).UrlHash(); }
};
class BecomeMapTraits {
public:
static bool ReportStats() { return false; }
static const char* Name() { return "BecomeMapTraits"; }
static bool IsMatch(const Object& a, const Object& b) {
return a.ptr() == b.ptr();
}
static uword Hash(const Object& obj) {
if (obj.IsLibrary()) {
return Library::Cast(obj).UrlHash();
} else if (obj.IsClass()) {
return String::HashRawSymbol(Class::Cast(obj).Name());
} else if (obj.IsField()) {
return String::HashRawSymbol(Field::Cast(obj).name());
} else if (obj.IsClosure()) {
return String::HashRawSymbol(
Function::Handle(Closure::Cast(obj).function()).name());
} else if (obj.IsLibraryPrefix()) {
return String::HashRawSymbol(LibraryPrefix::Cast(obj).name());
} else {
FATAL1("Unexpected type in become: %s\n", obj.ToCString());
}
return 0;
}
};
bool ProgramReloadContext::IsSameClass(const Class& a, const Class& b) {
// TODO(turnidge): We need to look at generic type arguments for
// synthetic mixin classes. Their names are not necessarily unique
@ -461,8 +432,6 @@ ProgramReloadContext::ProgramReloadContext(
removed_class_set_storage_(Array::null()),
old_libraries_set_storage_(Array::null()),
library_map_storage_(Array::null()),
become_map_storage_(Array::null()),
become_enum_mappings_(GrowableObjectArray::null()),
saved_root_library_(Library::null()),
saved_libraries_(GrowableObjectArray::null()) {
// NOTE: DO NOT ALLOCATE ANY RAW OBJECTS HERE. The ProgramReloadContext is not
@ -793,10 +762,7 @@ bool IsolateGroupReloadContext::Reload(bool force_reload,
// active.
ASSERT(HasNoTasks(heap));
const Array& before = Array::Handle(Z, Array::New(count));
const Array& after = Array::Handle(Z, Array::New(count));
MorphInstancesPhase1Allocate(&locator, before, after);
MorphInstancesPhase1Allocate(&locator, IG->become());
{
// Apply the new class table before "become". Become will replace
// all the instances of the old size with forwarding corpses, then
@ -811,7 +777,7 @@ bool IsolateGroupReloadContext::Reload(bool force_reload,
IG->program_reload_context()->DiscardSavedClassTable(
/*is_rollback=*/false);
}
MorphInstancesPhase2Become(before, after);
MorphInstancesPhase2Become(IG->become());
discard_class_tables = false;
}
@ -1044,10 +1010,6 @@ void ProgramReloadContext::ReloadPhase1AllocateStorageMapsAndCheckpoint() {
HashTables::New<UnorderedHashSet<LibraryMapTraits> >(4);
library_map_storage_ =
HashTables::New<UnorderedHashMap<LibraryMapTraits> >(4);
become_map_storage_ = HashTables::New<UnorderedHashMap<BecomeMapTraits> >(4);
// Keep a separate array for enum mappings to avoid having to invoke
// hashCode on the instances.
become_enum_mappings_ = GrowableObjectArray::New(Heap::kOld);
// While reloading everything we do must be reversible so that we can abort
// safely if the reload fails. This function stashes things to the side and
@ -1639,39 +1601,7 @@ void ProgramReloadContext::CommitBeforeInstanceMorphing() {
}
void ProgramReloadContext::CommitAfterInstanceMorphing() {
{
const GrowableObjectArray& become_enum_mappings =
GrowableObjectArray::Handle(become_enum_mappings_);
UnorderedHashMap<BecomeMapTraits> become_map(become_map_storage_);
intptr_t replacement_count =
become_map.NumOccupied() + become_enum_mappings.Length() / 2;
const Array& before =
Array::Handle(Array::New(replacement_count, Heap::kOld));
const Array& after =
Array::Handle(Array::New(replacement_count, Heap::kOld));
Object& obj = Object::Handle();
intptr_t replacement_index = 0;
UnorderedHashMap<BecomeMapTraits>::Iterator it(&become_map);
while (it.MoveNext()) {
const intptr_t entry = it.Current();
obj = become_map.GetKey(entry);
before.SetAt(replacement_index, obj);
obj = become_map.GetPayload(entry, 0);
after.SetAt(replacement_index, obj);
replacement_index++;
}
for (intptr_t i = 0; i < become_enum_mappings.Length(); i += 2) {
obj = become_enum_mappings.At(i);
before.SetAt(replacement_index, obj);
obj = become_enum_mappings.At(i + 1);
after.SetAt(replacement_index, obj);
replacement_index++;
}
ASSERT(replacement_index == replacement_count);
become_map.Release();
Become::ElementsForwardIdentity(before, after);
}
become_.Forward();
// Rehash constants map for all classes. Constants are hashed by content, and
// content may have changed from fields being added or removed.
@ -1747,8 +1677,7 @@ void IsolateGroupReloadContext::ReportReasonsForCancelling() {
void IsolateGroupReloadContext::MorphInstancesPhase1Allocate(
ObjectLocator* locator,
const Array& before,
const Array& after) {
Become* become) {
ASSERT(HasInstanceMorphers());
if (FLAG_trace_reload) {
@ -1764,27 +1693,14 @@ void IsolateGroupReloadContext::MorphInstancesPhase1Allocate(
(count > 1) ? "s" : "");
for (intptr_t i = 0; i < instance_morphers_.length(); i++) {
instance_morphers_.At(i)->CreateMorphedCopies();
instance_morphers_.At(i)->CreateMorphedCopies(become);
}
// Create the inputs for Become.
intptr_t index = 0;
for (intptr_t i = 0; i < instance_morphers_.length(); i++) {
InstanceMorpher* morpher = instance_morphers_.At(i);
for (intptr_t j = 0; j < morpher->before()->length(); j++) {
before.SetAt(index, *morpher->before()->At(j));
after.SetAt(index, *morpher->after()->At(j));
index++;
}
}
ASSERT(index == count);
}
void IsolateGroupReloadContext::MorphInstancesPhase2Become(const Array& before,
const Array& after) {
void IsolateGroupReloadContext::MorphInstancesPhase2Become(Become* become) {
ASSERT(HasInstanceMorphers());
Become::ElementsForwardIdentity(before, after);
become->Forward();
// The heap now contains only instances with the new size. Ordinary GC is safe
// again.
}
@ -2550,26 +2466,12 @@ void ProgramReloadContext::AddStaticFieldMapping(const Field& old_field,
const Field& new_field) {
ASSERT(old_field.is_static());
ASSERT(new_field.is_static());
AddBecomeMapping(old_field, new_field);
}
void ProgramReloadContext::AddBecomeMapping(const Object& old,
const Object& neu) {
ASSERT(become_map_storage_ != Array::null());
UnorderedHashMap<BecomeMapTraits> become_map(become_map_storage_);
bool update = become_map.UpdateOrInsert(old, neu);
ASSERT(!update);
become_map_storage_ = become_map.Release().ptr();
}
void ProgramReloadContext::AddEnumBecomeMapping(const Object& old,
const Object& neu) {
const GrowableObjectArray& become_enum_mappings =
GrowableObjectArray::Handle(become_enum_mappings_);
become_enum_mappings.Add(old);
become_enum_mappings.Add(neu);
ASSERT((become_enum_mappings.Length() % 2) == 0);
become_.Add(old, neu);
}
void ProgramReloadContext::RebuildDirectSubclasses() {

View file

@ -13,6 +13,7 @@
#include "vm/globals.h"
#include "vm/growable_array.h"
#include "vm/hash_map.h"
#include "vm/heap/become.h"
#include "vm/log.h"
#include "vm/object.h"
@ -68,24 +69,15 @@ class InstanceMorpher : public ZoneAllocated {
ZoneGrowableArray<intptr_t>* new_fields_offsets);
virtual ~InstanceMorpher() {}
// Called on each instance that needs to be morphed.
InstancePtr Morph(const Instance& instance) const;
// Adds an object to be morphed.
void AddObject(ObjectPtr object);
// Create the morphed objects based on the before() list.
void CreateMorphedCopies();
void CreateMorphedCopies(Become* become);
// Append the morpher info to JSON array.
void AppendTo(JSONArray* array);
// Returns the list of objects that need to be morphed.
const GrowableArray<const Instance*>* before() const { return &before_; }
// Returns the list of morphed objects (matches order in before()).
const GrowableArray<const Instance*>* after() const { return &after_; }
// Returns the cid associated with the from_ and to_ class.
intptr_t cid() const { return cid_; }
@ -100,7 +92,6 @@ class InstanceMorpher : public ZoneAllocated {
ZoneGrowableArray<intptr_t>* new_fields_offsets_;
GrowableArray<const Instance*> before_;
GrowableArray<const Instance*> after_;
};
class ReasonForCancelling : public ZoneAllocated {
@ -220,10 +211,8 @@ class IsolateGroupReloadContext {
void CheckpointSharedClassTable();
void MorphInstancesPhase1Allocate(ObjectLocator* locator,
const Array& before,
const Array& after);
void MorphInstancesPhase2Become(const Array& before, const Array& after);
void MorphInstancesPhase1Allocate(ObjectLocator* locator, Become* become);
void MorphInstancesPhase2Become(Become* become);
void ForEachIsolate(std::function<void(Isolate*)> callback);
@ -401,9 +390,10 @@ class ProgramReloadContext {
const Library& original);
void AddStaticFieldMapping(const Field& old_field, const Field& new_field);
void AddBecomeMapping(const Object& old, const Object& neu);
void AddEnumBecomeMapping(const Object& old, const Object& neu);
void RebuildDirectSubclasses();
Become become_;
ObjectPtr* from() {
return reinterpret_cast<ObjectPtr*>(&old_classes_set_storage_);
}
@ -412,8 +402,6 @@ class ProgramReloadContext {
ArrayPtr removed_class_set_storage_;
ArrayPtr old_libraries_set_storage_;
ArrayPtr library_map_storage_;
ArrayPtr become_map_storage_;
GrowableObjectArrayPtr become_enum_mappings_;
LibraryPtr saved_root_library_;
GrowableObjectArrayPtr saved_libraries_;
ObjectPtr* to() { return reinterpret_cast<ObjectPtr*>(&saved_libraries_); }

View file

@ -397,7 +397,7 @@ void Class::ReplaceEnum(ProgramReloadContext* reload_context,
enum_ident.ToCString());
bool removed = enum_map.Remove(enum_ident);
ASSERT(removed);
reload_context->AddEnumBecomeMapping(old_enum_value, enum_value);
reload_context->AddBecomeMapping(old_enum_value, enum_value);
}
}
enums_deleted = enum_map.NumOccupied() > 0;
@ -409,13 +409,13 @@ void Class::ReplaceEnum(ProgramReloadContext* reload_context,
// Map the old E.values array to the new E.values array.
ASSERT(!old_enum_values.IsNull());
ASSERT(!enum_values.IsNull());
reload_context->AddEnumBecomeMapping(old_enum_values, enum_values);
reload_context->AddBecomeMapping(old_enum_values, enum_values);
// Map the old E._deleted_enum_sentinel to the new E._deleted_enum_sentinel.
ASSERT(!old_deleted_enum_sentinel.IsNull());
ASSERT(!deleted_enum_sentinel.IsNull());
reload_context->AddEnumBecomeMapping(old_deleted_enum_sentinel,
deleted_enum_sentinel);
reload_context->AddBecomeMapping(old_deleted_enum_sentinel,
deleted_enum_sentinel);
if (enums_deleted) {
// Map all deleted enums to the deleted enum sentinel value.
@ -432,8 +432,7 @@ void Class::ReplaceEnum(ProgramReloadContext* reload_context,
ASSERT(!enum_ident.IsNull());
old_enum_value ^= enum_map.GetOrNull(enum_ident);
VTIR_Print("Element `%s` was deleted\n", enum_ident.ToCString());
reload_context->AddEnumBecomeMapping(old_enum_value,
deleted_enum_sentinel);
reload_context->AddBecomeMapping(old_enum_value, deleted_enum_sentinel);
}
enum_map.Release();
}

View file

@ -53,7 +53,7 @@ void VirtualMemoryCompressedHeap::Init(void* compressed_heap_region,
ASSERT(Utils::IsAligned(base_, kCompressedHeapPageSize));
ASSERT(Utils::IsAligned(size_, kCompressedHeapPageSize));
// base_ is not necessarily 4GB-aligned, because on some systems we can't make
// a large enough reservation to guarentee it. Instead, we have only the
// a large enough reservation to guarantee it. Instead, we have only the
// weaker property that all addresses in [base_, base_ + size_) have the
// same upper 32 bits, which is what we really need for compressed pointers.
intptr_t mask = ~(kCompressedHeapAlignment - 1);