[vm] Free readonly header bit so it can be used for other purposes.

This is a reland of https://dart-review.googlesource.com/c/sdk/+/97340, rebased on top of the removal of the GraphMarked bit. That bit was not compatible with this ReadOnly -> InVMIsolateHeap change, because write-pages are not covered by the Contains check.

Change-Id: I34c6421afb4baeafa5a449787020dab9fa800d05
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/97545
Reviewed-by: Ryan Macnak <rmacnak@google.com>
Commit-Queue: Alexander Aprelev <aam@google.com>
Alexander Aprelev <aam@google.com>, 2019-03-22 23:46:51 +00:00, committed by commit-bot@chromium.org
parent 466b6884dc
commit 59b8a9c4bb
28 changed files with 77 additions and 73 deletions
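
The gist of the change, before the per-file diffs: "is this object read-only?" is no longer cached in an object-header bit but recomputed from the object's address. Below is a minimal, self-contained sketch of the two range checks involved. The Heap/HeapPage stand-ins and the free functions are simplified for illustration only; the SDK's actual code is RawObject::InVMIsolateHeap in raw_object.cc and PageSpace::IsObjectFromImagePages in pages.cc, both visible in the diffs that follow.

#include <cstdint>

// Simplified stand-ins for the SDK's Heap and HeapPage (illustration
// only; the real classes live under runtime/vm/).
using uword = uintptr_t;

struct HeapPage {
  uword start;
  uword end;
  HeapPage* next;
  bool Contains(uword addr) const { return start <= addr && addr < end; }
};

struct Heap {
  uword start;
  uword end;
  bool Contains(uword addr) const { return start <= addr && addr < end; }
};

// 1. VM-isolate heap: one containment query against the shared VM
//    isolate's heap replaces the old kReadOnlyBit test
//    (cf. RawObject::InVMIsolateHeap in the raw_object.cc diff).
bool InVMIsolateHeap(const Heap& vm_isolate_heap, uword obj_addr) {
  return vm_isolate_heap.Contains(obj_addr);
}

// 2. Image pages: walk the page space's linked list of AppJIT/AppAOT
//    image pages (cf. PageSpace::IsObjectFromImagePages in the
//    pages.cc diff).
bool IsObjectFromImagePages(const HeapPage* image_pages, uword obj_addr) {
  for (const HeapPage* page = image_pages; page != nullptr;
       page = page->next) {
    if (page->Contains(obj_addr)) return true;
  }
  return false;
}

// The serializer's "already read-only" predicate is the disjunction.
bool IsAlreadyReadOnly(const Heap& vm_heap, const HeapPage* image_pages,
                       uword obj_addr) {
  return InVMIsolateHeap(vm_heap, obj_addr) ||
         IsObjectFromImagePages(image_pages, obj_addr);
}

The trade is an address-range walk instead of a single bit test, in exchange for freeing header bit 6 for other uses; the first diff below shows the serializer adopting exactly this disjunction.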


@@ -76,7 +76,6 @@ void Deserializer::InitializeHeader(RawObject* raw,
   uint32_t tags = 0;
   tags = RawObject::ClassIdTag::update(class_id, tags);
   tags = RawObject::SizeTag::update(size, tags);
-  tags = RawObject::ReadOnlyBit::update(false, tags);
   tags = RawObject::CanonicalBit::update(is_canonical, tags);
   tags = RawObject::OldBit::update(true, tags);
   tags = RawObject::OldAndNotMarkedBit::update(true, tags);
@@ -1761,7 +1760,8 @@ class RODataSerializationCluster : public SerializationCluster {
     // will be loaded into read-only memory. Extra bytes due to allocation
     // rounding need to be deterministically set for reliable deduplication in
     // shared images.
-    if (object->IsReadOnly()) {
+    if (object->InVMIsolateHeap() ||
+        s->isolate()->heap()->old_space()->IsObjectFromImagePages(object)) {
       // This object is already read-only.
     } else {
       Object::FinalizeReadOnlyObject(object);
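
A gloss on the comment about allocation rounding: two logically identical objects can still differ in the unused slack between their useful size and their allocation-rounded size, which would defeat byte-wise deduplication of RO data, so read-only finalization must fill that slack deterministically. The snippet below is a hypothetical illustration of the idea only; FinalizeReadOnlyObject's actual handling lives in object.cc, and kObjectAlignment here is a made-up constant.

#include <cstddef>
#include <cstdint>
#include <cstring>

// Hypothetical illustration (not SDK code): zero-fill the padding
// between an object's useful size and its allocation-rounded size so
// that byte-wise comparison, and therefore deduplication, is
// deterministic.
constexpr size_t kObjectAlignment = 16;  // made-up value for the sketch

size_t RoundedAllocationSize(size_t size) {
  return (size + kObjectAlignment - 1) & ~(kObjectAlignment - 1);
}

void ZeroAllocationSlack(uint8_t* object_start, size_t useful_size) {
  const size_t allocated = RoundedAllocationSize(useful_size);
  std::memset(object_start + useful_size, 0, allocated - useful_size);
}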


@@ -1506,7 +1506,7 @@ void Precompiler::AttachOptimizedTypeTestingStub() {
   for (intptr_t i = 0; i < types.length(); i++) {
     const AbstractType& type = types.At(i);
-    if (type.IsReadOnly()) {
+    if (type.InVMIsolateHeap()) {
       // The only important types in the vm isolate are "dynamic"/"void", which
       // will get their optimized top-type testing stub installed at creation.
       continue;


@@ -33,7 +33,7 @@ void FlowGraphCompiler::ArchSpecificInitialization() {
     const auto& stub =
         Code::ZoneHandle(object_store->write_barrier_wrappers_stub());
-    if (!stub.IsReadOnly()) {
+    if (!stub.InVMIsolateHeap()) {
       assembler_->generate_invoke_write_barrier_wrapper_ =
           [&](Condition condition, Register reg) {
             const intptr_t offset_into_target =
@@ -46,7 +46,7 @@ void FlowGraphCompiler::ArchSpecificInitialization() {
     const auto& array_stub =
         Code::ZoneHandle(object_store->array_write_barrier_stub());
-    if (!array_stub.IsReadOnly()) {
+    if (!array_stub.InVMIsolateHeap()) {
       assembler_->generate_invoke_array_write_barrier_ =
           [&](Condition condition) {
             AddPcRelativeCallStubTarget(array_stub);
@@ -952,7 +952,7 @@ void FlowGraphCompiler::GenerateCall(TokenPosition token_pos,
                                      RawPcDescriptors::Kind kind,
                                      LocationSummary* locs) {
   if (FLAG_precompiled_mode && FLAG_use_bare_instructions &&
-      !stub.IsReadOnly()) {
+      !stub.InVMIsolateHeap()) {
     AddPcRelativeCallStubTarget(stub);
     __ GenerateUnRelocatedPcRelativeCall();
     EmitCallsiteMetadata(token_pos, DeoptId::kNone, kind, locs);


@@ -32,7 +32,7 @@ void FlowGraphCompiler::ArchSpecificInitialization() {
     const auto& stub =
         Code::ZoneHandle(object_store->write_barrier_wrappers_stub());
-    if (!stub.IsReadOnly()) {
+    if (!stub.InVMIsolateHeap()) {
       assembler_->generate_invoke_write_barrier_wrapper_ = [&](Register reg) {
         const intptr_t offset_into_target =
             Thread::WriteBarrierWrappersOffsetForRegister(reg);
@@ -43,7 +43,7 @@ void FlowGraphCompiler::ArchSpecificInitialization() {
     const auto& array_stub =
         Code::ZoneHandle(object_store->array_write_barrier_stub());
-    if (!array_stub.IsReadOnly()) {
+    if (!array_stub.InVMIsolateHeap()) {
       assembler_->generate_invoke_array_write_barrier_ = [&]() {
         AddPcRelativeCallStubTarget(array_stub);
         assembler_->GenerateUnRelocatedPcRelativeCall();
@@ -945,7 +945,7 @@ void FlowGraphCompiler::GenerateCall(TokenPosition token_pos,
                                      RawPcDescriptors::Kind kind,
                                      LocationSummary* locs) {
   if (FLAG_precompiled_mode && FLAG_use_bare_instructions &&
-      !stub.IsReadOnly()) {
+      !stub.InVMIsolateHeap()) {
     AddPcRelativeCallStubTarget(stub);
     __ GenerateUnRelocatedPcRelativeCall();
     EmitCallsiteMetadata(token_pos, DeoptId::kNone, kind, locs);


@@ -31,7 +31,7 @@ void FlowGraphCompiler::ArchSpecificInitialization() {
     const auto& stub =
         Code::ZoneHandle(object_store->write_barrier_wrappers_stub());
-    if (!stub.IsReadOnly()) {
+    if (!stub.InVMIsolateHeap()) {
      assembler_->generate_invoke_write_barrier_wrapper_ = [&](Register reg) {
        const intptr_t offset_into_target =
            Thread::WriteBarrierWrappersOffsetForRegister(reg);
@@ -42,7 +42,7 @@ void FlowGraphCompiler::ArchSpecificInitialization() {
     const auto& array_stub =
         Code::ZoneHandle(object_store->array_write_barrier_stub());
-    if (!array_stub.IsReadOnly()) {
+    if (!array_stub.InVMIsolateHeap()) {
       assembler_->generate_invoke_array_write_barrier_ = [&]() {
         AddPcRelativeCallStubTarget(array_stub);
         assembler_->GenerateUnRelocatedPcRelativeCall();
@@ -941,7 +941,7 @@ void FlowGraphCompiler::GenerateCall(TokenPosition token_pos,
                                      RawPcDescriptors::Kind kind,
                                      LocationSummary* locs) {
   if (FLAG_precompiled_mode && FLAG_use_bare_instructions &&
-      !stub.IsReadOnly()) {
+      !stub.InVMIsolateHeap()) {
     AddPcRelativeCallStubTarget(stub);
     __ GenerateUnRelocatedPcRelativeCall();
     EmitCallsiteMetadata(token_pos, DeoptId::kNone, kind, locs);


@@ -261,7 +261,7 @@ void HierarchyInfo::BuildRangesForJIT(ClassTable* table,
                                       bool use_subtype_test,
                                       bool include_abstract,
                                       bool exclude_null) {
-  if (dst_klass.IsReadOnly()) {
+  if (dst_klass.InVMIsolateHeap()) {
     BuildRangesFor(table, ranges, dst_klass, use_subtype_test, include_abstract,
                    exclude_null);
     return;


@@ -3121,7 +3121,7 @@ class TemplateDartCall : public TemplateDefinition<kInputCount, Throws> {
         argument_names_(argument_names),
         arguments_(arguments),
         token_pos_(token_pos) {
-    ASSERT(argument_names.IsZoneHandle() || argument_names.IsReadOnly());
+    ASSERT(argument_names.IsZoneHandle() || argument_names.InVMIsolateHeap());
   }

   RawString* Selector() {


@@ -3087,7 +3087,7 @@ void CheckStackOverflowInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
           : object_store->stack_overflow_stub_without_fpu_regs_stub());
   const bool using_shared_stub = locs()->call_on_shared_slow_path();
   if (FLAG_precompiled_mode && FLAG_use_bare_instructions &&
-      using_shared_stub && !stub.IsReadOnly()) {
+      using_shared_stub && !stub.InVMIsolateHeap()) {
     compiler->AddPcRelativeCallStubTarget(stub);
     __ GenerateUnRelocatedPcRelativeCall(LS);
@@ -5796,7 +5796,7 @@ void CheckNullInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
           : object_store->null_error_stub_without_fpu_regs_stub());
   const bool using_shared_stub = locs()->call_on_shared_slow_path();
   if (FLAG_precompiled_mode && FLAG_use_bare_instructions &&
-      using_shared_stub && !stub.IsReadOnly()) {
+      using_shared_stub && !stub.InVMIsolateHeap()) {
     compiler->AddPcRelativeCallStubTarget(stub);
     __ GenerateUnRelocatedPcRelativeCall(EQUAL);


@@ -2826,7 +2826,7 @@ class CheckStackOverflowSlowPath
           : object_store->stack_overflow_stub_without_fpu_regs_stub());
   if (FLAG_precompiled_mode && FLAG_use_bare_instructions &&
-      using_shared_stub && !stub.IsReadOnly()) {
+      using_shared_stub && !stub.InVMIsolateHeap()) {
     compiler->AddPcRelativeCallStubTarget(stub);
     __ GenerateUnRelocatedPcRelativeCall();
@@ -5104,7 +5104,7 @@ void NullErrorSlowPath::EmitSharedStubCall(FlowGraphCompiler* compiler,
       live_fpu_regs ? object_store->null_error_stub_with_fpu_regs_stub()
                     : object_store->null_error_stub_without_fpu_regs_stub());
   if (FLAG_precompiled_mode && FLAG_use_bare_instructions &&
-      using_shared_stub && !stub.IsReadOnly()) {
+      using_shared_stub && !stub.InVMIsolateHeap()) {
     compiler->AddPcRelativeCallStubTarget(stub);
     compiler->assembler()->GenerateUnRelocatedPcRelativeCall();
     return;


@@ -39,7 +39,7 @@ bool CHA::HasSubclasses(const Class& cls) {
   // read-only.
   // TODO(fschneider): Enable tracking of CHA dependent code for VM heap
   // classes.
-  if (cls.IsReadOnly()) return true;
+  if (cls.InVMIsolateHeap()) return true;

   if (cls.IsObjectClass()) {
     // Class Object has subclasses, although we do not keep track of them.
@@ -58,7 +58,7 @@ bool CHA::HasSubclasses(intptr_t cid) const {

 bool CHA::ConcreteSubclasses(const Class& cls,
                              GrowableArray<intptr_t>* class_ids) {
-  if (cls.IsReadOnly()) return false;
+  if (cls.InVMIsolateHeap()) return false;
   if (cls.IsObjectClass()) return false;

   if (!cls.is_abstract()) {
@@ -87,7 +87,7 @@ bool CHA::IsImplemented(const Class& cls) {
   // read-only.
   // TODO(fschneider): Enable tracking of CHA dependent code for VM heap
   // classes.
-  if (cls.IsReadOnly()) return true;
+  if (cls.InVMIsolateHeap()) return true;

   return cls.is_implemented();
 }
@@ -129,7 +129,7 @@ bool CHA::HasOverride(const Class& cls,
   // read-only.
   // TODO(fschneider): Enable tracking of CHA dependent code for VM heap
   // classes.
-  if (cls.IsReadOnly()) return true;
+  if (cls.InVMIsolateHeap()) return true;

   // Subclasses of Object are not tracked by CHA. Safely assume that overrides
   // exist.


@@ -1045,7 +1045,7 @@ bool ConstantEvaluator::GetCachedConstant(intptr_t kernel_offset,
   }

   bool is_present = false;
-  ASSERT(!script_.IsReadOnly());
+  ASSERT(!script_.InVMIsolateHeap());
   if (script_.compile_time_constants() == Array::null()) {
     return false;
   }
@@ -1075,7 +1075,7 @@ void ConstantEvaluator::CacheConstantValue(intptr_t kernel_offset,
     return;
   }
   const intptr_t kInitialConstMapSize = 16;
-  ASSERT(!script_.IsReadOnly());
+  ASSERT(!script_.InVMIsolateHeap());
   if (script_.compile_time_constants() == Array::null()) {
     const Array& array = Array::Handle(
         HashTables::New<KernelConstantsMap>(kInitialConstMapSize, Heap::kNew));


@@ -769,7 +769,7 @@ uword Code::EntryPointOf(const dart::Code& code) {
 }

 bool CanEmbedAsRawPointerInGeneratedCode(const dart::Object& obj) {
-  return obj.IsSmi() || obj.IsReadOnly();
+  return obj.IsSmi() || obj.InVMIsolateHeap();
 }

 word ToRawPointer(const dart::Object& a) {


@@ -492,7 +492,7 @@ void Api::Init() {
 }

 static Dart_Handle InitNewReadOnlyApiHandle(RawObject* raw) {
-  ASSERT(raw->IsReadOnly());
+  ASSERT(raw->InVMIsolateHeap());
   LocalHandle* ref = Dart::AllocateReadOnlyApiHandle();
   ref->set_raw(raw);
   return ref->apiHandle();


@@ -302,7 +302,7 @@ class HashTable : public ValueObject {
   }

   void UpdateCollisions(intptr_t collisions) const {
     if (KeyTraits::ReportStats()) {
-      if (data_->raw()->IsReadOnly()) {
+      if (data_->raw()->InVMIsolateHeap()) {
         return;
       }
       AdjustSmiValueAt(kNumProbesIndex, collisions + 1);


@@ -193,14 +193,14 @@ void Become::CrashDump(RawObject* before_obj, RawObject* after_obj) {
   OS::PrintErr("BEFORE ADDRESS: %p\n", before_obj);
   OS::PrintErr("BEFORE IS HEAP OBJECT: %s",
                before_obj->IsHeapObject() ? "YES" : "NO");
-  OS::PrintErr("BEFORE IS READ ONLY OBJECT: %s",
-               before_obj->IsReadOnly() ? "YES" : "NO");
+  OS::PrintErr("BEFORE IN VMISOLATE HEAP OBJECT: %s",
+               before_obj->InVMIsolateHeap() ? "YES" : "NO");

   OS::PrintErr("AFTER ADDRESS: %p\n", after_obj);
   OS::PrintErr("AFTER IS HEAP OBJECT: %s",
                after_obj->IsHeapObject() ? "YES" : "NO");
-  OS::PrintErr("AFTER IS READ ONLY OBJECT: %s",
-               after_obj->IsReadOnly() ? "YES" : "NO");
+  OS::PrintErr("AFTER IN VMISOLATE HEAP OBJECT: %s",
+               after_obj->InVMIsolateHeap() ? "YES" : "NO");

   if (before_obj->IsHeapObject()) {
     OS::PrintErr("BEFORE OBJECT CLASS ID=%" Pd "\n", before_obj->GetClassId());
@@ -240,7 +240,7 @@ void Become::ElementsForwardIdentity(const Array& before, const Array& after) {
       CrashDump(before_obj, after_obj);
       FATAL("become: Cannot become immediates");
     }
-    if (before_obj->IsReadOnly()) {
+    if (before_obj->InVMIsolateHeap()) {
       CrashDump(before_obj, after_obj);
       FATAL("become: Cannot forward VM heap objects");
     }


@@ -1371,6 +1371,18 @@ void PageSpace::SetupImagePage(void* pointer, uword size, bool is_executable) {
   image_pages_ = page;
 }

+bool PageSpace::IsObjectFromImagePages(dart::RawObject* object) {
+  uword object_addr = RawObject::ToAddr(object);
+  HeapPage* image_page = image_pages_;
+  while (image_page != nullptr) {
+    if (image_page->Contains(object_addr)) {
+      return true;
+    }
+    image_page = image_page->next();
+  }
+  return false;
+}
+
 PageSpaceController::PageSpaceController(Heap* heap,
                                          int heap_growth_ratio,
                                          int heap_growth_max,


@@ -445,6 +445,8 @@ class PageSpace {
     enable_concurrent_mark_ = enable_concurrent_mark;
   }

+  bool IsObjectFromImagePages(RawObject* object);
+
  private:
   // Ids for time and data records in Heap::GCStats.
   enum {


@@ -336,7 +336,6 @@ void ImageWriter::WriteROData(WriteStream* stream) {
     // Write object header with the mark and read-only bits set.
     uword marked_tags = obj.raw()->ptr()->tags_;
-    marked_tags = RawObject::ReadOnlyBit::update(true, marked_tags);
     marked_tags = RawObject::OldBit::update(true, marked_tags);
     marked_tags = RawObject::OldAndNotMarkedBit::update(false, marked_tags);
     marked_tags = RawObject::OldAndNotRememberedBit::update(true, marked_tags);
@@ -487,7 +486,6 @@ void AssemblyImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
     // Write Instructions with the mark and read-only bits set.
     uword marked_tags = insns.raw_ptr()->tags_;
-    marked_tags = RawObject::ReadOnlyBit::update(true, marked_tags);
     marked_tags = RawObject::OldBit::update(true, marked_tags);
     marked_tags = RawObject::OldAndNotMarkedBit::update(false, marked_tags);
     marked_tags =
@@ -737,7 +735,6 @@ void BlobImageWriter::WriteText(WriteStream* clustered_stream, bool vm) {
     // Write Instructions with the mark and read-only bits set.
     uword marked_tags = insns.raw_ptr()->tags_;
-    marked_tags = RawObject::ReadOnlyBit::update(true, marked_tags);
     marked_tags = RawObject::OldBit::update(true, marked_tags);
     marked_tags = RawObject::OldAndNotMarkedBit::update(false, marked_tags);
     marked_tags = RawObject::OldAndNotRememberedBit::update(true, marked_tags);


@@ -40,7 +40,7 @@ Message::Message(Dart_Port dest_port,
       snapshot_length_(0),
       finalizable_data_(NULL),
       priority_(priority) {
-  ASSERT(!raw_obj->IsHeapObject() || raw_obj->IsReadOnly());
+  ASSERT(!raw_obj->IsHeapObject() || raw_obj->InVMIsolateHeap());
   ASSERT((priority == kNormalPriority) ||
          (delivery_failure_port == kIllegalPort));
   ASSERT(IsRaw());


@@ -1034,7 +1034,6 @@ class FinalizeVMIsolateVisitor : public ObjectVisitor {
     ASSERT(!obj->IsForwardingCorpse());
     if (!obj->IsFreeListElement()) {
       obj->SetMarkBitUnsynchronized();
-      obj->SetReadOnlyUnsynchronized();
       Object::FinalizeReadOnlyObject(obj);
 #if defined(HASH_IN_OBJECT_HEADER)
       // These objects end up in the read-only VM isolate which is shared
@@ -1224,7 +1223,6 @@ void Object::MakeUnusedSpaceTraversable(const Object& obj,
         reinterpret_cast<RawTypedData*>(RawObject::FromAddr(addr));
     uword new_tags = RawObject::ClassIdTag::update(kTypedDataInt8ArrayCid, 0);
     new_tags = RawObject::SizeTag::update(leftover_size, new_tags);
-    new_tags = RawObject::ReadOnlyBit::update(false, new_tags);
     const bool is_old = obj.raw()->IsOldObject();
     new_tags = RawObject::OldBit::update(is_old, new_tags);
     new_tags = RawObject::OldAndNotMarkedBit::update(is_old, new_tags);
@@ -1255,7 +1253,6 @@ void Object::MakeUnusedSpaceTraversable(const Object& obj,
     RawObject* raw = reinterpret_cast<RawObject*>(RawObject::FromAddr(addr));
     uword new_tags = RawObject::ClassIdTag::update(kInstanceCid, 0);
     new_tags = RawObject::SizeTag::update(leftover_size, new_tags);
-    new_tags = RawObject::ReadOnlyBit::update(false, new_tags);
     const bool is_old = obj.raw()->IsOldObject();
     new_tags = RawObject::OldBit::update(is_old, new_tags);
     new_tags = RawObject::OldAndNotMarkedBit::update(is_old, new_tags);
@@ -2040,8 +2037,8 @@ RawError* Object::Init(Isolate* isolate,
 }

 #if defined(DEBUG)
-bool Object::IsReadOnly() const {
-  if (FLAG_verify_handles && raw()->IsReadOnly()) {
+bool Object::InVMIsolateHeap() const {
+  if (FLAG_verify_handles && raw()->InVMIsolateHeap()) {
     Heap* vm_isolate_heap = Dart::vm_isolate()->heap();
     uword addr = RawObject::ToAddr(raw());
     if (!vm_isolate_heap->Contains(addr)) {
@@ -2050,7 +2047,7 @@ bool Object::InVMIsolateHeap() const {
       ASSERT(vm_isolate_heap->Contains(addr));
     }
   }
-  return raw()->IsReadOnly();
+  return raw()->InVMIsolateHeap();
 }
 #endif  // DEBUG
@@ -2076,7 +2073,6 @@ void Object::InitializeObject(uword address, intptr_t class_id, intptr_t size) {
   ASSERT(class_id != kIllegalCid);
   tags = RawObject::ClassIdTag::update(class_id, tags);
   tags = RawObject::SizeTag::update(size, tags);
-  tags = RawObject::ReadOnlyBit::update(false, tags);
   const bool is_old =
       (address & kNewObjectAlignmentOffset) == kOldObjectAlignmentOffset;
   tags = RawObject::OldBit::update(is_old, tags);
@@ -5681,7 +5677,7 @@ void Function::AttachBytecode(const Bytecode& value) const {
   ASSERT(FLAG_enable_interpreter || FLAG_use_bytecode_compiler);
   ASSERT(!value.IsNull());
   // Finish setting up code before activating it.
-  if (!value.IsReadOnly()) {
+  if (!value.InVMIsolateHeap()) {
     value.set_function(*this);
   }
   StorePointer(&raw_ptr()->bytecode_, value.raw());
@@ -16098,7 +16094,7 @@ RawInstance* Instance::CheckAndCanonicalize(Thread* thread,
     return result.raw();
   }
   if (IsNew()) {
-    ASSERT((isolate == Dart::vm_isolate()) || !IsReadOnly());
+    ASSERT((isolate == Dart::vm_isolate()) || !InVMIsolateHeap());
     // Create a canonical object in old space.
     result ^= Object::Clone(*this, Heap::kOld);
   } else {
@@ -17443,7 +17439,7 @@ RawAbstractType* Type::Canonicalize(TrailPtr trail) const {
     ASSERT(!IsFunctionType());
     Type& type = Type::Handle(zone, cls.declaration_type());
     if (type.IsNull()) {
-      ASSERT(!cls.raw()->IsReadOnly() || (isolate == Dart::vm_isolate()));
+      ASSERT(!cls.raw()->InVMIsolateHeap() || (isolate == Dart::vm_isolate()));
       // Canonicalize the type arguments of the supertype, if any.
       TypeArguments& type_args = TypeArguments::Handle(zone, arguments());
       type_args = type_args.Canonicalize(trail);


@@ -307,9 +307,9 @@ class Object {
   bool IsNew() const { return raw()->IsNewObject(); }
   bool IsOld() const { return raw()->IsOldObject(); }
 #if defined(DEBUG)
-  bool IsReadOnly() const;
+  bool InVMIsolateHeap() const;
 #else
-  bool IsReadOnly() const { return raw()->IsReadOnly(); }
+  bool InVMIsolateHeap() const { return raw()->InVMIsolateHeap(); }
 #endif  // DEBUG

   // Print the object on stdout for debugging.


@@ -44,7 +44,7 @@ class ObjectGraph::Stack : public ObjectPointerVisitor {
     Heap* heap = isolate()->heap();
     for (RawObject** current = first; current <= last; ++current) {
       if ((*current)->IsHeapObject() &&
-          !(*current)->IsReadOnly() &&
+          !(*current)->InVMIsolateHeap() &&
           heap->GetObjectId(*current) == 0) {  // not visited yet
         if (!include_vm_objects_ && !IsUserClass((*current)->GetClassId())) {
           continue;
@@ -513,7 +513,7 @@ class WritePointerVisitor : public ObjectPointerVisitor {
   virtual void VisitPointers(RawObject** first, RawObject** last) {
     for (RawObject** current = first; current <= last; ++current) {
       RawObject* object = *current;
-      if (!object->IsHeapObject() || object->IsReadOnly()) {
+      if (!object->IsHeapObject() || object->InVMIsolateHeap()) {
         // Ignore smis and objects in the VM isolate for now.
         // TODO(koda): To track which field each pointer corresponds to,
         // we'll need to encode which fields were omitted here.


@@ -84,7 +84,7 @@ class ParsedFunction : public ZoneAllocated {
 #if defined(DEBUG)
     if (list == NULL) return;
     for (intptr_t i = 0; i < list->length(); i++) {
-      ASSERT(list->At(i)->IsZoneHandle() || list->At(i)->IsReadOnly());
+      ASSERT(list->At(i)->IsZoneHandle() || list->At(i)->InVMIsolateHeap());
     }
 #endif
   }


@@ -320,7 +320,7 @@ void ProgramVisitor::DedupPcDescriptors() {

     void Visit(const Function& function) {
       bytecode_ = function.bytecode();
-      if (!bytecode_.IsNull() && !bytecode_.IsReadOnly()) {
+      if (!bytecode_.IsNull() && !bytecode_.InVMIsolateHeap()) {
         pc_descriptor_ = bytecode_.pc_descriptors();
         if (!pc_descriptor_.IsNull()) {
           pc_descriptor_ = DedupPcDescriptor(pc_descriptor_);
@@ -654,7 +654,7 @@ void ProgramVisitor::DedupLists() {
       if (!function.IsSignatureFunction() &&
           !function.IsClosureFunction() &&
           (function.name() != Symbols::Call().raw()) &&
-          !list_.IsReadOnly()) {
+          !list_.InVMIsolateHeap()) {
         // Parameter types not needed for function type tests.
         for (intptr_t i = 0; i < list_.Length(); i++) {
           list_.SetAt(i, Object::dynamic_type());
@@ -669,7 +669,8 @@ void ProgramVisitor::DedupLists() {
       if (!list_.IsNull()) {
         // Preserve parameter names in case of recompilation for the JIT.
         if (FLAG_precompiled_mode) {
-          if (!function.HasOptionalNamedParameters() && !list_.IsReadOnly()) {
+          if (!function.HasOptionalNamedParameters() &&
+              !list_.InVMIsolateHeap()) {
             // Parameter names not needed for resolution.
             for (intptr_t i = 0; i < list_.Length(); i++) {
               list_.SetAt(i, Symbols::OptimizedOut());
@@ -682,7 +683,7 @@ void ProgramVisitor::DedupLists() {
     }

     RawArray* DedupList(const Array& list) {
-      if (list.IsReadOnly()) {
+      if (list.InVMIsolateHeap()) {
         // Avoid using read-only VM objects for de-duplication.
         return list.raw();
       }


@@ -15,6 +15,10 @@
 namespace dart {

+bool RawObject::InVMIsolateHeap() const {
+  return Dart::vm_isolate()->heap()->Contains(ToAddr(this));
+}
+
 void RawObject::Validate(Isolate* isolate) const {
   if (Object::void_class_ == reinterpret_cast<RawClass*>(kHeapObjectTag)) {
     // Validation relies on properly initialized class classes. Skip if the


@@ -121,10 +121,10 @@ class RawObject {
     kOldBit = 3,                  // Incremental barrier source.
     kOldAndNotRememberedBit = 4,  // Generational barrier source.
     kCanonicalBit = 5,
-    kReadOnlyBit = 6,
-    kReservedBit = 7,
+    kReservedTagPos = 6,
+    kReservedTagSize = 2,

-    kSizeTagPos = 8,
+    kSizeTagPos = kReservedTagPos + kReservedTagSize,  // = 8
     kSizeTagSize = 8,
     kClassIdTagPos = kSizeTagPos + kSizeTagSize,  // = 16
     kClassIdTagSize = 16,
@@ -192,15 +192,15 @@ class RawObject {
   class CanonicalBit : public BitField<uint32_t, bool, kCanonicalBit, 1> {};
-  class ReservedBit : public BitField<uint32_t, bool, kReservedBit, 1> {};
-  class ReadOnlyBit : public BitField<uint32_t, bool, kReadOnlyBit, 1> {};
   class OldBit : public BitField<uint32_t, bool, kOldBit, 1> {};
   class OldAndNotRememberedBit
       : public BitField<uint32_t, bool, kOldAndNotRememberedBit, 1> {};
+  class ReservedBits
+      : public BitField<uint32_t, intptr_t, kReservedTagPos, kReservedTagSize> {
+  };

   bool IsWellFormed() const {
     uword value = reinterpret_cast<uword>(this);
     return (value & kSmiTagMask) == 0 ||
@@ -283,13 +283,7 @@ class RawObject {
   void SetCanonical() { UpdateTagBit<CanonicalBit>(true); }
   void ClearCanonical() { UpdateTagBit<CanonicalBit>(false); }

-  // Objects in the VM-isolate's heap or on an image page from an AppJIT or
-  // AppAOT snapshot are permanently read-only. They may never be modified
-  // again. In particular, they cannot be marked.
-  bool IsReadOnly() const { return ReadOnlyBit::decode(ptr()->tags_); }
-  void SetReadOnlyUnsynchronized() {
-    ptr()->tags_ = ReadOnlyBit::update(true, ptr()->tags_);
-  }
+  bool InVMIsolateHeap() const;

   // Support for GC remembered bit.
   bool IsRemembered() const {
@@ -453,8 +447,6 @@ class RawObject {
     return reinterpret_cast<uword>(raw_obj->ptr());
   }

-  static bool IsReadOnly(intptr_t value) { return ReadOnlyBit::decode(value); }
-
   static bool IsCanonical(intptr_t value) {
     return CanonicalBit::decode(value);
   }
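
To see why the rest of the header layout is untouched: the two single-bit fields at positions 6 and 7 (kReadOnlyBit and the already-unused kReservedBit) collapse into one two-bit ReservedBits field, so kSizeTagPos stays at 8. A condensed, self-contained sketch, assuming a simplified version of the SDK's BitField template:

#include <cassert>
#include <cstdint>

// Simplified stand-in for the SDK's BitField<S, T, position, size>:
// packs a T value into bits [position, position + size) of an S word.
template <typename S, typename T, int position, int size>
class BitField {
 public:
  static constexpr S mask() { return (static_cast<S>(1) << size) - 1; }
  static S update(T value, S original) {
    return (original & ~(mask() << position)) |
           (static_cast<S>(value) << position);
  }
  static T decode(S value) {
    return static_cast<T>((value >> position) & mask());
  }
};

// Layout after this commit: bits 6..7 form one reserved field, and the
// size tag still begins at bit 8, so all higher fields keep their spots.
constexpr int kReservedTagPos = 6;
constexpr int kReservedTagSize = 2;
constexpr int kSizeTagPos = kReservedTagPos + kReservedTagSize;  // = 8

using ReservedBits =
    BitField<uint32_t, intptr_t, kReservedTagPos, kReservedTagSize>;

int main() {
  uint32_t tags = 0;
  tags = ReservedBits::update(3, tags);     // both freed bits usable
  assert(ReservedBits::decode(tags) == 3);  // value round-trips
  assert(kSizeTagPos == 8);                 // layout above is unchanged
  return 0;
}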


@@ -1025,7 +1025,7 @@ bool SnapshotWriter::CheckAndWritePredefinedObject(RawObject* rawobj) {

   // Now check if it is an object from the VM isolate. These objects are shared
   // by all isolates.
-  if (rawobj->IsReadOnly() && HandleVMIsolateObject(rawobj)) {
+  if (rawobj->InVMIsolateHeap() && HandleVMIsolateObject(rawobj)) {
     return true;
   }


@@ -740,7 +740,7 @@ intptr_t Thread::OffsetFromThread(const Object& object) {
   // [object] is in fact a [Code] object.
   if (object.IsCode()) {
 #define COMPUTE_OFFSET(type_name, member_name, expr, default_init_value)      \
-  ASSERT((expr)->IsReadOnly());                                                \
+  ASSERT((expr)->InVMIsolateHeap());                                           \
   if (object.raw() == expr) {                                                  \
     return Thread::member_name##offset();                                     \
   }