[vm] Place only Dart heap pages in the 4GB compressible region.

Don't allocate zones, timeline events, or profile samples in the compressible region, since these allocations don't yield compressed pointers and compete for a limited resource.

TEST=ci
Bug: b/196510517
Change-Id: I4fc2f0d67060f927fa10d241b15b1cae3b73d919
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/212400
Commit-Queue: Ryan Macnak <rmacnak@google.com>
Reviewed-by: Siva Annamalai <asiva@google.com>
Ryan Macnak 2021-09-08 01:16:57 +00:00 committed by commit-bot@chromium.org
parent 77467fe2f7
commit bc43e97b74
14 changed files with 79 additions and 39 deletions
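
The diffs below thread a new is_compressed flag through every VirtualMemory allocation so that only Dart heap pages land in the 4GB compressible region. As a rough sketch of what a post-change call site looks like (illustrative only; "example-buffer" is a made-up region name, but Allocate and OUT_OF_MEMORY both appear in the diffs below):

// Non-heap allocations pass is_compressed=false and stay out of the
// limited 4GB compressible region; only Dart heap pages pass true.
VirtualMemory* memory = VirtualMemory::Allocate(
    /*size=*/VirtualMemory::PageSize(),
    /*is_executable=*/false,
    /*is_compressed=*/false,
    /*name=*/"example-buffer");
if (memory == nullptr) {
  OUT_OF_MEMORY();
}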


@@ -48,7 +48,9 @@ void NativeCallbackTrampolines::AllocateTrampoline() {
   VirtualMemory* const memory = VirtualMemory::AllocateAligned(
       /*size=*/VirtualMemory::PageSize(),
       /*alignment=*/VirtualMemory::PageSize(),
-      /*is_executable=*/true, /*name=*/"Dart VM FFI callback trampolines");
+      /*is_executable=*/true,
+      /*is_compressed=*/false,
+      /*name=*/"Dart VM FFI callback trampolines");
   memory->Protect(VirtualMemory::kReadWrite);
   if (memory == nullptr) {


@@ -84,8 +84,8 @@ static void TestFreeList(VirtualMemory* region,
 TEST_CASE(FreeList) {
   FreeList* free_list = new FreeList();
   const intptr_t kBlobSize = 1 * MB;
-  VirtualMemory* region =
-      VirtualMemory::Allocate(kBlobSize, /* is_executable */ false, "test");
+  VirtualMemory* region = VirtualMemory::Allocate(
+      kBlobSize, /* is_executable */ false, /* is_compressed */ false, "test");
   TestFreeList(region, free_list, false);
@@ -97,8 +97,8 @@ TEST_CASE(FreeList) {
 TEST_CASE(FreeListProtected) {
   FreeList* free_list = new FreeList();
   const intptr_t kBlobSize = 1 * MB;
-  VirtualMemory* region =
-      VirtualMemory::Allocate(kBlobSize, /* is_executable */ false, "test");
+  VirtualMemory* region = VirtualMemory::Allocate(
+      kBlobSize, /*is_executable*/ false, /*is_compressed*/ false, "test");
   TestFreeList(region, free_list, true);
@@ -113,8 +113,8 @@ TEST_CASE(FreeListProtectedTinyObjects) {
   const intptr_t kObjectSize = 2 * kWordSize;
   uword* objects = new uword[kBlobSize / kObjectSize];
-  VirtualMemory* blob =
-      VirtualMemory::Allocate(kBlobSize, /* is_executable = */ false, "test");
+  VirtualMemory* blob = VirtualMemory::Allocate(
+      kBlobSize, /*is_executable*/ false, /*is_compressed*/ false, "test");
   ASSERT(Utils::IsAligned(blob->start(), 4096));
   blob->Protect(VirtualMemory::kReadWrite);
@@ -154,8 +154,8 @@ TEST_CASE(FreeListProtectedVariableSizeObjects) {
     objects[i] = static_cast<uword>(NULL);
   }
-  VirtualMemory* blob =
-      VirtualMemory::Allocate(kBlobSize, /* is_executable = */ false, "test");
+  VirtualMemory* blob = VirtualMemory::Allocate(
+      kBlobSize, /*is_executable*/ false, /*is_compressed*/ false, "test");
   blob->Protect(VirtualMemory::kReadWrite);
   // Enqueue the large blob as one free block.
@@ -197,9 +197,9 @@ static void TestRegress38528(intptr_t header_overlap) {
   // "<other code>" region is also still executable (and not writable).
   std::unique_ptr<FreeList> free_list(new FreeList());
   const uword page = VirtualMemory::PageSize();
-  std::unique_ptr<VirtualMemory> blob(
-      VirtualMemory::Allocate(2 * page,
-                              /*is_executable=*/false, "test"));
+  std::unique_ptr<VirtualMemory> blob(VirtualMemory::Allocate(
+      2 * page,
+      /*is_executable=*/false, /*is_compressed*/ false, "test"));
   const intptr_t remainder_size = page / 2;
   const intptr_t alloc_size = page - header_overlap * kObjectAlignment;
   void* const other_code =


@@ -47,9 +47,11 @@ OldPage* OldPage::Allocate(intptr_t size_in_words,
                            PageType type,
                            const char* name) {
   const bool executable = type == kExecutable;
+  const bool compressed = !executable;
   VirtualMemory* memory = VirtualMemory::AllocateAligned(
-      size_in_words << kWordSizeLog2, kOldPageSize, executable, name);
+      size_in_words << kWordSizeLog2, kOldPageSize, executable, compressed,
+      name);
   if (memory == NULL) {
     return NULL;
   }


@@ -669,9 +669,10 @@ NewPage* NewPage::Allocate() {
   if (memory == nullptr) {
     const intptr_t alignment = kNewPageSize;
     const bool is_executable = false;
+    const bool compressed = true;
     const char* const name = Heap::RegionName(Heap::kNew);
-    memory =
-        VirtualMemory::AllocateAligned(size, alignment, is_executable, name);
+    memory = VirtualMemory::AllocateAligned(size, alignment, is_executable,
+                                            compressed, name);
   }
   if (memory == nullptr) {
     return nullptr;  // Out of memory.


@@ -185,8 +185,10 @@ SampleBlockBuffer::SampleBlockBuffer(intptr_t blocks,
                                      intptr_t samples_per_block) {
   const intptr_t size = Utils::RoundUp(
       blocks * samples_per_block * sizeof(Sample), VirtualMemory::PageSize());
-  const bool kNotExecutable = false;
-  memory_ = VirtualMemory::Allocate(size, kNotExecutable, "dart-profiler");
+  const bool executable = false;
+  const bool compressed = false;
+  memory_ =
+      VirtualMemory::Allocate(size, executable, compressed, "dart-profiler");
   if (memory_ == NULL) {
     OUT_OF_MEMORY();
   }
@@ -341,8 +343,10 @@ Sample* SampleBlockBuffer::ReserveSampleImpl(Isolate* isolate,
 AllocationSampleBuffer::AllocationSampleBuffer(intptr_t capacity) {
   const intptr_t size =
       Utils::RoundUp(capacity * sizeof(Sample), VirtualMemory::PageSize());
-  const bool kNotExecutable = false;
-  memory_ = VirtualMemory::Allocate(size, kNotExecutable, "dart-profiler");
+  const bool executable = false;
+  const bool compressed = false;
+  memory_ =
+      VirtualMemory::Allocate(size, executable, compressed, "dart-profiler");
   if (memory_ == NULL) {
     OUT_OF_MEMORY();
   }


@@ -143,8 +143,10 @@ class BacktrackStack {
     // https://github.com/flutter/flutter/issues/29007 for examples.
     // So instead we directly ask OS to provide us memory.
     if (memory_ == nullptr) {
+      const bool executable = false;
+      const bool compressed = false;
       memory_ = std::unique_ptr<VirtualMemory>(VirtualMemory::Allocate(
-          sizeof(intptr_t) * kBacktrackStackSize, /*is_executable=*/false,
+          sizeof(intptr_t) * kBacktrackStackSize, executable, compressed,
           "regexp-backtrack-stack"));
     }
   }


@@ -1122,8 +1122,10 @@ TimelineEventFixedBufferRecorder::TimelineEventFixedBufferRecorder(
   intptr_t size = Utils::RoundUp(num_blocks_ * sizeof(TimelineEventBlock),
                                  VirtualMemory::PageSize());
-  const bool kNotExecutable = false;
-  memory_ = VirtualMemory::Allocate(size, kNotExecutable, "dart-timeline");
+  const bool executable = false;
+  const bool compressed = false;
+  memory_ =
+      VirtualMemory::Allocate(size, executable, compressed, "dart-timeline");
   if (memory_ == NULL) {
     OUT_OF_MEMORY();
   }


@@ -50,12 +50,15 @@ class VirtualMemory {
   // the requested size cannot be allocated, NULL is returned.
   static VirtualMemory* Allocate(intptr_t size,
                                  bool is_executable,
+                                 bool is_compressed,
                                  const char* name) {
-    return AllocateAligned(size, PageSize(), is_executable, name);
+    return AllocateAligned(size, PageSize(), is_executable, is_compressed,
+                           name);
   }
   static VirtualMemory* AllocateAligned(intptr_t size,
                                         intptr_t alignment,
                                         bool is_executable,
+                                        bool is_compressed,
                                         const char* name);
   // Returns the cached page size. Use only if Init() has been called.


@@ -112,6 +112,7 @@ bool VirtualMemory::DualMappingEnabled() {
 VirtualMemory* VirtualMemory::AllocateAligned(intptr_t size,
                                               intptr_t alignment,
                                               bool is_executable,
+                                              bool is_compressed,
                                               const char* name) {
   // When FLAG_write_protect_code is active, code memory (indicated by
   // is_executable = true) is allocated as non-executable and later
@@ -134,8 +135,13 @@ VirtualMemory* VirtualMemory::AllocateAligned(intptr_t size,
   ASSERT((ZX_VM_ALIGN_1KB <= align_flag) && (align_flag <= ZX_VM_ALIGN_4GB));
 #if defined(DART_COMPRESSED_POINTERS)
-  zx_handle_t vmar =
-      is_executable ? zx_vmar_root_self() : compressed_heap_vmar_;
+  zx_handle_t vmar;
+  if (is_compressed) {
+    RELEASE_ASSERT(!is_executable);
+    vmar = compressed_heap_vmar_;
+  } else {
+    vmar = zx_vmar_root_self();
+  }
 #else
   zx_handle_t vmar = zx_vmar_root_self();
 #endif  // defined(DART_COMPRESSED_POINTERS)


@@ -160,7 +160,10 @@ void VirtualMemory::Init() {
   if (FLAG_dual_map_code) {
     intptr_t size = PageSize();
     intptr_t alignment = kOldPageSize;
-    VirtualMemory* vm = AllocateAligned(size, alignment, true, "memfd-test");
+    bool executable = true;
+    bool compressed = false;
+    VirtualMemory* vm =
+        AllocateAligned(size, alignment, executable, compressed, "memfd-test");
     if (vm == nullptr) {
       LOG_INFO("memfd_create not supported; disabling dual mapping of code.\n");
       FLAG_dual_map_code = false;
@@ -280,6 +283,7 @@ static void* MapAligned(void* hint,
 VirtualMemory* VirtualMemory::AllocateAligned(intptr_t size,
                                               intptr_t alignment,
                                               bool is_executable,
+                                              bool is_compressed,
                                               const char* name) {
   // When FLAG_write_protect_code is active, code memory (indicated by
   // is_executable = true) is allocated as non-executable and later
@@ -293,7 +297,8 @@ VirtualMemory* VirtualMemory::AllocateAligned(intptr_t size,
   ASSERT(name != nullptr);
 #if defined(DART_COMPRESSED_POINTERS)
-  if (!is_executable) {
+  if (is_compressed) {
+    RELEASE_ASSERT(!is_executable);
     MemoryRegion region =
         VirtualMemoryCompressedHeap::Allocate(size, alignment);
     if (region.pointer() == nullptr) {


@@ -22,17 +22,12 @@ bool IsZero(char* begin, char* end) {
 VM_UNIT_TEST_CASE(AllocateVirtualMemory) {
   const intptr_t kVirtualMemoryBlockSize = 64 * KB;
   VirtualMemory* vm =
-      VirtualMemory::Allocate(kVirtualMemoryBlockSize, false, "test");
+      VirtualMemory::Allocate(kVirtualMemoryBlockSize, false, false, "test");
   EXPECT(vm != NULL);
   EXPECT(vm->address() != NULL);
   EXPECT_EQ(vm->start(), reinterpret_cast<uword>(vm->address()));
-#if defined(DART_COMPRESSED_POINTERS)
-  EXPECT_EQ(kCompressedHeapPageSize, vm->size());
-  EXPECT_EQ(vm->start() + kCompressedHeapPageSize, vm->end());
-#else
   EXPECT_EQ(kVirtualMemoryBlockSize, vm->size());
   EXPECT_EQ(vm->start() + kVirtualMemoryBlockSize, vm->end());
-#endif  // defined(DART_COMPRESSED_POINTERS)
   EXPECT(vm->Contains(vm->start()));
   EXPECT(vm->Contains(vm->start() + 1));
   EXPECT(vm->Contains(vm->start() + kVirtualMemoryBlockSize - 1));
@@ -63,7 +58,7 @@ VM_UNIT_TEST_CASE(AllocateAlignedVirtualMemory) {
   intptr_t kIterations = kHeapPageSize / kVirtualPageSize;
   for (intptr_t i = 0; i < kIterations; i++) {
     VirtualMemory* vm = VirtualMemory::AllocateAligned(
-        kHeapPageSize, kHeapPageSize, false, "test");
+        kHeapPageSize, kHeapPageSize, false, false, "test");
     EXPECT(Utils::IsAligned(vm->start(), kHeapPageSize));
     EXPECT_EQ(kHeapPageSize, vm->size());
     delete vm;
@@ -76,19 +71,19 @@ VM_UNIT_TEST_CASE(FreeVirtualMemory) {
   const intptr_t kIterations = 900;  // Enough to exhaust 32-bit address space.
   for (intptr_t i = 0; i < kIterations; ++i) {
     VirtualMemory* vm =
-        VirtualMemory::Allocate(kVirtualMemoryBlockSize, false, "test");
+        VirtualMemory::Allocate(kVirtualMemoryBlockSize, false, false, "test");
     delete vm;
   }
   // Check that truncation does not introduce leaks.
   for (intptr_t i = 0; i < kIterations; ++i) {
     VirtualMemory* vm =
-        VirtualMemory::Allocate(kVirtualMemoryBlockSize, false, "test");
+        VirtualMemory::Allocate(kVirtualMemoryBlockSize, false, false, "test");
     vm->Truncate(kVirtualMemoryBlockSize / 2);
     delete vm;
   }
   for (intptr_t i = 0; i < kIterations; ++i) {
     VirtualMemory* vm =
-        VirtualMemory::Allocate(kVirtualMemoryBlockSize, true, "test");
+        VirtualMemory::Allocate(kVirtualMemoryBlockSize, true, false, "test");
     vm->Truncate(0);
     delete vm;
   }


@@ -82,6 +82,7 @@ bool VirtualMemory::DualMappingEnabled() {
 VirtualMemory* VirtualMemory::AllocateAligned(intptr_t size,
                                               intptr_t alignment,
                                               bool is_executable,
+                                              bool is_compressed,
                                               const char* name) {
   // When FLAG_write_protect_code is active, code memory (indicated by
   // is_executable = true) is allocated as non-executable and later
@@ -91,7 +92,8 @@ VirtualMemory* VirtualMemory::AllocateAligned(intptr_t size,
   ASSERT(Utils::IsAligned(alignment, PageSize()));
 #if defined(DART_COMPRESSED_POINTERS)
-  if (!is_executable) {
+  if (is_compressed) {
+    RELEASE_ASSERT(!is_executable);
     MemoryRegion region =
         VirtualMemoryCompressedHeap::Allocate(size, alignment);
     if (region.pointer() == nullptr) {


@@ -87,7 +87,9 @@ Zone::Segment* Zone::Segment::New(intptr_t size, Zone::Segment* next) {
     }
   }
   if (memory == nullptr) {
-    memory = VirtualMemory::Allocate(size, false, "dart-zone");
+    bool executable = false;
+    bool compressed = false;
+    memory = VirtualMemory::Allocate(size, executable, compressed, "dart-zone");
     total_size_.fetch_add(size);
   }
   if (memory == nullptr) {


@@ -242,4 +242,18 @@ ISOLATE_UNIT_TEST_CASE(StressMallocThroughZones) {
 #endif  // !defined(PRODUCT)
 }
+
+#if defined(DART_COMPRESSED_POINTERS)
+ISOLATE_UNIT_TEST_CASE(ZonesNotLimitedByCompressedHeap) {
+  StackZone stack_zone(Thread::Current());
+  Zone* zone = stack_zone.GetZone();
+  size_t total = 0;
+  while (total <= (4u * GB)) {
+    size_t chunk_size = 512u * MB;
+    zone->AllocUnsafe(chunk_size);
+    total += chunk_size;
+  }
+}
+#endif  // defined(DART_COMPRESSED_POINTERS)
 
 }  // namespace dart
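
For background on why the compressible region matters (a sketch, not part of this commit; CompressPointer and DecompressPointer are hypothetical illustrations, not the VM's actual helpers): with DART_COMPRESSED_POINTERS, a heap pointer is stored as a 32-bit offset from a 4GB-aligned base, so only memory mapped inside that single 4GB window can ever be referenced through a compressed pointer. Zones, timeline buffers, and profiler samples never yield compressed pointers, so placing them in the window only consumed the limited reservation.

#include <cstdint>

// Hypothetical illustration: a compressed pointer is the low 32 bits of an
// address relative to a 4GB-aligned heap base. Addresses outside
// [heap_base, heap_base + 4GB) simply cannot be encoded this way.
uint32_t CompressPointer(uintptr_t heap_base, uintptr_t address) {
  // Valid only when address lies within the 4GB window above heap_base.
  return static_cast<uint32_t>(address - heap_base);
}

uintptr_t DecompressPointer(uintptr_t heap_base, uint32_t compressed) {
  return heap_base + static_cast<uintptr_t>(compressed);
}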