[vm, gc] Check if we should start concurrent marking after external allocations and background code finalization.

Bug: https://github.com/flutter/flutter/issues/36808
Change-Id: I2577ffa22e4dec5d659198948d490b4e17aae8db
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/111664
Commit-Queue: Ryan Macnak <rmacnak@google.com>
Reviewed-by: Martin Kustermann <kustermann@google.com>
This commit is contained in:
Ryan Macnak 2019-08-02 22:08:59 +00:00 committed by commit-bot@chromium.org
parent 57b9ce7fb7
commit 6653611ae4
6 changed files with 111 additions and 54 deletions

View file

@ -678,10 +678,7 @@ RawCode* CompileParsedFunctionHelper::Compile(CompilationPipeline* pipeline) {
// executable).
{
CheckIfBackgroundCompilerIsBeingStopped(optimized());
SafepointOperationScope safepoint_scope(thread());
// Do not Garbage collect during this stage and instead allow the
// heap to grow.
NoHeapGrowthControlScope no_growth_control;
ForceGrowthSafepointOperationScope safepoint_scope(thread());
CheckIfBackgroundCompilerIsBeingStopped(optimized());
*result =
FinalizeCompilation(&assembler, &graph_compiler, flow_graph);
@ -1314,21 +1311,14 @@ void BackgroundCompiler::Run() {
void BackgroundCompiler::Compile(const Function& function) {
ASSERT(Thread::Current()->IsMutatorThread());
// TODO(srdjan): Checking different strategy for collecting garbage
// accumulated by background compiler.
if (isolate_->heap()->NeedsGarbageCollection()) {
isolate_->heap()->CollectMostGarbage();
}
{
MonitorLocker ml(&queue_monitor_);
ASSERT(running_);
if (function_queue()->ContainsObj(function)) {
return;
}
QueueElement* elem = new QueueElement(function);
function_queue()->Add(elem);
ml.Notify();
MonitorLocker ml(&queue_monitor_);
ASSERT(running_);
if (function_queue()->ContainsObj(function)) {
return;
}
QueueElement* elem = new QueueElement(function);
function_queue()->Add(elem);
ml.Notify();
}
void BackgroundCompiler::VisitPointers(ObjectPointerVisitor* visitor) {

View file

@ -1550,16 +1550,16 @@ TEST_CASE(DartAPI_MalformedStringToUTF8) {
class GCTestHelper : public AllStatic {
public:
static void CollectNewSpace() {
Isolate::Current()->heap()->new_space()->Scavenge();
Thread* thread = Thread::Current();
ASSERT(thread->execution_state() == Thread::kThreadInVM);
thread->heap()->new_space()->Scavenge();
}
static void WaitForGCTasks() {
Thread* thread = Thread::Current();
PageSpace* old_space = thread->isolate()->heap()->old_space();
MonitorLocker ml(old_space->tasks_lock());
while (old_space->tasks() > 0) {
ml.WaitWithSafepointCheck(thread);
}
ASSERT(thread->execution_state() == Thread::kThreadInVM);
thread->heap()->WaitForMarkerTasks(thread);
thread->heap()->WaitForSweeperTasks(thread);
}
};
@ -3391,8 +3391,8 @@ TEST_CASE(DartAPI_WeakPersistentHandleExternalAllocationSizeNewspaceGC) {
{
TransitionNativeToVM transition(thread);
Isolate::Current()->heap()->CollectGarbage(Heap::kOld);
GCTestHelper::WaitForGCTasks();
EXPECT(heap->ExternalInWords(Heap::kOld) == 0);
GCTestHelper::WaitForGCTasks(); // Finalize GC for accurate live size.
EXPECT_EQ(0, heap->ExternalInWords(Heap::kOld));
}
}
@ -3403,7 +3403,11 @@ TEST_CASE(DartAPI_WeakPersistentHandleExternalAllocationSizeOldspaceGC) {
Dart_Handle live = AllocateOldString("live");
EXPECT_VALID(live);
Dart_WeakPersistentHandle weak = NULL;
EXPECT_EQ(0, isolate->heap()->ExternalInWords(Heap::kOld));
{
TransitionNativeToVM transition(thread);
GCTestHelper::WaitForGCTasks(); // Finalize GC for accurate live size.
EXPECT_EQ(0, isolate->heap()->ExternalInWords(Heap::kOld));
}
const intptr_t kSmallExternalSize = 1 * KB;
{
Dart_EnterScope();
@ -3414,14 +3418,22 @@ TEST_CASE(DartAPI_WeakPersistentHandleExternalAllocationSizeOldspaceGC) {
EXPECT_VALID(AsHandle(weak));
Dart_ExitScope();
}
EXPECT_EQ(kSmallExternalSize,
isolate->heap()->ExternalInWords(Heap::kOld) * kWordSize);
{
TransitionNativeToVM transition(thread);
GCTestHelper::WaitForGCTasks(); // Finalize GC for accurate live size.
EXPECT_EQ(kSmallExternalSize,
isolate->heap()->ExternalInWords(Heap::kOld) * kWordSize);
}
// Large enough to trigger GC in old space. Not actually allocated.
const intptr_t kHugeExternalSize = (kWordSize == 4) ? 513 * MB : 1025 * MB;
Dart_NewWeakPersistentHandle(live, NULL, kHugeExternalSize, NopCallback);
// Expect small garbage to be collected.
EXPECT_EQ(kHugeExternalSize,
isolate->heap()->ExternalInWords(Heap::kOld) * kWordSize);
{
TransitionNativeToVM transition(thread);
GCTestHelper::WaitForGCTasks(); // Finalize GC for accurate live size.
// Expect small garbage to be collected.
EXPECT_EQ(kHugeExternalSize,
isolate->heap()->ExternalInWords(Heap::kOld) * kWordSize);
}
Dart_ExitScope();
}
@ -3456,6 +3468,7 @@ TEST_CASE(DartAPI_WeakPersistentHandleExternalAllocationSizeOddReferents) {
{
TransitionNativeToVM transition(thread);
Isolate::Current()->heap()->CollectGarbage(Heap::kOld);
GCTestHelper::WaitForGCTasks(); // Finalize GC for accurate live size.
EXPECT_EQ(0, heap->ExternalInWords(Heap::kOld));
}
}

View file

@ -179,21 +179,23 @@ void Heap::AllocateExternal(intptr_t cid, intptr_t size, Space space) {
if (space == kNew) {
isolate()->AssertCurrentThreadIsMutator();
new_space_.AllocateExternal(cid, size);
if (new_space_.ExternalInWords() > (4 * new_space_.CapacityInWords())) {
// Attempt to free some external allocation by a scavenge. (If the total
// remains above the limit, next external alloc will trigger another.)
CollectGarbage(kScavenge, kExternal);
// Promotion may have pushed old space over its limit.
if (old_space_.NeedsGarbageCollection()) {
CollectGarbage(kMarkSweep, kExternal);
}
if (new_space_.ExternalInWords() <= (4 * new_space_.CapacityInWords())) {
return;
}
// Attempt to free some external allocation by a scavenge. (If the total
// remains above the limit, next external alloc will trigger another.)
CollectGarbage(kScavenge, kExternal);
// Promotion may have pushed old space over its limit. Fall through for old
// space GC check.
} else {
ASSERT(space == kOld);
old_space_.AllocateExternal(cid, size);
if (old_space_.NeedsGarbageCollection()) {
CollectMostGarbage(kExternal);
}
}
if (old_space_.NeedsGarbageCollection()) {
CollectGarbage(kMarkSweep, kExternal);
} else {
CheckStartConcurrentMarking(Thread::Current(), kExternal);
}
}
@ -1061,13 +1063,13 @@ Heap::Space Heap::SpaceForExternal(intptr_t size) const {
NoHeapGrowthControlScope::NoHeapGrowthControlScope()
: ThreadStackResource(Thread::Current()) {
Heap* heap = reinterpret_cast<Isolate*>(isolate())->heap();
Heap* heap = isolate()->heap();
current_growth_controller_state_ = heap->GrowthControlState();
heap->DisableGrowthControl();
}
NoHeapGrowthControlScope::~NoHeapGrowthControlScope() {
Heap* heap = reinterpret_cast<Isolate*>(isolate())->heap();
Heap* heap = isolate()->heap();
heap->SetGrowthControlState(current_growth_controller_state_);
}

View file

@ -4,6 +4,7 @@
#include "vm/heap/safepoint.h"
#include "vm/heap/heap.h"
#include "vm/thread.h"
#include "vm/thread_registry.h"
@ -37,6 +38,50 @@ SafepointOperationScope::~SafepointOperationScope() {
handler->ResumeThreads(T);
}
// Brings all mutator threads of the isolate to a safepoint and disables heap
// growth control for the duration of the scope, so allocations made while the
// threads are parked force the heap to grow rather than trigger a collection.
// The destructor restores growth control and performs any deferred GC work.
ForceGrowthSafepointOperationScope::ForceGrowthSafepointOperationScope(
    Thread* T)
    : ThreadStackResource(T) {
  ASSERT(T != NULL);
  Isolate* I = T->isolate();
  ASSERT(I != NULL);

  // Disable growth control before safepointing so that no allocation made
  // while threads are being stopped can start a garbage collection. The
  // previous state is saved for restoration in the destructor.
  Heap* heap = I->heap();
  current_growth_controller_state_ = heap->GrowthControlState();
  heap->DisableGrowthControl();

  SafepointHandler* handler = I->group()->safepoint_handler();
  ASSERT(handler != NULL);

  // Signal all threads to get to a safepoint and wait for them to
  // get to a safepoint.
  handler->SafepointThreads(T);
}
// Resumes the threads parked by the constructor, restores the saved heap
// growth-control state, and — because allocations inside the scope may have
// pushed old space past its limit without triggering a GC — either collects
// immediately or checks whether concurrent marking should start.
ForceGrowthSafepointOperationScope::~ForceGrowthSafepointOperationScope() {
  Thread* T = thread();
  ASSERT(T != NULL);
  Isolate* I = T->isolate();
  ASSERT(I != NULL);

  // Resume all threads which are blocked for the safepoint operation.
  // NOTE(review): look the handler up through the isolate group, matching the
  // constructor, so both ends of the scope are guaranteed to use the same
  // SafepointHandler instance.
  SafepointHandler* handler = I->group()->safepoint_handler();
  ASSERT(handler != NULL);
  handler->ResumeThreads(T);

  // Restore the growth-control state captured at scope entry.
  Heap* heap = I->heap();
  heap->SetGrowthControlState(current_growth_controller_state_);
  if (current_growth_controller_state_) {
    ASSERT(T->CanCollectGarbage());
    // Check if we passed the growth limit during the scope.
    if (heap->old_space()->NeedsGarbageCollection()) {
      heap->CollectGarbage(Heap::kMarkSweep, Heap::kOldSpace);
    } else {
      heap->CheckStartConcurrentMarking(T, Heap::kOldSpace);
    }
  }
}
SafepointHandler::SafepointHandler(IsolateGroup* isolate_group)
: isolate_group_(isolate_group),
safepoint_lock_(),

View file

@ -24,6 +24,20 @@ class SafepointOperationScope : public ThreadStackResource {
DISALLOW_COPY_AND_ASSIGN(SafepointOperationScope);
};
// A stack based scope that can be used to perform an operation after getting
// all threads to a safepoint. At the end of the operation all the threads are
// resumed. Allocations in the scope will force heap growth.
class ForceGrowthSafepointOperationScope : public ThreadStackResource {
 public:
  explicit ForceGrowthSafepointOperationScope(Thread* T);
  ~ForceGrowthSafepointOperationScope();

 private:
  // Heap growth-control state captured at scope entry; restored (and any
  // deferred GC check performed) by the destructor.
  bool current_growth_controller_state_;

  DISALLOW_COPY_AND_ASSIGN(ForceGrowthSafepointOperationScope);
};
// Implements handling of safepoint operations for all threads in an
// IsolateGroup.
class SafepointHandler {
@ -94,6 +108,7 @@ class SafepointHandler {
friend class Isolate;
friend class IsolateGroup;
friend class SafepointOperationScope;
friend class ForceGrowthSafepointOperationScope;
friend class HeapIterationScope;
};

View file

@ -196,15 +196,12 @@ RawCode* StubCode::GetAllocationStubForClass(const Class& cls) {
// changes code page access permissions (makes them temporary not
// executable).
{
SafepointOperationScope safepoint_scope(thread);
ForceGrowthSafepointOperationScope safepoint_scope(thread);
stub = cls.allocation_stub();
// Check if stub was already generated.
if (!stub.IsNull()) {
return stub.raw();
}
// Do not Garbage collect during this stage and instead allow the
// heap to grow.
NoHeapGrowthControlScope no_growth_control;
stub = Code::FinalizeCode(nullptr, &assembler, pool_attachment,
/*optimized=*/false, /*stats=*/nullptr);
stub.set_owner(cls);
@ -214,11 +211,6 @@ RawCode* StubCode::GetAllocationStubForClass(const Class& cls) {
// We notify code observers after finalizing the code in order to be
// outside a [SafepointOperationScope].
Code::NotifyCodeObservers(nullptr, stub, /*optimized=*/false);
Isolate* isolate = thread->isolate();
if (isolate->heap()->NeedsGarbageCollection()) {
isolate->heap()->CollectMostGarbage();
}
}
#ifndef PRODUCT
if (FLAG_support_disassembler && FLAG_disassemble_stubs) {