[ VM ] Ensure heap sampling profiler is initialized on newly spawned threads

Also fixes some incorrect TLAB boundary calculations

Fixes https://github.com/dart-lang/sdk/issues/50564

TEST=DartAPI_HeapSampling looped under full system load for 30 minutes

Change-Id: I6e56cc659b58f9b246d20dc5b649fc367af9c672
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/273500
Reviewed-by: Ryan Macnak <rmacnak@google.com>
Commit-Queue: Ben Konyi <bkonyi@google.com>
This commit is contained in:
Ben Konyi 2022-12-05 17:41:08 +00:00
parent bea7045517
commit 3850a595b9
5 changed files with 86 additions and 61 deletions

View file

@@ -1829,40 +1829,26 @@ DART_EXPORT void Dart_NotifyDestroyed() {
DART_EXPORT void Dart_EnableHeapSampling() {
#if !defined(PRODUCT)
IsolateGroup::ForEach([&](IsolateGroup* group) {
group->thread_registry()->ForEachThread(
[&](Thread* thread) { thread->heap_sampler().Enable(true); });
});
HeapProfileSampler::Enable(true);
#endif
}
DART_EXPORT void Dart_DisableHeapSampling() {
#if !defined(PRODUCT)
IsolateGroup::ForEach([&](IsolateGroup* group) {
group->thread_registry()->ForEachThread(
[&](Thread* thread) { thread->heap_sampler().Enable(false); });
});
HeapProfileSampler::Enable(false);
#endif
}
DART_EXPORT void Dart_RegisterHeapSamplingCallback(
Dart_HeapSamplingCallback callback) {
#if !defined(PRODUCT)
IsolateGroup::ForEach([&](IsolateGroup* group) {
group->thread_registry()->ForEachThread([&](Thread* thread) {
thread->heap_sampler().SetSamplingCallback(callback);
});
});
HeapProfileSampler::SetSamplingCallback(callback);
#endif
}
DART_EXPORT void Dart_SetHeapSamplingPeriod(intptr_t bytes) {
#if !defined(PRODUCT)
IsolateGroup::ForEach([&](IsolateGroup* group) {
group->thread_registry()->ForEachThread([&](Thread* thread) {
thread->heap_sampler().SetSamplingInterval(bytes);
});
});
HeapProfileSampler::SetSamplingInterval(bytes);
#endif
}

View file

@@ -14,48 +14,40 @@
#include "vm/os.h"
#include "vm/random.h"
#include "vm/thread.h"
#include "vm/thread_registry.h"
namespace dart {
HeapProfileSampler::HeapProfileSampler(Thread* thread)
: lock_(new RwLock()),
interval_to_next_sample_(kUninitialized),
thread_(thread) {}
bool HeapProfileSampler::enabled_ = false;
Dart_HeapSamplingCallback HeapProfileSampler::callback_ = nullptr;
RwLock* HeapProfileSampler::lock_ = new RwLock();
intptr_t HeapProfileSampler::sampling_interval_ =
HeapProfileSampler::kDefaultSamplingInterval;
HeapProfileSampler::~HeapProfileSampler() {
delete lock_;
lock_ = nullptr;
}
HeapProfileSampler::HeapProfileSampler(Thread* thread)
: interval_to_next_sample_(kUninitialized), thread_(thread) {}
void HeapProfileSampler::Enable(bool enabled) {
WriteRwLocker locker(Thread::Current(), lock_);
enabled_ = enabled;
if (enabled) {
SetNextSamplingIntervalLocked(GetNextSamplingIntervalLocked());
} else if (!enabled) {
// Reset the TLAB boundaries to the true end to avoid unnecessary slow
// path invocations when sampling is disabled.
thread_->set_end(thread_->true_end());
}
}
void HeapProfileSampler::HandleNewTLAB(intptr_t old_tlab_remaining_space) {
ReadRwLocker locker(thread_, lock_);
if (!enabled_ || next_tlab_offset_ == kUninitialized) {
return;
}
thread_->set_end(next_tlab_offset_ + old_tlab_remaining_space +
thread_->top());
next_tlab_offset_ = kUninitialized;
IsolateGroup::ForEach([&](IsolateGroup* group) {
group->thread_registry()->ForEachThread(
[&](Thread* thread) { thread->heap_sampler().EnableLocked(); });
});
}
void HeapProfileSampler::SetSamplingInterval(intptr_t bytes_interval) {
WriteRwLocker locker(Thread::Current(), lock_);
ASSERT(bytes_interval >= 0);
sampling_interval_ = bytes_interval;
// Force reset the next sampling point.
thread_->set_end(thread_->true_end());
SetNextSamplingIntervalLocked(GetNextSamplingIntervalLocked());
if (!enabled_) {
return;
}
IsolateGroup::ForEach([&](IsolateGroup* group) {
group->thread_registry()->ForEachThread([&](Thread* thread) {
thread->heap_sampler().SetSamplingIntervalLocked();
});
});
}
void HeapProfileSampler::SetSamplingCallback(
@@ -65,6 +57,46 @@ void HeapProfileSampler::SetSamplingCallback(
callback_ = callback;
}
void HeapProfileSampler::Initialize() {
ReadRwLocker locker(Thread::Current(), lock_);
EnableLocked();
}
void HeapProfileSampler::EnableLocked() {
if (enabled_) {
SetNextSamplingIntervalLocked(GetNextSamplingIntervalLocked());
} else if (!enabled_) {
// Reset the TLAB boundaries to the true end to avoid unnecessary slow
// path invocations when sampling is disabled.
thread_->set_end(thread_->true_end());
next_tlab_offset_ = kUninitialized;
}
}
void HeapProfileSampler::SetSamplingIntervalLocked() {
// Force reset the next sampling point.
thread_->set_end(thread_->true_end());
SetNextSamplingIntervalLocked(GetNextSamplingIntervalLocked());
}
void HeapProfileSampler::HandleNewTLAB(intptr_t old_tlab_remaining_space) {
ReadRwLocker locker(thread_, lock_);
if (!enabled_ || next_tlab_offset_ == kUninitialized) {
return;
}
intptr_t updated_offset = next_tlab_offset_ + old_tlab_remaining_space;
if (updated_offset + thread_->top() > thread_->true_end()) {
// The next sampling point isn't in this TLAB.
next_tlab_offset_ = updated_offset - (thread_->true_end() - thread_->top());
thread_->set_end(thread_->true_end());
} else {
ASSERT(updated_offset <= static_cast<intptr_t>(thread_->true_end()) -
static_cast<intptr_t>(thread_->top()));
thread_->set_end(updated_offset + thread_->top());
next_tlab_offset_ = kUninitialized;
}
}
void HeapProfileSampler::InvokeCallbackForLastSample(
Dart_PersistentHandle type_name,
Dart_WeakPersistentHandle obj) {
@@ -157,6 +189,7 @@ intptr_t HeapProfileSampler::SetNextSamplingIntervalLocked(
if (new_end > true_end) {
// The next sampling point is in the next TLAB.
ASSERT(next_tlab_offset_ == kUninitialized);
next_tlab_offset_ = new_end - true_end;
new_end = true_end;
}

View file

@@ -26,16 +26,17 @@ class Thread;
class HeapProfileSampler {
public:
explicit HeapProfileSampler(Thread* thread);
~HeapProfileSampler();
void Enable(bool enabled);
static void Enable(bool enabled);
static void SetSamplingInterval(intptr_t bytes_interval);
static void SetSamplingCallback(Dart_HeapSamplingCallback callback);
void Initialize();
void HandleNewTLAB(intptr_t old_tlab_remaining_space);
void SetSamplingInterval(intptr_t bytes_interval);
void SetSamplingCallback(Dart_HeapSamplingCallback callback);
void InvokeCallbackForLastSample(Dart_PersistentHandle type_name,
Dart_WeakPersistentHandle obj);
@@ -52,6 +53,9 @@ class HeapProfileSampler {
void SampleSize(intptr_t allocation_size);
private:
void EnableLocked();
void SetSamplingIntervalLocked();
intptr_t SetNextSamplingIntervalLocked(intptr_t next_interval);
intptr_t GetNextSamplingIntervalLocked();
@@ -59,18 +63,16 @@ class HeapProfileSampler {
// Protects sampling logic from modifications of callback_, sampling_interval,
// and enabled_ while collecting a sample.
RwLock* lock_;
static RwLock* lock_;
static bool enabled_;
static Dart_HeapSamplingCallback callback_;
static intptr_t sampling_interval_;
bool enabled_ = false;
static const intptr_t kUninitialized = -1;
static const intptr_t kDefaultSamplingInterval = 512 * KB;
Dart_HeapSamplingCallback callback_;
const intptr_t kUninitialized = -1;
const intptr_t kDefaultSamplingInterval = 1 << 19; // 512KiB
intptr_t sampling_interval_ = kDefaultSamplingInterval;
intptr_t interval_to_next_sample_;
intptr_t next_tlab_offset_ = kUninitialized;
intptr_t last_sample_size_ = kUninitialized;
Thread* thread_;

View file

@@ -251,7 +251,8 @@ class Scavenger {
ASSERT(heap_ != Dart::vm_isolate_group()->heap());
const uword result = thread->top();
const intptr_t remaining = thread->end() - result;
const intptr_t remaining = static_cast<intptr_t>(thread->end()) - result;
ASSERT(remaining >= 0);
if (UNLIKELY(remaining < size)) {
return 0;
}

View file

@@ -638,6 +638,9 @@ Thread* IsolateGroup::ScheduleThreadLocked(MonitorLocker* ml,
thread->set_safepoint_state(
Thread::SetBypassSafepoints(bypass_safepoint, 0));
thread->set_vm_tag(VMTag::kVMTagId);
#if !defined(PRODUCT)
thread->heap_sampler().Initialize();
#endif
ASSERT(thread->no_safepoint_scope_depth() == 0);
os_thread->set_thread(thread);
Thread::SetCurrent(thread);