[vm] Bypass malloc for large Zone allocations to avoid jemalloc leaks.

Revert 9a07ad88f4.

Bug: https://github.com/flutter/flutter/issues/29007
Change-Id: I6a5f51f0c3a54d354ec5f8495677d46f94d8a1d3
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/100568
Commit-Queue: Ryan Macnak <rmacnak@google.com>
Reviewed-by: Zach Anderson <zra@google.com>
Reviewed-by: Vyacheslav Egorov <vegorov@google.com>
This commit is contained in:
Ryan Macnak 2019-04-30 16:17:18 +00:00 committed by commit-bot@chromium.org
parent a7325f9e59
commit 347b72c260
8 changed files with 151 additions and 38 deletions

View file

@ -949,8 +949,7 @@ Isolate::Isolate(const Dart_IsolateFlags& api_flags)
catch_entry_moves_cache_(),
embedder_entry_points_(NULL),
obfuscation_map_(NULL),
reverse_pc_lookup_cache_(nullptr),
irregexp_backtrack_stack_(nullptr) {
reverse_pc_lookup_cache_(nullptr) {
FlagsCopyFrom(api_flags);
SetErrorsFatal(true);
set_compilation_allowed(true);
@ -982,9 +981,6 @@ Isolate::~Isolate() {
// RELEASE_ASSERT(reload_context_ == NULL);
#endif // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
delete[] irregexp_backtrack_stack_;
irregexp_backtrack_stack_ = nullptr;
delete reverse_pc_lookup_cache_;
reverse_pc_lookup_cache_ = nullptr;

View file

@ -734,18 +734,6 @@ class Isolate : public BaseIsolate {
reverse_pc_lookup_cache_ = table;
}
// This doesn't belong here, but to avoid triggering bugs in jemalloc we
// allocate the irregexp interpreter's stack once per isolate instead of once
// per regexp execution.
// See https://github.com/flutter/flutter/issues/29007
static constexpr intptr_t kIrregexpBacktrackStackSize = 1 << 16;
intptr_t* irregexp_backtrack_stack() {
if (irregexp_backtrack_stack_ == nullptr) {
irregexp_backtrack_stack_ = new intptr_t[kIrregexpBacktrackStackSize];
}
return irregexp_backtrack_stack_;
}
// Isolate-specific flag handling.
static void FlagsInitialize(Dart_IsolateFlags* api_flags);
void FlagsCopyTo(Dart_IsolateFlags* api_flags) const;
@ -1058,8 +1046,6 @@ class Isolate : public BaseIsolate {
ReversePcLookupCache* reverse_pc_lookup_cache_;
intptr_t* irregexp_backtrack_stack_;
static Dart_IsolateCreateCallback create_callback_;
static Dart_IsolateShutdownCallback shutdown_callback_;
static Dart_IsolateCleanupCallback cleanup_callback_;

View file

@ -494,7 +494,7 @@ static IrregexpInterpreter::IrregexpResult ExecRaw(const RegExp& regexp,
TypedData::Handle(zone, regexp.bytecode(is_one_byte, sticky));
ASSERT(!bytecode.IsNull());
IrregexpInterpreter::IrregexpResult result =
IrregexpInterpreter::Match(bytecode, subject, raw_output, index);
IrregexpInterpreter::Match(bytecode, subject, raw_output, index, zone);
if (result == IrregexpInterpreter::RE_SUCCESS) {
// Copy capture results to the start of the registers array.

View file

@ -127,8 +127,8 @@ static int32_t Load16Aligned(const uint8_t* pc) {
// matching terminates.
class BacktrackStack {
public:
explicit BacktrackStack(Isolate* isolate) {
data_ = isolate->irregexp_backtrack_stack();
explicit BacktrackStack(Zone* zone) {
data_ = zone->Alloc<intptr_t>(kBacktrackStackSize);
}
intptr_t* data() const { return data_; }
@ -136,8 +136,7 @@ class BacktrackStack {
intptr_t max_size() const { return kBacktrackStackSize; }
private:
static constexpr intptr_t kBacktrackStackSize =
Isolate::kIrregexpBacktrackStackSize;
static const intptr_t kBacktrackStackSize = 1 << 16;
intptr_t* data_;
@ -149,12 +148,13 @@ static IrregexpInterpreter::IrregexpResult RawMatch(const uint8_t* code_base,
const String& subject,
int32_t* registers,
intptr_t current,
uint32_t current_char) {
uint32_t current_char,
Zone* zone) {
const uint8_t* pc = code_base;
// BacktrackStack ensures that the memory allocated for the backtracking stack
// is returned to the system or cached if there is no stack being cached at
// the moment.
BacktrackStack backtrack_stack(Isolate::Current());
BacktrackStack backtrack_stack(zone);
intptr_t* backtrack_stack_base = backtrack_stack.data();
intptr_t* backtrack_sp = backtrack_stack_base;
intptr_t backtrack_stack_space = backtrack_stack.max_size();
@ -627,7 +627,8 @@ IrregexpInterpreter::IrregexpResult IrregexpInterpreter::Match(
const TypedData& bytecode,
const String& subject,
int32_t* registers,
intptr_t start_position) {
intptr_t start_position,
Zone* zone) {
NoSafepointScope no_safepoint;
const uint8_t* code_base = reinterpret_cast<uint8_t*>(bytecode.DataAddr(0));
@ -638,10 +639,10 @@ IrregexpInterpreter::IrregexpResult IrregexpInterpreter::Match(
if (subject.IsOneByteString() || subject.IsExternalOneByteString()) {
return RawMatch<uint8_t>(code_base, subject, registers, start_position,
previous_char);
previous_char, zone);
} else if (subject.IsTwoByteString() || subject.IsExternalTwoByteString()) {
return RawMatch<uint16_t>(code_base, subject, registers, start_position,
previous_char);
previous_char, zone);
} else {
UNREACHABLE();
return IrregexpInterpreter::RE_FAILURE;

View file

@ -20,7 +20,8 @@ class IrregexpInterpreter : public AllStatic {
static IrregexpResult Match(const TypedData& bytecode,
const String& subject,
int32_t* captures,
intptr_t start_position);
intptr_t start_position,
Zone* zone);
};
} // namespace dart

View file

@ -11,6 +11,7 @@
#include "vm/handles_impl.h"
#include "vm/heap/heap.h"
#include "vm/os.h"
#include "vm/virtual_memory.h"
namespace dart {
@ -21,25 +22,28 @@ class Zone::Segment {
public:
Segment* next() const { return next_; }
intptr_t size() const { return size_; }
VirtualMemory* memory() const { return memory_; }
uword start() { return address(sizeof(Segment)); }
uword end() { return address(size_); }
// Allocate or delete individual segments.
static Segment* New(intptr_t size, Segment* next);
static Segment* NewLarge(intptr_t size, Segment* next);
static void DeleteSegmentList(Segment* segment);
static void DeleteLargeSegmentList(Segment* segment);
static void IncrementMemoryCapacity(uintptr_t size);
static void DecrementMemoryCapacity(uintptr_t size);
private:
Segment* next_;
intptr_t size_;
VirtualMemory* memory_;
void* alignment_;
// Computes the address of the nth byte in this segment.
uword address(int n) { return reinterpret_cast<uword>(this) + n; }
static void Delete(Segment* segment) { free(segment); }
DISALLOW_IMPLICIT_CONSTRUCTORS(Segment);
};
@ -49,13 +53,36 @@ Zone::Segment* Zone::Segment::New(intptr_t size, Zone::Segment* next) {
if (result == NULL) {
OUT_OF_MEMORY();
}
ASSERT(Utils::IsAligned(result->start(), Zone::kAlignment));
#ifdef DEBUG
// Zap the entire allocated segment (including the header).
memset(result, kZapUninitializedByte, size);
#endif
result->next_ = next;
result->size_ = size;
result->memory_ = nullptr;
result->alignment_ = nullptr; // Avoid unused variable warnings.
IncrementMemoryCapacity(size);
return result;
}
// Allocates a large zone segment directly from the OS via VirtualMemory,
// bypassing malloc. This avoids pathological memory retention seen in some
// malloc implementations (jemalloc) for large, short-lived allocations.
// See https://github.com/flutter/flutter/issues/29007
Zone::Segment* Zone::Segment::NewLarge(intptr_t size, Zone::Segment* next) {
// VirtualMemory hands out whole pages, so round the request up.
size = Utils::RoundUp(size, VirtualMemory::PageSize());
VirtualMemory* memory = VirtualMemory::Allocate(size, false, "dart-zone");
if (memory == NULL) {
OUT_OF_MEMORY();
}
// The Segment header lives at the start of the fresh mapping; the usable
// payload follows it (see Segment::start()).
Segment* result = reinterpret_cast<Segment*>(memory->start());
#ifdef DEBUG
// Zap the entire allocated segment (including the header).
memset(result, kZapUninitializedByte, size);
#endif
result->next_ = next;
result->size_ = size;
result->memory_ = memory;  // Non-null memory_ marks this as a large segment.
result->alignment_ = nullptr; // Avoid unused variable warnings.
// Register the header as an LSAN root so pointers stored in it (notably
// memory_) are not reported as leaked; presumably LSAN does not otherwise
// scan this anonymous mapping.
LSAN_REGISTER_ROOT_REGION(result, sizeof(*result));
IncrementMemoryCapacity(size);
return result;
}
@ -69,7 +96,23 @@ void Zone::Segment::DeleteSegmentList(Segment* head) {
// Zap the entire current segment (including the header).
memset(current, kZapDeletedByte, current->size());
#endif
Segment::Delete(current);
free(current);
current = next;
}
}
// Releases every segment in a list of large (VirtualMemory-backed) segments.
// These were mapped directly from the OS by NewLarge, so teardown deletes the
// VirtualMemory object (which unmaps the pages) instead of calling free().
void Zone::Segment::DeleteLargeSegmentList(Segment* head) {
for (Segment* segment = head; segment != NULL;) {
DecrementMemoryCapacity(segment->size());
// Read the header fields before the debug zap clobbers them.
Segment* remainder = segment->next();
VirtualMemory* mapping = segment->memory();
#ifdef DEBUG
// Zap the entire segment (header included) to catch stale references.
memset(segment, kZapDeletedByte, segment->size());
#endif
LSAN_UNREGISTER_ROOT_REGION(segment, sizeof(*segment));
// Deleting the VirtualMemory object unmaps the segment itself.
delete mapping;
segment = remainder;
}
}
@ -129,7 +172,7 @@ void Zone::DeleteAll() {
Segment::DeleteSegmentList(head_);
}
if (large_segments_ != NULL) {
Segment::DeleteSegmentList(large_segments_);
Segment::DeleteLargeSegmentList(large_segments_);
}
// Reset zone state.
#ifdef DEBUG
@ -214,9 +257,9 @@ uword Zone::AllocateLargeSegment(intptr_t size) {
ASSERT(free_size < size);
// Create a new large segment and chain it up.
ASSERT(Utils::IsAligned(sizeof(Segment), kAlignment));
size += sizeof(Segment); // Account for book keeping fields in size.
large_segments_ = Segment::New(size, large_segments_);
// Account for book keeping fields in size.
size += Utils::RoundUp(sizeof(Segment), kAlignment);
large_segments_ = Segment::NewLarge(size, large_segments_);
uword result = Utils::RoundUp(large_segments_->start(), kAlignment);
return result;

View file

@ -176,4 +176,66 @@ VM_UNIT_TEST_CASE(NativeScopeZoneAllocation) {
EXPECT_EQ(0UL, ApiNativeScope::current_memory_usage());
}
#if !defined(PRODUCT)
// Allow for pooling in the malloc implementation.
static const int64_t kRssSlack = 20 * MB;
#endif // !defined(PRODUCT)
// clang-format off
// Allocation sizes straddling power-of-two boundaries (exactly at, slightly
// above, and slightly below each size) to probe size-class behavior in the
// underlying allocator.
static const size_t kSizes[] = {
64 * KB,
64 * KB + 2 * kWordSize,
64 * KB - 2 * kWordSize,
128 * KB,
128 * KB + 2 * kWordSize,
128 * KB - 2 * kWordSize,
256 * KB,
256 * KB + 2 * kWordSize,
256 * KB - 2 * kWordSize,
512 * KB,
512 * KB + 2 * kWordSize,
512 * KB - 2 * kWordSize,
};
// clang-format on
// Regression test for https://github.com/flutter/flutter/issues/29007:
// repeatedly malloc/free large blocks and verify that RSS does not grow
// past the allowed slack, i.e. the allocator actually returns the memory.
TEST_CASE(StressMallocDirectly) {
#if !defined(PRODUCT)
int64_t start_rss = Service::CurrentRSS();
#endif // !defined(PRODUCT)
void* allocations[ARRAY_SIZE(kSizes)];
// Cumulative allocation traffic is on the order of 3 GB, far exceeding
// kRssSlack, so any systematic retention would trip the check below.
for (size_t i = 0; i < ((3u * GB) / (512u * KB)); i++) {
for (size_t j = 0; j < ARRAY_SIZE(kSizes); j++) {
allocations[j] = malloc(kSizes[j]);
}
for (size_t j = 0; j < ARRAY_SIZE(kSizes); j++) {
free(allocations[j]);
}
}
#if !defined(PRODUCT)
int64_t stop_rss = Service::CurrentRSS();
EXPECT_LT(stop_rss, start_rss + kRssSlack);
#endif // !defined(PRODUCT)
}
// Companion to StressMallocDirectly: drive the same large allocation sizes
// through Zone::Alloc instead of raw malloc. Each iteration's StackZone is
// destroyed at scope exit, releasing its segments, so RSS must stay bounded.
TEST_CASE(StressMallocThroughZones) {
#if !defined(PRODUCT)
int64_t start_rss = Service::CurrentRSS();
#endif // !defined(PRODUCT)
for (size_t i = 0; i < ((3u * GB) / (512u * KB)); i++) {
StackZone stack_zone(Thread::Current());
Zone* zone = stack_zone.GetZone();
for (size_t j = 0; j < ARRAY_SIZE(kSizes); j++) {
zone->Alloc<uint8_t>(kSizes[j]);
}
}
#if !defined(PRODUCT)
int64_t stop_rss = Service::CurrentRSS();
EXPECT_LT(stop_rss, start_rss + kRssSlack);
#endif // !defined(PRODUCT)
}
} // namespace dart

View file

@ -0,0 +1,24 @@
// Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
// Regression test for https://github.com/flutter/flutter/issues/29007
// Returns [string] with each matched regexp metacharacter prefixed by a
// backslash. The point of this function is the RegExp execution itself,
// which exercises the irregexp interpreter's backtracking-stack allocation.
String escape(String string) {
var regex = new RegExp("(\\?|\\\$|\\*|\\(|\\)|\\[)|\\+|\\.|\\\\");
return string.replaceAllMapped(
regex, (Match m) => "\\" + string.substring(m.start, m.end));
}
main() {
var text = """
Yet but three? Come one more.
Two of both kinds make up four.
""";
// Run the regexp many times so that repeated per-execution allocations
// would accumulate (the leak pattern reported in the issue above).
var accumulate = 0;
for (var i = 0; i < 65536; i++) {
accumulate += escape(text).length;
}
// Use (and print) the result so the loop cannot be optimized away.
print(accumulate);
}