VM-internalize the default Map implementation.

Make the compact linked hash map, which is the default implementation for Map
(including map literals), a VM-internal class.

This makes it easy to have more efficient serialization of maps (although for
now, the implementation is straightforward).

Refactor the compact hash class hierarchy to enable the VM-internal class and
the rest (also the Set classes) to share a maximal amount of code, by using
two different bases for implicit/explicit fields.

Remove existing proof-of-concept, C++-based internal VM-class.

BUG=http://dartbug.com/22982
R=asiva@google.com

Review URL: https://codereview.chromium.org//1151523002
This commit is contained in:
Daniel Andersson 2015-05-28 09:32:54 -07:00
parent e68d22ce1e
commit ea46192ca3
18 changed files with 507 additions and 384 deletions

View file

@ -916,11 +916,7 @@ patch class LinkedHashMap<K, V> {
if (isValidKey == null) {
if (hashCode == null) {
if (equals == null) {
if (_useInternalCached) {
return new _InternalLinkedHashMap<K, V>();
} else {
return new _CompactLinkedHashMap<K, V>();
}
return new _InternalLinkedHashMap<K, V>();
}
hashCode = _defaultHashCode;
} else {
@ -945,9 +941,6 @@ patch class LinkedHashMap<K, V> {
/* patch */ factory LinkedHashMap.identity() =
_CompactLinkedIdentityHashMap<K, V>;
static final bool _useInternalCached = _useInternal;
static bool get _useInternal native "LinkedHashMap_useInternal";
}
patch class LinkedHashSet<E> {

View file

@ -7,6 +7,5 @@
'collection_patch.dart',
'compact_hash.dart',
'linked_hash_map.cc',
'linked_hash_map.dart',
],
}

View file

@ -6,34 +6,68 @@ import 'dart:typed_data';
import 'dart:_internal' as internal;
// Hash table with open addressing that separates the index from keys/values.
abstract class _HashBase {
abstract class _HashFieldBase {
// Each occupied entry in _index is a fixed-size integer that encodes a pair:
// [ hash pattern for key | index of entry in _data ]
// The hash pattern is based on hashCode, but is guaranteed to be non-zero.
// The length of _index is always a power of two, and there is always at
// least one unoccupied entry.
Uint32List _index;
// The number of bits used for each component is determined by table size.
// The length of _index is twice the number of entries in _data, and both
// are doubled when _data is full. Thus, _index will have a max load factor
// of 1/2, which enables one more bit to be used for the hash.
// TODO(koda): Consider growing _data by factor sqrt(2), twice as often.
static const int _INITIAL_INDEX_BITS = 3;
static const int _INITIAL_INDEX_SIZE = 1 << (_INITIAL_INDEX_BITS + 1);
// Unused and deleted entries are marked by 0 and 1, respectively.
static const int _UNUSED_PAIR = 0;
static const int _DELETED_PAIR = 1;
Uint32List _index = new Uint32List(_HashBase._INITIAL_INDEX_SIZE);
// Cached in-place mask for the hash pattern component. On 32-bit, the top
// bits are wasted to avoid Mint allocation.
// TODO(koda): Reclaim the bits by making the compiler treat hash patterns
// as unsigned words.
int _hashMask = internal.is64Bit ?
(1 << (32 - _INITIAL_INDEX_BITS)) - 1 :
(1 << (30 - _INITIAL_INDEX_BITS)) - 1;
(1 << (32 - _HashBase._INITIAL_INDEX_BITS)) - 1 :
(1 << (30 - _HashBase._INITIAL_INDEX_BITS)) - 1;
// Fixed-length list of keys (set) or key/value at even/odd indices (map).
List _data = new List(_HashBase._INITIAL_INDEX_SIZE);
// Length of _data that is used (i.e., keys + values for a map).
int _usedData = 0;
// Number of deleted keys.
int _deletedKeys = 0;
}
// Base class for VM-internal classes; keep in sync with _HashFieldBase.
// Each accessor pair below reads/writes a field stored directly in the VM's
// LinkedHashMap object layout via a native entry. These accessors are
// recognized by the VM's MethodRecognizer, so calls are normally replaced
// with direct field loads/stores by the flow-graph builder.
abstract class _HashVMBase {
  // Open-addressing hash index into _data.
  Uint32List get _index native "LinkedHashMap_getIndex";
  void set _index(Uint32List value) native "LinkedHashMap_setIndex";
  // In-place mask for the hash-pattern component of an _index entry.
  int get _hashMask native "LinkedHashMap_getHashMask";
  void set _hashMask(int value) native "LinkedHashMap_setHashMask";
  // Keys (set) or key/value pairs at even/odd indices (map), in insertion
  // order.
  List get _data native "LinkedHashMap_getData";
  void set _data(List value) native "LinkedHashMap_setData";
  // Number of used slots at the front of _data (keys + values for a map).
  int get _usedData native "LinkedHashMap_getUsedData";
  void set _usedData(int value) native "LinkedHashMap_setUsedData";
  // Number of deleted keys still occupying slots in _data.
  int get _deletedKeys native "LinkedHashMap_getDeletedKeys";
  void set _deletedKeys(int value) native "LinkedHashMap_setDeletedKeys";
}
// This mixin can be applied to _HashFieldBase or _HashVMBase (for
// normal and VM-internalized classes, respectively), which provide the
// actual fields/accessors that this mixin assumes.
// TODO(koda): Consider moving field comments to _HashFieldBase.
abstract class _HashBase {
// The number of bits used for each component is determined by table size.
// The length of _index is twice the number of entries in _data, and both
// are doubled when _data is full. Thus, _index will have a max load factor
// of 1/2, which enables one more bit to be used for the hash.
// TODO(koda): Consider growing _data by factor sqrt(2), twice as often.
static const int _INITIAL_INDEX_BITS = 3;
static const int _INITIAL_INDEX_SIZE = 1 << (_INITIAL_INDEX_BITS + 1);
// Unused and deleted entries are marked by 0 and 1, respectively.
static const int _UNUSED_PAIR = 0;
static const int _DELETED_PAIR = 1;
static int _hashPattern(int fullHash, int hashMask, int size) {
final int maskedHash = fullHash & hashMask;
// TODO(koda): Consider keeping bit length and use left shift.
@ -47,14 +81,7 @@ abstract class _HashBase {
return ((i << 1) + i) & sizeMask;
}
static int _nextProbe(int i, int sizeMask) => (i + 1) & sizeMask;
// Fixed-length list of keys (set) or key/value at even/odd indices (map).
List _data;
// Length of _data that is used (i.e., keys + values for a map).
int _usedData = 0;
// Number of deleted keys.
int _deletedKeys = 0;
// A self-loop is used to mark a deleted key or value.
static bool _isDeleted(List data, Object keyOrValue) =>
identical(keyOrValue, data);
@ -79,22 +106,19 @@ class _IdenticalAndIdentityHashCode {
bool _equals(e1, e2) => identical(e1, e2);
}
// Map with iteration in insertion order (hence "Linked"). New keys are simply
// appended to _data.
class _CompactLinkedHashMap<K, V>
extends MapBase<K, V> with _HashBase, _OperatorEqualsAndHashCode
// VM-internalized implementation of a default-constructed LinkedHashMap.
class _InternalLinkedHashMap<K, V> extends _HashVMBase
with MapMixin<K, V>, _LinkedHashMapMixin<K, V>, _HashBase,
_OperatorEqualsAndHashCode
implements LinkedHashMap<K, V> {
factory _InternalLinkedHashMap() native "LinkedHashMap_allocate";
}
_CompactLinkedHashMap() {
assert(_HashBase._UNUSED_PAIR == 0);
_index = new Uint32List(_HashBase._INITIAL_INDEX_SIZE);
_data = new List(_HashBase._INITIAL_INDEX_SIZE);
}
class _LinkedHashMapMixin<K, V> {
int get length => (_usedData >> 1) - _deletedKeys;
bool get isEmpty => length == 0;
bool get isNotEmpty => !isEmpty;
void _rehash() {
if ((_deletedKeys << 2) > _usedData) {
// TODO(koda): Consider shrinking.
@ -105,7 +129,7 @@ class _CompactLinkedHashMap<K, V>
_init(_index.length << 1, _hashMask >> 1, _data, _usedData);
}
}
void clear() {
if (!isEmpty) {
_init(_index.length, _hashMask);
@ -131,7 +155,7 @@ class _CompactLinkedHashMap<K, V>
}
}
}
void _insert(K key, V value, int hashPattern, int i) {
if (_usedData == _data.length) {
_rehash();
@ -145,7 +169,7 @@ class _CompactLinkedHashMap<K, V>
_data[_usedData++] = value;
}
}
// If key is present, returns the index of the value in _data, else returns
// the negated insertion point in _index.
int _findValueOrInsertPoint(K key, int fullHash, int hashPattern, int size) {
@ -173,7 +197,7 @@ class _CompactLinkedHashMap<K, V>
}
return firstDeleted >= 0 ? -firstDeleted : -i;
}
void operator[]=(K key, V value) {
final int size = _index.length;
final int sizeMask = size - 1;
@ -187,7 +211,7 @@ class _CompactLinkedHashMap<K, V>
_insert(key, value, hashPattern, i);
}
}
V putIfAbsent(K key, V ifAbsent()) {
final int size = _index.length;
final int sizeMask = size - 1;
@ -210,7 +234,7 @@ class _CompactLinkedHashMap<K, V>
}
return value;
}
V remove(Object key) {
final int size = _index.length;
final int sizeMask = size - 1;
@ -239,7 +263,7 @@ class _CompactLinkedHashMap<K, V>
}
return null;
}
// If key is absent, return _data (which is never a value).
Object _getValueOrData(Object key) {
final int size = _index.length;
@ -264,14 +288,14 @@ class _CompactLinkedHashMap<K, V>
}
return _data;
}
bool containsKey(Object key) => !identical(_data, _getValueOrData(key));
V operator[](Object key) {
var v = _getValueOrData(key);
return identical(_data, v) ? null : v;
}
bool containsValue(Object value) {
for (var v in values) {
// Spec. says this should always use "==", also for identity maps, etc.
@ -297,12 +321,15 @@ class _CompactLinkedHashMap<K, V>
new _CompactIterable<V>(this, _data, _usedData, -1, 2);
}
class _CompactLinkedIdentityHashMap<K, V>
extends _CompactLinkedHashMap<K, V> with _IdenticalAndIdentityHashCode {
class _CompactLinkedIdentityHashMap<K, V> extends _HashFieldBase
with MapMixin<K, V>, _LinkedHashMapMixin<K, V>, _HashBase,
_IdenticalAndIdentityHashCode
implements LinkedHashMap<K, V> {
}
class _CompactLinkedCustomHashMap<K, V>
extends _CompactLinkedHashMap<K, V> {
class _CompactLinkedCustomHashMap<K, V> extends _HashFieldBase
with MapMixin<K, V>, _LinkedHashMapMixin<K, V>, _HashBase
implements LinkedHashMap<K, V> {
final _equality;
final _hasher;
final _validKey;
@ -369,8 +396,8 @@ class _CompactIterator<E> implements Iterator<E> {
}
// Set implementation, analogous to _CompactLinkedHashMap.
class _CompactLinkedHashSet<E>
extends SetBase<E> with _HashBase, _OperatorEqualsAndHashCode
class _CompactLinkedHashSet<E> extends _HashFieldBase
with _HashBase, _OperatorEqualsAndHashCode, SetMixin<E>
implements LinkedHashSet<E> {
_CompactLinkedHashSet() {

View file

@ -13,85 +13,98 @@
namespace dart {
DEFINE_FLAG(bool, use_internal_hash_map, false, "Use internal hash map.");
DEFINE_NATIVE_ENTRY(LinkedHashMap_allocate, 1) {
const TypeArguments& type_arguments =
TypeArguments::CheckedHandle(arguments->NativeArgAt(0));
const LinkedHashMap& map =
LinkedHashMap::Handle(LinkedHashMap::New());
LinkedHashMap::Handle(LinkedHashMap::NewDefault());
map.SetTypeArguments(type_arguments);
return map.raw();
}
DEFINE_NATIVE_ENTRY(LinkedHashMap_getLength, 1) {
DEFINE_NATIVE_ENTRY(LinkedHashMap_getIndex, 1) {
const LinkedHashMap& map =
LinkedHashMap::CheckedHandle(arguments->NativeArgAt(0));
return Smi::New(map.Length());
return map.index();
}
DEFINE_NATIVE_ENTRY(LinkedHashMap_lookUp, 2) {
DEFINE_NATIVE_ENTRY(LinkedHashMap_setIndex, 2) {
const LinkedHashMap& map =
LinkedHashMap::CheckedHandle(arguments->NativeArgAt(0));
GET_NON_NULL_NATIVE_ARGUMENT(Instance, key, arguments->NativeArgAt(1));
return map.LookUp(key);
}
DEFINE_NATIVE_ENTRY(LinkedHashMap_containsKey, 2) {
const LinkedHashMap& map =
LinkedHashMap::CheckedHandle(arguments->NativeArgAt(0));
GET_NON_NULL_NATIVE_ARGUMENT(Instance, key, arguments->NativeArgAt(1));
return Bool::Get(map.Contains(key)).raw();
}
DEFINE_NATIVE_ENTRY(LinkedHashMap_insertOrUpdate, 3) {
LinkedHashMap& map =
LinkedHashMap::CheckedHandle(arguments->NativeArgAt(0));
GET_NON_NULL_NATIVE_ARGUMENT(Instance, key, arguments->NativeArgAt(1));
GET_NON_NULL_NATIVE_ARGUMENT(Instance, value, arguments->NativeArgAt(2));
map.InsertOrUpdate(key, value);
const TypedData& index =
TypedData::CheckedHandle(arguments->NativeArgAt(1));
map.SetIndex(index);
return Object::null();
}
DEFINE_NATIVE_ENTRY(LinkedHashMap_remove, 2) {
DEFINE_NATIVE_ENTRY(LinkedHashMap_getData, 1) {
const LinkedHashMap& map =
LinkedHashMap::CheckedHandle(arguments->NativeArgAt(0));
GET_NON_NULL_NATIVE_ARGUMENT(Instance, key, arguments->NativeArgAt(1));
return map.Remove(key);
return map.data();
}
DEFINE_NATIVE_ENTRY(LinkedHashMap_clear, 1) {
DEFINE_NATIVE_ENTRY(LinkedHashMap_setData, 2) {
const LinkedHashMap& map =
LinkedHashMap::CheckedHandle(arguments->NativeArgAt(0));
map.Clear();
const Array& data =
Array::CheckedHandle(arguments->NativeArgAt(1));
map.SetData(data);
return Object::null();
}
DEFINE_NATIVE_ENTRY(LinkedHashMap_toArray, 1) {
DEFINE_NATIVE_ENTRY(LinkedHashMap_getHashMask, 1) {
const LinkedHashMap& map =
LinkedHashMap::CheckedHandle(arguments->NativeArgAt(0));
return map.ToArray();
return map.hash_mask();
}
DEFINE_NATIVE_ENTRY(LinkedHashMap_getModMark, 2) {
DEFINE_NATIVE_ENTRY(LinkedHashMap_setHashMask, 2) {
const LinkedHashMap& map =
LinkedHashMap::CheckedHandle(arguments->NativeArgAt(0));
GET_NON_NULL_NATIVE_ARGUMENT(Bool, create, arguments->NativeArgAt(1));
return map.GetModificationMark(create.value());
const Smi& hashMask =
Smi::CheckedHandle(arguments->NativeArgAt(1));
map.SetHashMask(hashMask.Value());
return Object::null();
}
DEFINE_NATIVE_ENTRY(LinkedHashMap_useInternal, 0) {
return Bool::Get(FLAG_use_internal_hash_map).raw();
// Slow-path getter for _HashVMBase._deletedKeys. When recognized (see
// MethodRecognizer::kLinkedHashMap_getDeletedKeys), calls are replaced by a
// direct Smi field load in the flow-graph builder.
DEFINE_NATIVE_ENTRY(LinkedHashMap_getDeletedKeys, 1) {
const LinkedHashMap& map =
LinkedHashMap::CheckedHandle(arguments->NativeArgAt(0));
return map.deleted_keys();
}
// Slow-path setter for _HashVMBase._deletedKeys (a Smi field, so no store
// barrier is needed when this is intrinsified). Returns null, as a Dart
// setter does.
DEFINE_NATIVE_ENTRY(LinkedHashMap_setDeletedKeys, 2) {
const LinkedHashMap& map =
LinkedHashMap::CheckedHandle(arguments->NativeArgAt(0));
const Smi& deletedKeys =
Smi::CheckedHandle(arguments->NativeArgAt(1));
map.SetDeletedKeys(deletedKeys.Value());
return Object::null();
}
// Slow-path getter for _HashVMBase._usedData. Normally replaced with a
// direct Smi field load when recognized by the flow-graph builder.
DEFINE_NATIVE_ENTRY(LinkedHashMap_getUsedData, 1) {
const LinkedHashMap& map =
LinkedHashMap::CheckedHandle(arguments->NativeArgAt(0));
return map.used_data();
}
// Slow-path setter for _HashVMBase._usedData (a Smi field, so no store
// barrier is needed when this is intrinsified). Returns null, as a Dart
// setter does.
DEFINE_NATIVE_ENTRY(LinkedHashMap_setUsedData, 2) {
const LinkedHashMap& map =
LinkedHashMap::CheckedHandle(arguments->NativeArgAt(0));
const Smi& usedData =
Smi::CheckedHandle(arguments->NativeArgAt(1));
map.SetUsedData(usedData.Value());
return Object::null();
}
} // namespace dart

View file

@ -1,118 +0,0 @@
// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
// VM-internalized implementation of a default-constructed LinkedHashMap.
// Currently calls the runtime for most operations: each core method below is
// a native entry implemented in the VM (linked_hash_map.cc).
class _InternalLinkedHashMap<K, V> implements HashMap<K, V>,
LinkedHashMap<K, V> {
factory _InternalLinkedHashMap() native "LinkedHashMap_allocate";
int get length native "LinkedHashMap_getLength";
V operator [](K key) native "LinkedHashMap_lookUp";
void operator []=(K key, V value) native "LinkedHashMap_insertOrUpdate";
V remove(K key) native "LinkedHashMap_remove";
void clear() native "LinkedHashMap_clear";
bool containsKey(K key) native "LinkedHashMap_containsKey";
bool get isEmpty => length == 0;
bool get isNotEmpty => !isEmpty;
// Flat snapshot of the entries in iteration order, with keys at even and
// values at odd indices; backs the keys/values iterables below.
List _toArray() native "LinkedHashMap_toArray";
// "Modification marks" are tokens used to detect concurrent modification.
// Considering only modifications (M) and iterator creation (I) events, e.g.:
// M, M, M, I, I, M, I, M, M, I, I, I, M ...
// a new mark is allocated at the start of each run of I's and cleared from
// the map at the start of each run of M's. Iterators' moveNext check whether
// the map's mark was changed or cleared since the iterator was created.
// TODO(koda): Consider a counter instead.
Object _getModMark(bool create) native "LinkedHashMap_getModMark";
void addAll(Map<K, V> other) {
other.forEach((K key, V value) {
this[key] = value;
});
}
V putIfAbsent(K key, Function ifAbsent) {
if (containsKey(key)) {
return this[key];
} else {
V value = ifAbsent();
this[key] = value;
return value;
}
}
bool containsValue(V value) {
// Uses "==" on each stored value, per the Map.containsValue contract.
for (V v in values) {
if (v == value) {
return true;
}
}
return false;
}
void forEach(Function f) {
for (K key in keys) {
f(key, this[key]);
}
}
// The even-indexed entries of toArray are the keys.
Iterable<K> get keys =>
new _ListStepIterable<K>(this, _getModMark(true), _toArray(), -2, 2);
// The odd-indexed entries of toArray are the values.
Iterable<V> get values =>
new _ListStepIterable<V>(this, _getModMark(true), _toArray(), -1, 2);
String toString() => Maps.mapToString(this);
}
// Iterates over a list from a given offset and step size.
// Used by _InternalLinkedHashMap for keys (offset -2) and values (offset -1),
// both with step 2 over the flat [k0, v0, k1, v1, ...] snapshot.
class _ListStepIterable<E> extends IterableBase<E> {
_InternalLinkedHashMap _map;
// Modification mark captured when this iterable was created; iterators
// compare it against the map's current mark to detect concurrent
// modification.
Object _modMark;
List _list;
int _offset;
int _step;
_ListStepIterable(this._map, this._modMark,
this._list, this._offset, this._step);
Iterator<E> get iterator =>
new _ListStepIterator(_map, _modMark, _list, _offset, _step);
// TODO(koda): Should this check for concurrent modification?
int get length => _map.length;
bool get isEmpty => length == 0;
bool get isNotEmpty => !isEmpty;
}
// Steps through a snapshot list with a fixed stride, starting one stride
// before the first element. Throws ConcurrentModificationError if the
// owning map's modification mark changes while iterating.
class _ListStepIterator<E> implements Iterator<E> {
_InternalLinkedHashMap _owner;
Object _mark;
List _elements;
int _cursor;
int _stride;
_ListStepIterator(this._owner, this._mark,
this._elements, this._cursor, this._stride);
bool moveNext() {
// Fail fast if the map was modified since this iterator was created.
if (_mark != _owner._getModMark(false)) {
throw new ConcurrentModificationError(_owner);
}
_cursor += _stride;
return _cursor < _elements.length;
}
// Null before the first moveNext and after iteration is exhausted.
E get current =>
(_cursor >= 0 && _cursor < _elements.length) ? _elements[_cursor] : null;
}

View file

@ -607,4 +607,42 @@ BENCHMARK(SimpleMessage) {
benchmark->set_score(elapsed_time);
}
// Benchmarks message-snapshot round-trips of a large map: builds a
// 100,000-entry map in Dart, then times 100 write/read cycles. The score is
// the total elapsed time over all iterations.
BENCHMARK(LargeMap) {
const char* kScript =
"makeMap() {\n"
" Map m = {};\n"
" for (int i = 0; i < 100000; ++i) m[i*13+i*(i>>7)] = i;\n"
" return m;\n"
"}";
Dart_Handle h_lib = TestCase::LoadTestScript(kScript, NULL);
EXPECT_VALID(h_lib);
Dart_Handle h_result = Dart_Invoke(h_lib, NewString("makeMap"), 0, NULL);
EXPECT_VALID(h_result);
Instance& map = Instance::Handle();
map ^= Api::UnwrapHandle(h_result);
const intptr_t kLoopCount = 100;
Isolate* isolate = Isolate::Current();
uint8_t* buffer;
Timer timer(true, "Large Map");
timer.Start();
for (intptr_t i = 0; i < kLoopCount; i++) {
// Per-iteration zone so handles/zone memory don't accumulate across
// the 100 round-trips.
StackZone zone(isolate);
MessageWriter writer(&buffer, &malloc_allocator, true);
writer.WriteMessage(map);
intptr_t buffer_len = writer.BytesWritten();
// Read object back from the snapshot.
MessageSnapshotReader reader(buffer,
buffer_len,
isolate,
zone.GetZone());
reader.ReadObject();
// The writer malloc-allocated the buffer; release it each iteration.
free(buffer);
}
timer.Stop();
int64_t elapsed_time = timer.TotalElapsedTime();
benchmark->set_score(elapsed_time);
}
} // namespace dart

View file

@ -357,15 +357,16 @@ namespace dart {
V(Internal_makeFixedListUnmodifiable, 1) \
V(Internal_inquireIs64Bit, 0) \
V(LinkedHashMap_allocate, 1) \
V(LinkedHashMap_getLength, 1) \
V(LinkedHashMap_insertOrUpdate, 3) \
V(LinkedHashMap_lookUp, 2) \
V(LinkedHashMap_containsKey, 2) \
V(LinkedHashMap_remove, 2) \
V(LinkedHashMap_clear, 1) \
V(LinkedHashMap_toArray, 1) \
V(LinkedHashMap_getModMark, 2) \
V(LinkedHashMap_useInternal, 0) \
V(LinkedHashMap_getIndex, 1) \
V(LinkedHashMap_setIndex, 2) \
V(LinkedHashMap_getData, 1) \
V(LinkedHashMap_setData, 2) \
V(LinkedHashMap_getHashMask, 1) \
V(LinkedHashMap_setHashMask, 2) \
V(LinkedHashMap_getUsedData, 1) \
V(LinkedHashMap_setUsedData, 2) \
V(LinkedHashMap_getDeletedKeys, 1) \
V(LinkedHashMap_setDeletedKeys, 2) \
V(WeakProperty_new, 2) \
V(WeakProperty_getKey, 1) \
V(WeakProperty_getValue, 1) \

View file

@ -219,6 +219,8 @@ void ClassFinalizer::VerifyBootstrapClasses() {
ASSERT(ImmutableArray::InstanceSize() == cls.instance_size());
cls = object_store->weak_property_class();
ASSERT(WeakProperty::InstanceSize() == cls.instance_size());
cls = object_store->linked_hash_map_class();
ASSERT(LinkedHashMap::InstanceSize() == cls.instance_size());
#endif // defined(DEBUG)
// Remember the currently pending classes.

View file

@ -3324,6 +3324,25 @@ LoadFieldInstr* EffectGraphVisitor::BuildNativeGetter(
}
// Builds the flow graph for a recognized native field setter: stores the
// setter's parameter into the receiver at the given offset, using the
// requested store barrier, and returns the null constant (a setter's
// result). Assumes the setter's formal parameter is named 'value'.
ConstantInstr* EffectGraphVisitor::DoNativeSetterStoreValue(
NativeBodyNode* node,
intptr_t offset,
StoreBarrierType emit_store_barrier) {
Value* receiver = Bind(BuildLoadThisVar(node->scope()));
// Look up the incoming 'value' parameter in the setter's scope.
LocalVariable* value_var =
node->scope()->LookupVariable(Symbols::Value(), true);
Value* value = Bind(new(Z) LoadLocalInstr(*value_var));
StoreInstanceFieldInstr* store = new(Z) StoreInstanceFieldInstr(
offset,
receiver,
value,
emit_store_barrier,
node->token_pos());
Do(store);
return new(Z) ConstantInstr(Object::ZoneHandle(Z, Object::null()));
}
void EffectGraphVisitor::VisitNativeBodyNode(NativeBodyNode* node) {
const Function& function = owner()->function();
if (!function.IsClosureFunction()) {
@ -3427,6 +3446,59 @@ void EffectGraphVisitor::VisitNativeBodyNode(NativeBodyNode* node) {
node, kind, Bigint::used_offset(),
Type::ZoneHandle(Z, Type::SmiType()), kSmiCid));
}
case MethodRecognizer::kLinkedHashMap_getIndex: {
return ReturnDefinition(BuildNativeGetter(
node, kind, LinkedHashMap::index_offset(),
Type::ZoneHandle(Z, Type::DynamicType()),
kTypedDataUint32ArrayCid));
}
case MethodRecognizer::kLinkedHashMap_setIndex: {
return ReturnDefinition(DoNativeSetterStoreValue(
node, LinkedHashMap::index_offset(), kEmitStoreBarrier));
}
case MethodRecognizer::kLinkedHashMap_getData: {
return ReturnDefinition(BuildNativeGetter(
node, kind, LinkedHashMap::data_offset(),
Type::ZoneHandle(Z, Type::DynamicType()),
kArrayCid));
}
case MethodRecognizer::kLinkedHashMap_setData: {
return ReturnDefinition(DoNativeSetterStoreValue(
node, LinkedHashMap::data_offset(), kEmitStoreBarrier));
}
case MethodRecognizer::kLinkedHashMap_getHashMask: {
return ReturnDefinition(BuildNativeGetter(
node, kind, LinkedHashMap::hash_mask_offset(),
Type::ZoneHandle(Z, Type::SmiType()),
kSmiCid));
}
case MethodRecognizer::kLinkedHashMap_setHashMask: {
// Smi field; no barrier needed.
return ReturnDefinition(DoNativeSetterStoreValue(
node, LinkedHashMap::hash_mask_offset(), kNoStoreBarrier));
}
case MethodRecognizer::kLinkedHashMap_getUsedData: {
return ReturnDefinition(BuildNativeGetter(
node, kind, LinkedHashMap::used_data_offset(),
Type::ZoneHandle(Z, Type::SmiType()),
kSmiCid));
}
case MethodRecognizer::kLinkedHashMap_setUsedData: {
// Smi field; no barrier needed.
return ReturnDefinition(DoNativeSetterStoreValue(
node, LinkedHashMap::used_data_offset(), kNoStoreBarrier));
}
case MethodRecognizer::kLinkedHashMap_getDeletedKeys: {
return ReturnDefinition(BuildNativeGetter(
node, kind, LinkedHashMap::deleted_keys_offset(),
Type::ZoneHandle(Z, Type::SmiType()),
kSmiCid));
}
case MethodRecognizer::kLinkedHashMap_setDeletedKeys: {
// Smi field; no barrier needed.
return ReturnDefinition(DoNativeSetterStoreValue(
node, LinkedHashMap::deleted_keys_offset(), kNoStoreBarrier));
}
case MethodRecognizer::kBigint_getNeg: {
return ReturnDefinition(BuildNativeGetter(
node, kind, Bigint::neg_offset(),

View file

@ -342,6 +342,11 @@ class EffectGraphVisitor : public AstNodeVisitor {
intptr_t offset,
const Type& type,
intptr_t class_id);
// Assumes setter parameter is named 'value'. Returns null constant.
ConstantInstr* DoNativeSetterStoreValue(
NativeBodyNode* node,
intptr_t offset,
StoreBarrierType emit_store_barrier);
// Helpers for translating parts of the AST.
void BuildPushArguments(const ArgumentListNode& node,

View file

@ -147,6 +147,17 @@ namespace dart {
V(_Bigint, get:_neg, Bigint_getNeg, 1151633263) \
V(_Bigint, get:_used, Bigint_getUsed, 1308648707) \
V(_Bigint, get:_digits, Bigint_getDigits, 1408181836) \
V(_HashVMBase, get:_index, LinkedHashMap_getIndex, 1431607529) \
V(_HashVMBase, set:_index, LinkedHashMap_setIndex, 2007926178) \
V(_HashVMBase, get:_data, LinkedHashMap_getData, 958070909) \
V(_HashVMBase, set:_data, LinkedHashMap_setData, 1134236592) \
V(_HashVMBase, get:_usedData, LinkedHashMap_getUsedData, 421669312) \
V(_HashVMBase, set:_usedData, LinkedHashMap_setUsedData, 1152062737) \
V(_HashVMBase, get:_hashMask, LinkedHashMap_getHashMask, 969476186) \
V(_HashVMBase, set:_hashMask, LinkedHashMap_setHashMask, 1781420082) \
V(_HashVMBase, get:_deletedKeys, LinkedHashMap_getDeletedKeys, 63633039) \
V(_HashVMBase, set:_deletedKeys, LinkedHashMap_setDeletedKeys, 2079107858) \
// List of intrinsics:
// (class-name, function-name, intrinsification method, fingerprint).
@ -397,6 +408,17 @@ namespace dart {
V(_Bigint, get:_neg, Bigint_getNeg, 1151633263) \
V(_Bigint, get:_used, Bigint_getUsed, 1308648707) \
V(_Bigint, get:_digits, Bigint_getDigits, 1408181836) \
V(_HashVMBase, get:_index, LinkedHashMap_getIndex, 1431607529) \
V(_HashVMBase, set:_index, LinkedHashMap_setIndex, 2007926178) \
V(_HashVMBase, get:_data, LinkedHashMap_getData, 958070909) \
V(_HashVMBase, set:_data, LinkedHashMap_setData, 1134236592) \
V(_HashVMBase, get:_usedData, LinkedHashMap_getUsedData, 421669312) \
V(_HashVMBase, set:_usedData, LinkedHashMap_setUsedData, 1152062737) \
V(_HashVMBase, get:_hashMask, LinkedHashMap_getHashMask, 969476186) \
V(_HashVMBase, set:_hashMask, LinkedHashMap_setHashMask, 1781420082) \
V(_HashVMBase, get:_deletedKeys, LinkedHashMap_getDeletedKeys, 63633039) \
V(_HashVMBase, set:_deletedKeys, LinkedHashMap_setDeletedKeys, 2079107858) \
// A list of core function that should never be inlined.
#define INLINE_BLACK_LIST(V) \

View file

@ -965,10 +965,11 @@ RawError* Object::Init(Isolate* isolate) {
cls = Class::New<Array>();
object_store->set_array_class(cls);
// Array and ImmutableArray are the only VM classes that are parameterized.
// Since they are pre-finalized, CalculateFieldOffsets() is not called, so we
// need to set the offset of their type_arguments_ field, which is explicitly
// declared in RawArray.
// VM classes that are parameterized (Array, ImmutableArray,
// GrowableObjectArray, and LinkedHashMap) are also pre-finalized,
// so CalculateFieldOffsets() is not called, so we need to set the
// offset of their type_arguments_ field, which is explicitly
// declared in their respective Raw* classes.
cls.set_type_arguments_field_offset(Array::type_arguments_offset());
cls.set_num_type_arguments(1);
@ -1225,7 +1226,6 @@ RawError* Object::Init(Isolate* isolate) {
}
ASSERT(!lib.IsNull());
ASSERT(lib.raw() == Library::CollectionLibrary());
cls = Class::New<LinkedHashMap>();
object_store->set_linked_hash_map_class(cls);
cls.set_type_arguments_field_offset(LinkedHashMap::type_arguments_offset());
@ -10482,6 +10482,7 @@ void Library::CheckFunctionFingerprints() {
all_libs.Add(&Library::ZoneHandle(Library::MathLibrary()));
all_libs.Add(&Library::ZoneHandle(Library::TypedDataLibrary()));
all_libs.Add(&Library::ZoneHandle(Library::CollectionLibrary()));
OTHER_RECOGNIZED_LIST(CHECK_FINGERPRINTS);
INLINE_WHITE_LIST(CHECK_FINGERPRINTS);
INLINE_BLACK_LIST(CHECK_FINGERPRINTS);
@ -19593,103 +19594,45 @@ class DefaultHashTraits {
typedef EnumIndexHashMap<DefaultHashTraits> EnumIndexDefaultMap;
intptr_t LinkedHashMap::Length() const {
EnumIndexDefaultMap map(data());
intptr_t result = map.NumOccupied();
ASSERT(map.Release().raw() == data());
return result;
// Allocates a map with the same initial capacity and hash mask as a
// default-constructed map built on the Dart side (_HashFieldBase).
RawLinkedHashMap* LinkedHashMap::NewDefault(Heap::Space space) {
// Keep this in sync with Dart implementation (lib/compact_hash.dart).
static const intptr_t kInitialIndexBits = 3;
static const intptr_t kInitialIndexSize = 1 << (kInitialIndexBits + 1);
const Array& data = Array::Handle(Array::New(kInitialIndexSize, space));
const TypedData& index = TypedData::Handle(TypedData::New(
kTypedDataUint32ArrayCid, kInitialIndexSize, space));
// On 32-bit, the top bits of the hash pattern are unused so the mask fits
// in a Smi (avoids Mint allocation); mirrors _HashFieldBase._hashMask.
static const intptr_t kInitialHashMask =
#if defined(ARCH_IS_64_BIT)
(1 << (32 - kInitialIndexBits)) - 1;
#else
(1 << (30 - kInitialIndexBits)) - 1;
#endif
return LinkedHashMap::New(data, index, kInitialHashMask, 0, 0, space);
}
void LinkedHashMap::InsertOrUpdate(const Object& key,
const Object& value) const {
ASSERT(!IsNull());
EnumIndexDefaultMap map(data());
if (!map.UpdateOrInsert(key, value)) {
SetModified();
}
StorePointer(&raw_ptr()->data_, map.Release().raw());
}
RawObject* LinkedHashMap::LookUp(const Object& key) const {
ASSERT(!IsNull());
EnumIndexDefaultMap map(data());
{
NoSafepointScope no_safepoint;
RawObject* result = map.GetOrNull(key);
ASSERT(map.Release().raw() == data());
return result;
}
}
bool LinkedHashMap::Contains(const Object& key) const {
ASSERT(!IsNull());
EnumIndexDefaultMap map(data());
bool result = map.ContainsKey(key);
ASSERT(map.Release().raw() == data());
return result;
}
RawObject* LinkedHashMap::Remove(const Object& key) const {
ASSERT(!IsNull());
EnumIndexDefaultMap map(data());
// TODO(koda): Make 'Remove' also return the old value.
const PassiveObject& result = PassiveObject::Handle(map.GetOrNull(key));
if (map.Remove(key)) {
SetModified();
}
StorePointer(&raw_ptr()->data_, map.Release().raw());
return result.raw();
}
void LinkedHashMap::Clear() const {
ASSERT(!IsNull());
if (Length() != 0) {
EnumIndexDefaultMap map(data());
map.Initialize();
SetModified();
StorePointer(&raw_ptr()->data_, map.Release().raw());
}
}
RawArray* LinkedHashMap::ToArray() const {
EnumIndexDefaultMap map(data());
const Array& result = Array::Handle(HashTables::ToArray(map, true));
ASSERT(map.Release().raw() == data());
return result.raw();
}
void LinkedHashMap::SetModified() const {
StorePointer(&raw_ptr()->cme_mark_, Instance::null());
}
RawInstance* LinkedHashMap::GetModificationMark(bool create) const {
if (create && raw_ptr()->cme_mark_ == Instance::null()) {
Isolate* isolate = Isolate::Current();
const Class& object_class =
Class::Handle(isolate, isolate->object_store()->object_class());
const Instance& current =
Instance::Handle(isolate, Instance::New(object_class));
StorePointer(&raw_ptr()->cme_mark_, current.raw());
}
return raw_ptr()->cme_mark_;
}
RawLinkedHashMap* LinkedHashMap::New(Heap::Space space) {
// Constructs a fully-initialized map from pre-built backing stores:
// 'data' holds keys/values at even/odd indices in insertion order, 'index'
// is the open-addressing hash index, and the remaining arguments seed the
// corresponding Smi fields (used_data/deleted_keys start at 0 for an empty
// map; see NewDefault).
RawLinkedHashMap* LinkedHashMap::New(const Array& data,
const TypedData& index,
intptr_t hash_mask,
intptr_t used_data,
intptr_t deleted_keys,
Heap::Space space) {
ASSERT(Isolate::Current()->object_store()->linked_hash_map_class()
!= Class::null());
LinkedHashMap& result = LinkedHashMap::Handle(
LinkedHashMap::NewUninitialized(space));
result.SetData(data);
result.SetIndex(index);
result.SetHashMask(hash_mask);
result.SetUsedData(used_data);
result.SetDeletedKeys(deleted_keys);
return result.raw();
}
RawLinkedHashMap* LinkedHashMap::NewUninitialized(Heap::Space space) {
ASSERT(Isolate::Current()->object_store()->linked_hash_map_class()
!= Class::null());
static const intptr_t kInitialCapacity = 4;
const Array& data =
Array::Handle(HashTables::New<EnumIndexDefaultMap>(kInitialCapacity,
space));
LinkedHashMap& result = LinkedHashMap::Handle();
{
RawObject* raw = Object::Allocate(LinkedHashMap::kClassId,
@ -19697,8 +19640,6 @@ RawLinkedHashMap* LinkedHashMap::New(Heap::Space space) {
space);
NoSafepointScope no_safepoint;
result ^= raw;
result.SetData(data);
result.SetModified();
}
return result.raw();
}

View file

@ -2297,7 +2297,7 @@ FOR_EACH_FUNCTION_KIND_BIT(DEFINE_ACCESSORS)
kKindTagPos = 0,
kKindTagSize = 4,
kRecognizedTagPos = kKindTagPos + kKindTagSize,
kRecognizedTagSize = 8,
kRecognizedTagSize = 9,
kModifierPos = kRecognizedTagPos + kRecognizedTagSize,
kModifierSize = 2,
kLastModifierBitPos = kModifierPos + (kModifierSize - 1),
@ -6678,53 +6678,6 @@ class GrowableObjectArray : public Instance {
};
// Corresponds to
// - "new Map()",
// - non-const map literals, and
// - the default constructor of LinkedHashMap in dart:collection.
class LinkedHashMap : public Instance {
public:
intptr_t Length() const;
RawObject* LookUp(const Object& key) const;
void InsertOrUpdate(const Object& key, const Object& value) const;
bool Contains(const Object& key) const;
RawObject* Remove(const Object& key) const;
void Clear() const;
// List of key, value pairs in iteration (i.e., key insertion) order.
RawArray* ToArray() const;
static intptr_t InstanceSize() {
return RoundedAllocationSize(sizeof(RawLinkedHashMap));
}
static RawLinkedHashMap* New(Heap::Space space = Heap::kNew);
virtual RawTypeArguments* GetTypeArguments() const {
return raw_ptr()->type_arguments_;
}
virtual void SetTypeArguments(const TypeArguments& value) const {
ASSERT(value.IsNull() || ((value.Length() >= 2) && value.IsInstantiated()));
StorePointer(&raw_ptr()->type_arguments_, value.raw());
}
static intptr_t type_arguments_offset() {
return OFFSET_OF(RawLinkedHashMap, type_arguments_);
}
// Called whenever the set of keys changes.
void SetModified() const;
RawInstance* GetModificationMark(bool create) const;
private:
RawArray* data() const { return raw_ptr()->data_; }
void SetData(const Array& value) const {
StorePointer(&raw_ptr()->data_, value.raw());
}
FINAL_HEAP_OBJECT_IMPLEMENTATION(LinkedHashMap, Instance);
friend class Class;
};
class Float32x4 : public Instance {
public:
static RawFloat32x4* New(float value0, float value1, float value2,
@ -7195,6 +7148,97 @@ class ByteBuffer : public AllStatic {
};
// Corresponds to
// - "new Map()",
// - non-const map literals, and
// - the default constructor of LinkedHashMap in dart:collection.
//
// VM-internalized version of the compact linked hash map: the actual map
// operations are implemented in Dart (_InternalLinkedHashMap in
// collection_patch.dart / compact_hash.dart); this class only exposes the
// raw fields so the VM can allocate, inspect and serialize instances. The
// field names mirror the Dart-side fields (_index, _hashMask, _data,
// _usedData, _deletedKeys).
class LinkedHashMap : public Instance {
 public:
  static intptr_t InstanceSize() {
    return RoundedAllocationSize(sizeof(RawLinkedHashMap));
  }

  // Allocates a map with some default capacity, just like "new Map()".
  static RawLinkedHashMap* NewDefault(Heap::Space space = Heap::kNew);
  // Allocates a map with explicitly provided contents/bookkeeping fields.
  static RawLinkedHashMap* New(const Array& data,
                               const TypedData& index,
                               intptr_t hash_mask,
                               intptr_t used_data,
                               intptr_t deleted_keys,
                               Heap::Space space = Heap::kNew);

  virtual RawTypeArguments* GetTypeArguments() const {
    return raw_ptr()->type_arguments_;
  }
  virtual void SetTypeArguments(const TypeArguments& value) const {
    // A map carries exactly <K, V>, hence at least two type arguments.
    ASSERT(value.IsNull() || ((value.Length() >= 2) && value.IsInstantiated()));
    // StorePointer performs the GC write barrier required for heap stores.
    StorePointer(&raw_ptr()->type_arguments_, value.raw());
  }
  static intptr_t type_arguments_offset() {
    return OFFSET_OF(RawLinkedHashMap, type_arguments_);
  }

  // Hash table index: each occupied slot encodes a (hash pattern, entry
  // index into data_) pair — see _HashBase in compact_hash.dart.
  RawTypedData* index() const {
    return raw_ptr()->index_;
  }
  void SetIndex(const TypedData& value) const {
    StorePointer(&raw_ptr()->index_, value.raw());
  }
  static intptr_t index_offset() {
    return OFFSET_OF(RawLinkedHashMap, index_);
  }

  // Key/value storage in insertion order (presumably alternating key,
  // value slots, matching the Dart-side _data — defined in compact_hash.dart).
  RawArray* data() const {
    return raw_ptr()->data_;
  }
  void SetData(const Array& value) const {
    StorePointer(&raw_ptr()->data_, value.raw());
  }
  static intptr_t data_offset() {
    return OFFSET_OF(RawLinkedHashMap, data_);
  }

  // Mask applied to hash codes when probing index_ (mirrors _hashMask).
  RawSmi* hash_mask() const {
    return raw_ptr()->hash_mask_;
  }
  void SetHashMask(intptr_t value) const {
    // StoreSmi: Smis are immediate values, so no write barrier is needed.
    StoreSmi(&raw_ptr()->hash_mask_, Smi::New(value));
  }
  static intptr_t hash_mask_offset() {
    return OFFSET_OF(RawLinkedHashMap, hash_mask_);
  }

  // Number of slots of data_ in use (mirrors _usedData).
  RawSmi* used_data() const {
    return raw_ptr()->used_data_;
  }
  void SetUsedData(intptr_t value) const {
    StoreSmi(&raw_ptr()->used_data_, Smi::New(value));
  }
  static intptr_t used_data_offset() {
    return OFFSET_OF(RawLinkedHashMap, used_data_);
  }

  // Number of deleted keys still occupying space (mirrors _deletedKeys).
  RawSmi* deleted_keys() const {
    return raw_ptr()->deleted_keys_;
  }
  void SetDeletedKeys(intptr_t value) const {
    StoreSmi(&raw_ptr()->deleted_keys_, Smi::New(value));
  }
  static intptr_t deleted_keys_offset() {
    return OFFSET_OF(RawLinkedHashMap, deleted_keys_);
  }

 private:
  FINAL_HEAP_OBJECT_IMPLEMENTATION(LinkedHashMap, Instance);

  // Allocate a map, but leave all fields set to null.
  // Used during deserialization (since map might contain itself as key/value).
  static RawLinkedHashMap* NewUninitialized(Heap::Space space = Heap::kNew);

  friend class Class;
};
class Closure : public AllStatic {
public:
static RawFunction* function(const Instance& closure) {

View file

@ -4469,7 +4469,8 @@ TEST_CASE(PrintJSONPrimitives) {
// LinkedHashMap reference
{
JSONStream js;
const LinkedHashMap& array = LinkedHashMap::Handle(LinkedHashMap::New());
const LinkedHashMap& array =
LinkedHashMap::Handle(LinkedHashMap::NewDefault());
array.PrintJSON(&js, true);
elideSubstring("classes", js.ToCString(), buffer);
elideSubstring("objects", buffer, buffer);
@ -4608,4 +4609,71 @@ TEST_CASE(HashCode) {
EXPECT(result.IsIdenticalTo(expected));
}
// Asserts (via EXPECT) that two compact linked hash maps |a| and |b| have
// structurally identical internals: same bookkeeping counters
// (_usedData, _deletedKeys, _hashMask), same index table contents, and
// element-wise identical backing data (allowing a self-reference in |a| to
// correspond to a self-reference in |b|). The comparison runs as a Dart
// closure evaluated inside dart:collection, which has access to the
// private fields.
static void CheckIdenticalHashStructure(const Instance& a, const Instance& b) {
  // Immediately-invoked closure literal taking the two maps as parameters.
  const char* kScript =
      "(a, b) {\n"
      "  if (a._usedData != b._usedData ||\n"
      "      a._deletedKeys != b._deletedKeys ||\n"
      "      a._hashMask != b._hashMask ||\n"
      "      a._index.length != b._index.length ||\n"
      "      a._data.length != b._data.length) {\n"
      "    return false;\n"
      "  }\n"
      "  for (var i = 0; i < a._index.length; ++i) {\n"
      "    if (a._index[i] != b._index[i]) {\n"
      "      return false;\n"
      "    }\n"
      "  }\n"
      "  for (var i = 0; i < a._data.length; ++i) {\n"
      "    var ad = a._data[i];\n"
      "    var bd = b._data[i];\n"
      "    if (!identical(ad, bd) && !(ad == a && bd == b)) {\n"
      "      return false;\n"
      "    }\n"
      "  }\n"
      "  return true;\n"
      "}(a, b)";
  // Bind the maps to the parameter names "a" and "b" for Evaluate.
  // |name| is deliberately reused for each String allocation.
  String& name = String::Handle();
  Array& param_names = Array::Handle(Array::New(2));
  name = String::New("a");
  param_names.SetAt(0, name);
  name = String::New("b");
  param_names.SetAt(1, name);
  Array& param_values = Array::Handle(Array::New(2));
  param_values.SetAt(0, a);
  param_values.SetAt(1, b);
  name = String::New(kScript);
  // Evaluate in dart:collection so the script can touch private members.
  Library& lib = Library::Handle(Library::CollectionLibrary());
  EXPECT(lib.Evaluate(name, param_names, param_values) == Bool::True().raw());
}
// Verifies that LinkedHashMap::NewDefault() produces exactly the same
// initial structure (index size, hash mask, data array, counters) as the
// Dart-side compact hash map constructor, so VM-allocated and
// Dart-allocated maps are interchangeable.
TEST_CASE(LinkedHashMap) {
  // Check that initial index size and hash mask match in Dart vs. C++.
  // 1. Create an empty custom linked hash map in Dart.
  // (A custom equals/hashCode forces the compact Dart implementation
  // rather than any specialized default path.)
  const char* kScript =
      "import 'dart:collection';\n"
      "makeMap() {\n"
      "  Function eq = (a, b) => true;\n"
      "  Function hc = (a) => 42;\n"
      "  return new LinkedHashMap(equals: eq, hashCode: hc);\n"
      "}";
  Dart_Handle h_lib = TestCase::LoadTestScript(kScript, NULL);
  EXPECT_VALID(h_lib);
  Library& lib = Library::Handle();
  lib ^= Api::UnwrapHandle(h_lib);
  EXPECT(!lib.IsNull());
  Dart_Handle h_result = Dart_Invoke(h_lib, NewString("makeMap"), 0, NULL);
  EXPECT_VALID(h_result);

  // 2. Create an empty internalized LinkedHashMap in C++.
  Instance& dart_map = Instance::Handle();
  dart_map ^= Api::UnwrapHandle(h_result);
  LinkedHashMap& cc_map = LinkedHashMap::Handle(LinkedHashMap::NewDefault());

  // 3. Expect them to have identical structure.
  CheckIdenticalHashStructure(dart_map, cc_map);
}
} // namespace dart

View file

@ -1698,12 +1698,16 @@ class RawLinkedHashMap : public RawInstance {
return reinterpret_cast<RawObject**>(&ptr()->type_arguments_);
}
RawTypeArguments* type_arguments_;
RawInstance* cme_mark_;
RawTypedData* index_;
RawSmi* hash_mask_;
RawArray* data_;
RawSmi* used_data_;
RawSmi* deleted_keys_;
RawObject** to() {
return reinterpret_cast<RawObject**>(&ptr()->data_);
return reinterpret_cast<RawObject**>(&ptr()->deleted_keys_);
}
friend class SnapshotReader;
};

View file

@ -2218,13 +2218,19 @@ RawLinkedHashMap* LinkedHashMap::ReadFrom(SnapshotReader* reader,
// we don't reach this.
UNREACHABLE();
} else {
map = LinkedHashMap::New(HEAP_SPACE(kind));
// Since the map might contain itself as a key or value, allocate first.
map = LinkedHashMap::NewUninitialized(HEAP_SPACE(kind));
}
reader->AddBackRef(object_id, &map, kIsDeserialized);
*(reader->ArrayHandle()) ^= reader->ReadObjectImpl();
map.SetData(*(reader->ArrayHandle()));
*(reader->TypeArgumentsHandle()) = reader->ArrayHandle()->GetTypeArguments();
map.SetTypeArguments(*(reader->TypeArgumentsHandle()));
// Set the object tags.
map.set_tags(tags);
// Read and set the fields.
intptr_t num_flds = (map.raw()->to() - map.raw()->from());
for (intptr_t i = 0; i <= num_flds; i++) {
(*reader->PassiveObjectHandle()) = reader->ReadObjectRef();
map.StorePointer((map.raw()->from() + i),
reader->PassiveObjectHandle()->raw());
}
return map.raw();
}
@ -2246,10 +2252,12 @@ void RawLinkedHashMap::WriteTo(SnapshotWriter* writer,
writer->WriteIndexedObject(kLinkedHashMapCid);
writer->WriteTags(writer->GetObjectTags(this));
// Write out the backing array.
// TODO(koda): Serialize as pairs (like ToArray) instead, to reduce space and
// support per-isolate salted hash codes.
writer->WriteObjectImpl(ptr()->data_);
// Write out all the object pointer fields.
// TODO(koda): Serialize only used parts of data_ (after compaction), to
// reduce space and support per-isolate salted hash codes. All allowed keys
// have types for which we can rehash without running Dart code.
SnapshotWriterVisitor visitor(writer);
visitor.VisitPointers(from(), to());
}

View file

@ -188,6 +188,7 @@ SnapshotReader::SnapshotReader(
tokens_(Array::Handle(isolate)),
stream_(TokenStream::Handle(isolate)),
data_(ExternalTypedData::Handle(isolate)),
typed_data_(TypedData::Handle(isolate)),
error_(UnhandledException::Handle(isolate)),
max_vm_isolate_object_id_(
Object::vm_isolate_snapshot_object_table().Length()),

View file

@ -77,6 +77,7 @@ class RawUnresolvedClass;
class String;
class TokenStream;
class TypeArguments;
class TypedData;
class UnhandledException;
// Serialized object header encoding is as follows:
@ -275,6 +276,7 @@ class SnapshotReader : public BaseReader {
Array* TokensHandle() { return &tokens_; }
TokenStream* StreamHandle() { return &stream_; }
ExternalTypedData* DataHandle() { return &data_; }
TypedData* TypedDataHandle() { return &typed_data_; }
Snapshot::Kind kind() const { return kind_; }
// Reads an object.
@ -400,6 +402,7 @@ class SnapshotReader : public BaseReader {
Array& tokens_; // Temporary tokens handle.
TokenStream& stream_; // Temporary token stream handle.
ExternalTypedData& data_; // Temporary stream data handle.
TypedData& typed_data_; // Temporary typed data handle.
UnhandledException& error_; // Error handle.
intptr_t max_vm_isolate_object_id_;
ZoneGrowableArray<BackRefNode>* backward_references_;