Reapply "New implementation of {,Linked}Hash{Set,Map}."

Review URL: https://codereview.chromium.org//12212211

git-svn-id: https://dart.googlecode.com/svn/branches/bleeding_edge/dart@18573 260f80e4-7a28-3924-810f-c04153c831b5
lrn@google.com 2013-02-15 13:07:22 +00:00
parent 349cd9eae0
commit ecbf4478c2
23 changed files with 1693 additions and 600 deletions
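
For orientation, a minimal usage sketch of the reimplemented classes (illustrative only, not taken from this change): HashMap makes no promise about iteration order, while LinkedHashMap and LinkedHashSet iterate in insertion order.

import 'dart:collection';

void main() {
  var hashed = new HashMap<String, int>();
  var linked = new LinkedHashMap<String, int>();
  for (var key in ['b', 'a', 'c']) {
    hashed[key] = key.codeUnitAt(0);
    linked[key] = key.codeUnitAt(0);
  }
  print(linked.keys.toList()); // Always [b, a, c]: insertion order.
  print(hashed.keys.toList()); // Some implementation-defined order.
}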

View file

@@ -52,6 +52,9 @@ intl/test/date_time_format_http_request_test: Skip # Timeout.
intl/test/find_default_locale_browser_test: Skip
intl/test/date_time_format_http_request_test: Skip
# Fails to recognize HashMap as an instance of Map.
analyzer-experimental/test/generated/ast_test: Fail
[ $runtime == vm && $system == windows ]
intl/test/find_default_locale_standalone_test: Fail # Issue 8110

View file

@@ -306,7 +306,7 @@ class FieldTypesRegistry {
// TODO(sgjesse): Handle field types for classes with more than one
// constructor.
if (ctors.length == 2) {
optimizedFunctions.forEach((Element field, _) {
optimizedFunctions.keys.toList().forEach((Element field) {
if (identical(field.enclosingElement, cls)) {
scheduleRecompilation(field);
}
@@ -347,12 +347,12 @@ class FieldTypesRegistry {
// TODO(sgjesse): Take the type of the setter into account.
if (setterSelectorsUsed.contains(setter.name)) return;
setterSelectorsUsed.add(setter.name);
optimizedStaticFunctions.forEach((Element field, _) {
optimizedStaticFunctions.keys.toList().forEach((Element field) {
if (field.name == setter.name) {
scheduleRecompilation(field);
}
});
optimizedFunctions.forEach((Element field, _) {
optimizedFunctions.keys.toList().forEach((Element field) {
if (field.name == setter.name) {
scheduleRecompilation(field);
}
@@ -622,14 +622,14 @@ class JavaScriptBackend extends Backend {
CodeEmitterTask emitter;
/**
* The generated code as a js AST for compiled methods.
* The generated code as a js AST for compiled methods.
*/
Map<Element, jsAst.Expression> get generatedCode {
return compiler.enqueuer.codegen.generatedCode;
}
/**
* The generated code as a js AST for compiled bailout methods.
* The generated code as a js AST for compiled bailout methods.
*/
final Map<Element, jsAst.Expression> generatedBailoutCode =
new Map<Element, jsAst.Expression>();
@@ -909,7 +909,7 @@ class JavaScriptBackend extends Backend {
// The backend will use a literal list to initialize the entries
// of the map.
if (enqueuer.isResolutionQueue) {
enqueuer.registerInstantiatedClass(compiler.listClass);
enqueuer.registerInstantiatedClass(compiler.listClass);
enqueuer.registerInstantiatedClass(compiler.mapLiteralClass);
}
}
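
The hunks above switch several forEach calls over these maps to map.keys.toList().forEach(...), presumably because the callbacks can mutate the maps and the new implementations detect that as concurrent modification. A small sketch of the pattern (the map name and keys are made up):

import 'dart:collection';

void main() {
  var pending = new HashMap<String, int>()..['a'] = 1..['b'] = 2;
  // Iterate a snapshot of the keys so the loop body may mutate the map
  // without tripping the concurrent-modification check.
  for (var key in pending.keys.toList()) {
    if (key == 'a') pending.remove(key);
  }
  print(pending); // {b: 2}
}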

View file

@@ -362,7 +362,7 @@ abstract class NativeEnqueuerBase implements NativeEnqueuer {
cause,
[String reason]) {
Iterable matches = unusedClasses.where(predicate);
matches.forEach((c) => enqueueClass(c, cause));
matches.toList().forEach((c) => enqueueClass(c, cause));
}
onFirstNativeClass() {

View file

@@ -134,7 +134,6 @@ class SourceMapBuilder {
int indexOf(List<String> list, String value, Map<String, int> map) {
return map.putIfAbsent(value, () {
int index = list.length;
map[value] = index;
list.add(value);
return index;
});

View file

@@ -9,8 +9,12 @@ import 'dart:_collection-dev';
part 'arrays.dart';
part 'collections.dart';
part 'iterator.dart';
part 'map.dart';
part 'maps.dart';
part 'queue.dart';
part 'set.dart';
part 'splay_tree.dart';
part 'hash_table.dart';
part 'hash_set.dart';
part 'hash_map.dart';
part 'linked_hash_table.dart';
part 'linked_hash_set.dart';
part 'linked_hash_map.dart';

View file

@@ -7,11 +7,15 @@
'sources': [
'arrays.dart',
'collections.dart',
'hash_map.dart',
'hash_set.dart',
'hash_table.dart',
'iterator.dart',
'map.dart',
'linked_hash_map.dart',
'linked_hash_set.dart',
'linked_hash_table.dart',
'maps.dart',
'queue.dart',
'set.dart',
'splay_tree.dart',
],
}

View file

@@ -0,0 +1,136 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
part of dart.collection;
class _HashMapTable<K, V> extends _HashTable<K> {
static const int _INITIAL_CAPACITY = 8;
static const int _VALUE_INDEX = 1;
_HashMapTable() : super(_INITIAL_CAPACITY);
int get _entrySize => 2;
V _value(int offset) => _table[offset + _VALUE_INDEX];
void _setValue(int offset, V value) { _table[offset + _VALUE_INDEX] = value; }
_copyEntry(List fromTable, int fromOffset, int toOffset) {
_table[toOffset + _VALUE_INDEX] = fromTable[fromOffset + _VALUE_INDEX];
}
}
class HashMap<K, V> implements Map<K, V> {
final _HashMapTable<K, V> _hashTable;
HashMap() : _hashTable = new _HashMapTable<K, V>() {
_hashTable._container = this;
}
factory HashMap.from(Map<K, V> other) {
return new HashMap<K, V>()..addAll(other);
}
bool containsKey(K key) {
return _hashTable._get(key) >= 0;
}
bool containsValue(V value) {
List table = _hashTable._table;
int entrySize = _hashTable._entrySize;
for (int offset = 0; offset < table.length; offset += entrySize) {
if (!_hashTable._isFree(table[offset]) &&
_hashTable._value(offset) == value) {
return true;
}
}
return false;
}
void addAll(Map<K, V> other) {
other.forEach((K key, V value) {
int offset = _hashTable._put(key);
_hashTable._setValue(offset, value);
_hashTable._checkCapacity();
});
}
V operator [](K key) {
int offset = _hashTable._get(key);
if (offset >= 0) return _hashTable._value(offset);
return null;
}
void operator []=(K key, V value) {
int offset = _hashTable._put(key);
_hashTable._setValue(offset, value);
_hashTable._checkCapacity();
}
V putIfAbsent(K key, V ifAbsent()) {
int offset = _hashTable._probeForAdd(_hashTable._hashCodeOf(key), key);
Object entry = _hashTable._table[offset];
if (!_hashTable._isFree(entry)) {
return _hashTable._value(offset);
}
int modificationCount = _hashTable._modificationCount;
V value = ifAbsent();
if (modificationCount == _hashTable._modificationCount) {
_hashTable._setKey(offset, key);
_hashTable._setValue(offset, value);
if (entry == null) {
_hashTable._entryCount++;
_hashTable._checkCapacity();
} else {
assert(identical(entry, _TOMBSTONE));
_hashTable._deletedCount--;
}
_hashTable._recordModification();
} else {
// The table might have changed, so we can't trust [offset] any more.
// Do another lookup before setting the value.
offset = _hashTable._put(key);
_hashTable._setValue(offset, value);
_hashTable._checkCapacity();
}
return value;
}
V remove(K key) {
int offset = _hashTable._remove(key);
if (offset < 0) return null;
V oldValue = _hashTable._value(offset);
_hashTable._setValue(offset, null);
_hashTable._checkCapacity();
return oldValue;
}
void clear() {
_hashTable._clear();
}
void forEach(void action (K key, V value)) {
int modificationCount = _hashTable._modificationCount;
List table = _hashTable._table;
int entrySize = _hashTable._entrySize;
for (int offset = 0; offset < table.length; offset += entrySize) {
Object entry = table[offset];
if (!_hashTable._isFree(entry)) {
K key = entry;
V value = _hashTable._value(offset);
action(key, value);
_hashTable._checkModification(modificationCount);
}
}
}
Iterable<K> get keys => new _HashTableKeyIterable<K>(_hashTable);
Iterable<V> get values =>
new _HashTableValueIterable<V>(_hashTable, _HashMapTable._VALUE_INDEX);
int get length => _hashTable._elementCount;
bool get isEmpty => _hashTable._elementCount == 0;
String toString() => Maps.mapToString(this);
}
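
The putIfAbsent above re-probes when ifAbsent has changed the modification count. Although the Map documentation later in this change calls modifying the map during ifAbsent an error, this implementation tolerates it by doing another lookup before storing, as the hash_map2 test below also exercises. A rough sketch with made-up keys:

import 'dart:collection';

void main() {
  var map = new HashMap<String, int>();
  var v = map.putIfAbsent('answer', () {
    map['other'] = 1; // May grow the table while 'answer' is being added.
    return 42;
  });
  print(v);             // 42
  print(map['answer']); // 42
  print(map['other']);  // 1
}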

View file

@@ -0,0 +1,115 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
part of dart.collection;
class HashSet<E> extends Collection<E> implements Set<E> {
static const int _INITIAL_CAPACITY = 8;
final _HashTable<E> _table;
HashSet() : _table = new _HashTable(_INITIAL_CAPACITY) {
_table._container = this;
}
factory HashSet.from(Iterable<E> iterable) {
return new HashSet<E>()..addAll(iterable);
}
// Iterable.
Iterator<E> get iterator => new _HashTableKeyIterator<E>(_table);
bool get isEmpty => _table._elementCount == 0;
bool contains(Object object) => _table._get(object) >= 0;
// Collection.
void add(E element) {
_table._put(element);
_table._checkCapacity();
}
void addAll(Iterable<E> objects) {
for (E object in objects) {
_table._put(object);
_table._checkCapacity();
}
}
bool remove(Object object) {
int offset = _table._remove(object);
_table._checkCapacity();
return offset >= 0;
}
void removeAll(Iterable objectsToRemove) {
for (Object object in objectsToRemove) {
_table._remove(object);
_table._checkCapacity();
}
}
void retainAll(Iterable objectsToRetain) {
IterableMixinWorkaround.retainAll(this, objectsToRetain);
}
void _filterMatching(bool test(E element), bool removeMatching) {
int entrySize = _table._entrySize;
int length = _table._table.length;
for (int offset = 0; offset < length; offset += entrySize) {
Object entry = _table._table[offset];
if (!_table._isFree(entry)) {
E key = identical(entry, _NULL) ? null : entry;
int modificationCount = _table._modificationCount;
bool shouldRemove = (removeMatching == test(key));
_table._checkModification(modificationCount);
if (shouldRemove) {
_table._deleteEntry(offset);
}
}
}
_table._checkCapacity();
}
void removeMatching(bool test(E element)) {
_filterMatching(test, true);
}
void retainMatching(bool test(E element)) {
_filterMatching(test, false);
}
void clear() {
_table._clear();
}
// Set.
bool isSubsetOf(Collection<E> collection) {
Set otherSet;
if (collection is Set) {
otherSet = collection;
} else {
otherSet = collection.toSet();
}
return otherSet.containsAll(this);
}
bool containsAll(Collection<E> collection) {
for (E element in collection) {
if (!this.contains(element)) return false;
}
return true;
}
Set<E> intersection(Collection<E> other) {
Set<E> result = new HashSet<E>();
for (E element in other) {
if (this.contains(element)) {
result.add(element);
}
}
return result;
}
String toString() => Collections.collectionToString(this);
}
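
One behavioral consequence of the table-based design (the _NULL marker in _HashTable) is that null is an ordinary set element or map key; the co19 status updates further down mark tests that still assume otherwise. A short illustrative sketch:

import 'dart:collection';

void main() {
  var set = new HashSet()..add(null)..add(1);
  print(set.contains(null)); // true

  var map = new HashMap();
  map[null] = 'nothing';
  print(map.containsKey(null)); // true
}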

View file

@@ -0,0 +1,404 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
part of dart.collection;
class _DeadEntry {
const _DeadEntry();
}
class _NullKey {
const _NullKey();
int get hashCode => null.hashCode;
}
const _TOMBSTONE = const _DeadEntry();
const _NULL = const _NullKey();
class _HashTable<K> {
/**
* Table of entries with [_entrySize] slots per entry.
*
* Capacity in entries must be a power of two.
*/
List _table;
/** Current capacity. Always equal to [:_table.length ~/ _entrySize:]. */
int _capacity;
/** Count of occupied entries, including deleted ones. */
int _entryCount = 0;
/** Count of deleted entries. */
int _deletedCount = 0;
/** Counter incremented when table is modified. */
int _modificationCount = 0;
/** If set, used as the source object for [ConcurrentModificationError]s. */
Object _container;
_HashTable(int initialCapacity) : _capacity = initialCapacity {
_table = _createTable(initialCapacity);
}
/** Reads key from table. Converts _NULL marker to null. */
Object _key(offset) {
assert(!_isFree(_table[offset]));
Object key = _table[offset];
if (!identical(key, _NULL)) return key;
return null;
}
/** Writes key to table. Converts null to _NULL marker. */
void _setKey(int offset, Object key) {
if (key == null) key = _NULL;
_table[offset] = key;
}
int get _elementCount => _entryCount - _deletedCount;
/** Size of each entry. */
int get _entrySize => 1;
void _checkModification(int expectedModificationCount) {
if (_modificationCount != expectedModificationCount) {
throw new ConcurrentModificationError(_container);
}
}
void _recordModification() {
// Value cycles after 2^30 modifications. If you keep hold of an
// iterator for that long, you might miss a modification detection,
// and iteration can go sour. Don't do that.
_modificationCount = (_modificationCount + 1) & (0x3FFFFFFF);
}
/**
* Create an empty table.
*/
List _createTable(int capacity) {
List table = new List.fixedLength(capacity * _entrySize);
return table;
}
/** First table probe. */
int _firstProbe(int hashCode, int capacity) {
return hashCode & (capacity - 1);
}
/** Following table probes. */
int _nextProbe(int previousIndex, int probeCount, int capacity) {
// When capacity is a power of 2, this probing algorithm (the triangular
// number sequence modulo capacity) is guaranteed to hit all indices exactly
// once before repeating.
return (previousIndex + probeCount) & (capacity - 1);
}
/** Whether an object is a free-marker (either tombstone or free). */
bool _isFree(Object marker) =>
marker == null || identical(marker, _TOMBSTONE);
/**
* Look up the offset for an object in the table.
*
* Finds the offset of the object in the table, if it is there,
* or the first free offset for its hashCode.
*/
int _probeForAdd(int hashCode, Object object) {
int entrySize = _entrySize;
int index = _firstProbe(hashCode, _capacity);
int firstTombstone = -1;
int probeCount = 0;
while (true) {
int offset = index * entrySize;
Object entry = _table[offset];
if (identical(entry, _TOMBSTONE)) {
if (firstTombstone < 0) firstTombstone = offset;
} else if (entry == null) {
if (firstTombstone < 0) return offset;
return firstTombstone;
} else if (identical(_NULL, entry) ? _equals(null, object)
: _equals(entry, object)) {
return offset;
}
// The _nextProbe is designed so that it hits
// every index eventually.
index = _nextProbe(index, ++probeCount, _capacity);
}
}
/**
* Look up the offset for an object in the table.
*
* If the object is in the table, its offset is returned.
*
* If the object is not in the table, a negative value is returned.
*/
int _probeForLookup(int hashCode, Object object) {
int entrySize = _entrySize;
int index = _firstProbe(hashCode, _capacity);
int probeCount = 0;
while (true) {
int offset = index * entrySize;
Object entry = _table[offset];
if (entry == null) {
return -1;
} else if (!identical(_TOMBSTONE, entry)) {
if (identical(_NULL, entry) ? _equals(null, object)
: _equals(entry, object)) {
return offset;
}
}
// The _nextProbe is designed so that it hits
// every index eventually.
index = _nextProbe(index, ++probeCount, _capacity);
}
}
// Override the following two to change equality/hashCode computations
/**
* Compare two object for equality.
*
* The first object is the one already in the table,
* and the second is the one being searched for.
*/
bool _equals(Object element, Object other) {
return element == other;
}
/**
* Compute hash-code for an object.
*/
int _hashCodeOf(Object object) => object.hashCode;
/**
* Ensure that the table isn't too full for its own good.
*
* Call this after adding an element.
*/
void _checkCapacity() {
// Compute everything in multiples of entrySize to avoid division.
int freeCount = _capacity - _entryCount;
if (freeCount * 4 < _capacity ||
freeCount < _deletedCount) {
// Less than 25% free or more deleted entries than free entries.
_grow(_entryCount - _deletedCount);
}
}
void _grow(int contentCount) {
int capacity = _capacity;
// Don't grow to less than twice the needed capacity.
int minCapacity = contentCount * 2;
while (capacity < minCapacity) {
capacity *= 2;
}
// Reset to another table and add all existing elements.
List oldTable = _table;
_table = _createTable(capacity);
_capacity = capacity;
_entryCount = 0;
_deletedCount = 0;
_addAllEntries(oldTable);
_recordModification();
}
/**
* Copies all non-free entries from the old table to the new empty table.
*/
void _addAllEntries(List oldTable) {
for (int i = 0; i < oldTable.length; i += _entrySize) {
Object object = oldTable[i];
if (!_isFree(object)) {
int toOffset = _put(object);
_copyEntry(oldTable, i, toOffset);
}
}
}
/**
* Copies everything but the key element from one entry to another.
*
* Called while growing the base array.
*
* Override this if any non-key fields need copying.
*/
void _copyEntry(List fromTable, int fromOffset, int toOffset) {}
// The following three methods are for simple get/set/remove operations.
// They only affect the key of an entry. The remaining fields must be
// filled by the caller.
/**
* Returns the offset of a key in [_table], or negative if it's not there.
*/
int _get(K key) {
return _probeForLookup(_hashCodeOf(key), key);
}
/**
* Puts the key into the table and returns its offset into [_table].
*
* If [_entrySize] is greater than 1, the caller should fill the
* remaining fields.
*
* Remember to call [_checkCapacity] after using this method.
*/
int _put(K key) {
int offset = _probeForAdd(_hashCodeOf(key), key);
Object oldEntry = _table[offset];
if (oldEntry == null) {
_entryCount++;
} else if (identical(oldEntry, _TOMBSTONE)) {
_deletedCount--;
} else {
return offset;
}
_setKey(offset, key);
_recordModification();
return offset;
}
/**
* Removes a key from the table and returns its offset into [_table].
*
* Returns a negative value if the key was not in the table.
* If [_entrySize] is greater than 1, the caller should clean up the
* remaining fields.
*/
int _remove(K key) {
int offset = _probeForLookup(_hashCodeOf(key), key);
if (offset >= 0) {
_deleteEntry(offset);
}
return offset;
}
/** Clears the table completely, leaving it empty. */
void _clear() {
if (_elementCount == 0) return;
for (int i = 0; i < _table.length; i++) {
_table[i] = null;
}
_entryCount = _deletedCount = 0;
_recordModification();
}
/** Clears an entry in the table. */
void _deleteEntry(int offset) {
assert(!_isFree(_table[offset]));
_setKey(offset, _TOMBSTONE);
_deletedCount++;
_recordModification();
}
}
/**
* Generic iterable based on a [_HashTable].
*/
abstract class _HashTableIterable<E> extends Iterable<E> {
final _HashTable _hashTable;
_HashTableIterable(this._hashTable);
Iterator<E> get iterator;
/**
* Return the iterated value for a given entry.
*/
E _valueAt(int offset, Object key);
int get length => _hashTable._elementCount;
bool get isEmpty => _hashTable._elementCount == 0;
void forEach(void action(E element)) {
int entrySize = _hashTable._entrySize;
List table = _hashTable._table;
int modificationCount = _hashTable._modificationCount;
for (int offset = 0; offset < table.length; offset += entrySize) {
Object entry = table[offset];
if (!_hashTable._isFree(entry)) {
E value = _valueAt(offset, entry);
action(value);
}
_hashTable._checkModification(modificationCount);
}
}
}
abstract class _HashTableIterator<E> implements Iterator<E> {
final _HashTable _hashTable;
final int _modificationCount;
/** Location right after last found element. */
int _offset = 0;
E _current = null;
_HashTableIterator(_HashTable hashTable)
: _hashTable = hashTable,
_modificationCount = hashTable._modificationCount;
bool moveNext() {
_hashTable._checkModification(_modificationCount);
List table = _hashTable._table;
int entrySize = _hashTable._entrySize;
while (_offset < table.length) {
int currentOffset = _offset;
Object entry = table[currentOffset];
_offset = currentOffset + entrySize;
if (!_hashTable._isFree(entry)) {
_current = _valueAt(currentOffset, entry);
return true;
}
}
_current = null;
return false;
}
E get current => _current;
E _valueAt(int offset, Object key);
}
class _HashTableKeyIterable<K> extends _HashTableIterable<K> {
_HashTableKeyIterable(_HashTable<K> hashTable) : super(hashTable);
Iterator<K> get iterator => new _HashTableKeyIterator<K>(_hashTable);
K _valueAt(int offset, Object key) {
if (identical(key, _NULL)) return null;
return key;
}
bool contains(Object value) => _hashTable._get(value) >= 0;
}
class _HashTableKeyIterator<K> extends _HashTableIterator<K> {
_HashTableKeyIterator(_HashTable hashTable) : super(hashTable);
K _valueAt(int offset, Object key) {
if (identical(key, _NULL)) return null;
return key;
}
}
class _HashTableValueIterable<V> extends _HashTableIterable<V> {
final int _entryIndex;
_HashTableValueIterable(_HashTable hashTable, this._entryIndex)
: super(hashTable);
Iterator<V> get iterator {
return new _HashTableValueIterator<V>(_hashTable, _entryIndex);
}
V _valueAt(int offset, Object key) => _hashTable._table[offset + _entryIndex];
}
class _HashTableValueIterator<V> extends _HashTableIterator<V> {
final int _entryIndex;
_HashTableValueIterator(_HashTable hashTable, this._entryIndex)
: super(hashTable);
V _valueAt(int offset, Object key) => _hashTable._table[offset + _entryIndex];
}
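
The _nextProbe comment above states that the triangular-number probe sequence visits every index exactly once when the capacity is a power of two. A standalone check of that property (illustrative; it mirrors the arithmetic in _firstProbe/_nextProbe):

void main() {
  const capacity = 8; // Must be a power of two, as in _HashTable.
  var seen = new Set<int>();
  var index = 0; // Any starting index behaves the same.
  for (var probeCount = 1; probeCount <= capacity; probeCount++) {
    seen.add(index);
    index = (index + probeCount) & (capacity - 1); // Same step as _nextProbe.
  }
  print(seen.length == capacity); // true: every slot is probed exactly once.
}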

View file

@@ -0,0 +1,138 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
part of dart.collection;
class _LinkedHashMapTable<K, V> extends _LinkedHashTable<K> {
static const int _INITIAL_CAPACITY = 8;
static const int _VALUE_INDEX = 3;
int get _entrySize => 4;
_LinkedHashMapTable() : super(_INITIAL_CAPACITY);
V _value(int offset) => _table[offset + _VALUE_INDEX];
void _setValue(int offset, V value) { _table[offset + _VALUE_INDEX] = value; }
_copyEntry(List oldTable, int fromOffset, int toOffset) {
_table[toOffset + _VALUE_INDEX] = oldTable[fromOffset + _VALUE_INDEX];
}
}
/**
* A hash-based map that iterates keys and values in key insertion order.
*/
class LinkedHashMap<K, V> implements Map<K, V> {
final _LinkedHashMapTable _hashTable;
LinkedHashMap() : _hashTable = new _LinkedHashMapTable<K, V>() {
_hashTable._container = this;
}
factory LinkedHashMap.from(Map<K, V> other) {
return new LinkedHashMap<K, V>()..addAll(other);
}
bool containsKey(K key) {
return _hashTable._get(key) >= 0;
}
bool containsValue(V value) {
int modificationCount = _hashTable._modificationCount;
for (int offset = _hashTable._next(_LinkedHashTable._HEAD_OFFSET);
offset != _LinkedHashTable._HEAD_OFFSET;
offset = _hashTable._next(offset)) {
if (_hashTable._value(offset) == value) {
return true;
}
// The == call may modify the table.
_hashTable._checkModification(modificationCount);
}
return false;
}
void addAll(Map<K, V> other) {
other.forEach((K key, V value) {
int offset = _hashTable._put(key);
_hashTable._setValue(offset, value);
_hashTable._checkCapacity();
});
}
V operator [](K key) {
int offset = _hashTable._get(key);
if (offset >= 0) return _hashTable._value(offset);
return null;
}
void operator []=(K key, V value) {
int offset = _hashTable._put(key);
_hashTable._setValue(offset, value);
_hashTable._checkCapacity();
}
V putIfAbsent(K key, V ifAbsent()) {
int offset = _hashTable._probeForAdd(_hashTable._hashCodeOf(key), key);
Object entry = _hashTable._table[offset];
if (!_hashTable._isFree(entry)) {
return _hashTable._value(offset);
}
int modificationCount = _hashTable._modificationCount;
V value = ifAbsent();
if (modificationCount == _hashTable._modificationCount) {
_hashTable._setKey(offset, key);
_hashTable._setValue(offset, value);
_hashTable._linkLast(offset);
if (entry == null) {
_hashTable._entryCount++;
_hashTable._checkCapacity();
} else {
assert(identical(entry, _TOMBSTONE));
_hashTable._deletedCount--;
}
_hashTable._recordModification();
} else {
// The table might have changed, so we can't trust [offset] any more.
// Do another lookup before setting the value.
offset = _hashTable._put(key);
_hashTable._setValue(offset, value);
_hashTable._checkCapacity();
}
return value;
}
V remove(K key) {
int offset = _hashTable._remove(key);
if (offset < 0) return null;
Object oldValue = _hashTable._value(offset);
_hashTable._setValue(offset, null);
_hashTable._checkCapacity();
return oldValue;
}
void clear() {
_hashTable._clear();
}
void forEach(void action (K key, V value)) {
int modificationCount = _hashTable._modificationCount;
for (int offset = _hashTable._next(_LinkedHashTable._HEAD_OFFSET);
offset != _LinkedHashTable._HEAD_OFFSET;
offset = _hashTable._next(offset)) {
action(_hashTable._key(offset), _hashTable._value(offset));
_hashTable._checkModification(modificationCount);
}
}
Iterable<K> get keys => new _LinkedHashTableKeyIterable<K>(_hashTable);
Iterable<V> get values =>
new _LinkedHashTableValueIterable<V>(_hashTable,
_LinkedHashMapTable._VALUE_INDEX);
int get length => _hashTable._elementCount;
bool get isEmpty => _hashTable._elementCount == 0;
String toString() => Maps.mapToString(this);
}
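
A quick sketch (illustrative only) of the ordering contract the linked table gives LinkedHashMap: keys iterate in insertion order, and updating an existing key keeps its position, since _put returns the existing offset without relinking.

import 'dart:collection';

void main() {
  var map = new LinkedHashMap<String, int>();
  map['first'] = 1;
  map['second'] = 2;
  map['third'] = 3;
  print(map.keys.toList()); // [first, second, third]

  map['second'] = 22;       // Update in place...
  print(map.keys.toList()); // ...order is unchanged: [first, second, third]
}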

View file

@@ -0,0 +1,157 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
part of dart.collection;
class LinkedHashSet<E> extends Collection<E> implements Set<E> {
static const int _INITIAL_CAPACITY = 8;
_LinkedHashTable<E> _table;
LinkedHashSet() : _table = new _LinkedHashTable(_INITIAL_CAPACITY) {
_table._container = this;
}
factory LinkedHashSet.from(Iterable<E> iterable) {
return new LinkedHashSet<E>()..addAll(iterable);
}
// Iterable.
Iterator<E> get iterator => new _LinkedHashTableKeyIterator<E>(_table);
void forEach(void action(E element)) {
int offset = _table._next(_LinkedHashTable._HEAD_OFFSET);
int modificationCount = _table._modificationCount;
while (offset != _LinkedHashTable._HEAD_OFFSET) {
E key = _table._key(offset);
action(key);
_table._checkModification(modificationCount);
offset = _table._next(offset);
}
}
bool get isEmpty => _table._elementCount == 0;
bool contains(Object object) => _table._get(object) >= 0;
E get first {
int firstOffset = _table._next(_LinkedHashTable._HEAD_OFFSET);
if (firstOffset == _LinkedHashTable._HEAD_OFFSET) {
throw new StateError("No elements");
}
return _table._key(firstOffset);
}
E get last {
int lastOffset = _table._prev(_LinkedHashTable._HEAD_OFFSET);
if (lastOffset == _LinkedHashTable._HEAD_OFFSET) {
throw new StateError("No elements");
}
return _table._key(lastOffset);
}
E get single {
int firstOffset = _table._next(_LinkedHashTable._HEAD_OFFSET);
if (firstOffset == _LinkedHashTable._HEAD_OFFSET) {
throw new StateError("No elements");
}
int lastOffset = _table._prev(_LinkedHashTable._HEAD_OFFSET);
if (lastOffset != firstOffset) {
throw new StateError("Too many elements");
}
return _table._key(firstOffset);
}
// Collection.
void add(E element) {
_table._put(element);
_table._checkCapacity();
}
void addAll(Iterable<E> objects) {
for (E object in objects) {
_table._put(object);
_table._checkCapacity();
}
}
bool remove(Object object) {
int offset = _table._remove(object);
if (offset >= 0) {
_table._checkCapacity();
return true;
}
return false;
}
void removeAll(Iterable objectsToRemove) {
for (Object object in objectsToRemove) {
_table._remove(object);
_table._checkCapacity();
}
}
void retainAll(Iterable objectsToRetain) {
IterableMixinWorkaround.retainAll(this, objectsToRetain);
}
void _filterMatching(bool test(E element), bool removeMatching) {
int entrySize = _table._entrySize;
int length = _table._table.length;
int offset = _table._next(_LinkedHashTable._HEAD_OFFSET);
while (offset != _LinkedHashTable._HEAD_OFFSET) {
E key = _table._key(offset);
int nextOffset = _table._next(offset);
int modificationCount = _table._modificationCount;
bool shouldRemove = (removeMatching == test(key));
_table._checkModification(modificationCount);
if (shouldRemove) {
_table._deleteEntry(offset);
}
offset = nextOffset;
}
_table._checkCapacity();
}
void removeMatching(bool test(E element)) {
_filterMatching(test, true);
}
void retainMatching(bool test(E element)) {
_filterMatching(test, false);
}
void clear() {
_table._clear();
}
// Set.
bool isSubsetOf(Collection<E> collection) {
Set otherSet;
if (collection is Set) {
otherSet = collection;
} else {
otherSet = collection.toSet();
}
return otherSet.containsAll(this);
}
bool containsAll(Collection<E> collection) {
for (E element in collection) {
if (!this.contains(element)) return false;
}
return true;
}
Set<E> intersection(Collection<E> other) {
Set<E> result = new LinkedHashSet<E>();
for (E element in other) {
if (this.contains(element)) {
result.add(element);
}
}
return result;
}
String toString() => Collections.collectionToString(this);
}

View file

@@ -0,0 +1,165 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
part of dart.collection;
/** Unique marker object for the head of a linked list of entries. */
class _LinkedHashTableHeadMarker {
const _LinkedHashTableHeadMarker();
}
const _LinkedHashTableHeadMarker _HEAD_MARKER =
const _LinkedHashTableHeadMarker();
class _LinkedHashTable<K> extends _HashTable<K> {
static const _NEXT_INDEX = 1;
static const _PREV_INDEX = 2;
static const _HEAD_OFFSET = 0;
_LinkedHashTable(int initialCapacity) : super(initialCapacity);
int get _entrySize => 3;
List _createTable(int capacity) {
List result = new List.fixedLength(capacity * _entrySize);
result[_HEAD_OFFSET] = _HEAD_MARKER;
result[_HEAD_OFFSET + _NEXT_INDEX] = _HEAD_OFFSET;
result[_HEAD_OFFSET + _PREV_INDEX] = _HEAD_OFFSET;
return result;
}
int _next(int offset) => _table[offset + _NEXT_INDEX];
void _setNext(int offset, int to) { _table[offset + _NEXT_INDEX] = to; }
int _prev(int offset) => _table[offset + _PREV_INDEX];
void _setPrev(int offset, int to) { _table[offset + _PREV_INDEX] = to; }
void _linkLast(int offset) {
// Add entry at offset at end of double-linked list.
int last = _prev(_HEAD_OFFSET);
_setNext(offset, _HEAD_OFFSET);
_setPrev(offset, last);
_setNext(last, offset);
_setPrev(_HEAD_OFFSET, offset);
}
void _unlink(int offset) {
assert(offset != _HEAD_OFFSET);
int next = _next(offset);
int prev = _prev(offset);
_setNext(offset, null);
_setPrev(offset, null);
_setNext(prev, next);
_setPrev(next, prev);
}
/**
* Copies all non-free entries from the old table to the new empty table.
*/
void _addAllEntries(List oldTable) {
int offset = oldTable[_HEAD_OFFSET + _NEXT_INDEX];
while (offset != _HEAD_OFFSET) {
Object object = oldTable[offset];
int nextOffset = oldTable[offset + _NEXT_INDEX];
int toOffset = _put(object);
_copyEntry(oldTable, offset, toOffset);
offset = nextOffset;
}
}
void _clear() {
if (_elementCount == 0) return;
_setNext(_HEAD_OFFSET, _HEAD_OFFSET);
_setPrev(_HEAD_OFFSET, _HEAD_OFFSET);
for (int i = _entrySize; i < _table.length; i++) {
_table[i] = null;
}
_entryCount = _deletedCount = 0;
_recordModification();
}
int _put(K key) {
int offset = _probeForAdd(_hashCodeOf(key), key);
Object oldEntry = _table[offset];
if (identical(oldEntry, _TOMBSTONE)) {
_deletedCount--;
} else if (oldEntry == null) {
_entryCount++;
} else {
return offset;
}
_recordModification();
_setKey(offset, key);
_linkLast(offset);
return offset;
}
void _deleteEntry(int offset) {
_unlink(offset);
_setKey(offset, _TOMBSTONE);
_deletedCount++;
_recordModification();
}
}
class _LinkedHashTableKeyIterable<K> extends Iterable<K> {
final _LinkedHashTable<K> _table;
_LinkedHashTableKeyIterable(this._table);
Iterator<K> get iterator => new _LinkedHashTableKeyIterator<K>(_table);
bool contains(Object value) => _table._get(value) >= 0;
int get length => _table._elementCount;
}
class _LinkedHashTableKeyIterator<K> extends _LinkedHashTableIterator<K> {
_LinkedHashTableKeyIterator(_LinkedHashTable<K> hashTable): super(hashTable);
K _getCurrent(int offset) => _hashTable._key(offset);
}
class _LinkedHashTableValueIterable<V> extends Iterable<V> {
final _LinkedHashTable _hashTable;
final int _valueIndex;
_LinkedHashTableValueIterable(this._hashTable, this._valueIndex);
Iterator<V> get iterator =>
new _LinkedHashTableValueIterator<V>(_hashTable, _valueIndex);
int get length => _hashTable._elementCount;
}
class _LinkedHashTableValueIterator<V> extends _LinkedHashTableIterator<V> {
final int _valueIndex;
_LinkedHashTableValueIterator(_LinkedHashTable hashTable, this._valueIndex)
: super(hashTable);
V _getCurrent(int offset) => _hashTable._table[offset + _valueIndex];
}
abstract class _LinkedHashTableIterator<T> implements Iterator<T> {
final _LinkedHashTable _hashTable;
final int _modificationCount;
int _offset;
T _current;
_LinkedHashTableIterator(_LinkedHashTable table)
: _hashTable = table,
_modificationCount = table._modificationCount,
_offset = table._next(_LinkedHashTable._HEAD_OFFSET);
bool moveNext() {
_hashTable._checkModification(_modificationCount);
if (_offset == _LinkedHashTable._HEAD_OFFSET) {
_current = null;
return false;
}
_current = _getCurrent(_offset);
_offset = _hashTable._next(_offset);
return true;
}
T _getCurrent(int offset);
T get current => _current;
}
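
The linked table stores insertion order directly in the flat array: each entry is [key, next, prev], with a sentinel head entry at offset 0 forming a circular doubly-linked list. A simplified model of _createTable/_linkLast on a plain List (the offsets and capacity are made up for the example, and the real table also stores a head marker and keys):

const entrySize = 3;
const nextIndex = 1;
const prevIndex = 2;
const headOffset = 0;

void linkLast(List table, int offset) {
  // Splice the entry in just before the head sentinel, i.e. at the end.
  var last = table[headOffset + prevIndex];
  table[offset + nextIndex] = headOffset;
  table[offset + prevIndex] = last;
  table[last + nextIndex] = offset;
  table[headOffset + prevIndex] = offset;
}

void main() {
  // Four entries of three slots each; entry 0 is the head sentinel.
  var table = new List<dynamic>.filled(4 * entrySize, null);
  table[headOffset + nextIndex] = headOffset;
  table[headOffset + prevIndex] = headOffset;

  linkLast(table, 3); // First real entry lives at offset 3.
  linkLast(table, 6); // Second real entry lives at offset 6.

  var order = <int>[];
  for (var o = table[headOffset + nextIndex];
      o != headOffset;
      o = table[o + nextIndex]) {
    order.add(o);
  }
  print(order); // [3, 6]: iteration follows insertion order.
}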

View file

@@ -1,474 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
part of dart.collection;
/**
* Hash map version of the [Map] interface. A [HashMap] does not
* provide any guarantees on the order of keys and values in [keys]
* and [values].
*/
abstract class HashMap<K, V> extends Map<K, V> {
/**
* Creates a map with the default implementation.
*/
factory HashMap() => new _HashMapImpl<K, V>();
/**
* Creates a [HashMap] that contains all key value pairs of [other].
*/
factory HashMap.from(Map<K, V> other) => new _HashMapImpl<K, V>.from(other);
}
/**
* Hash map version of the [Map] interface that preserves insertion
* order.
*/
abstract class LinkedHashMap<K, V> extends HashMap<K, V> {
/**
* Creates a map with the default implementation.
*/
factory LinkedHashMap() => new _LinkedHashMapImpl<K, V>();
/**
* Creates a [LinkedHashMap] that contains all key value pairs of [other].
*/
factory LinkedHashMap.from(Map<K, V> other)
=> new _LinkedHashMapImpl<K, V>.from(other);
}
// Hash map implementation with open addressing and quadratic probing.
class _HashMapImpl<K, V> implements HashMap<K, V> {
// The [_keys] list contains the keys inserted in the map.
// The [_keys] list must be a raw list because it
// will contain both elements of type K, and the [_DELETED_KEY] of type
// [_DeletedKeySentinel].
// The alternative of declaring the [_keys] list as of type Object
// does not work, because the HashSetIterator constructor would fail:
// HashSetIterator(HashSet<E> set)
// : _nextValidIndex = -1,
// _entries = set_._backingMap._keys {
// _advance();
// }
// With K being type int, for example, it would fail because
// List<Object> is not assignable to type List<int> of entries.
List _keys;
// The values inserted in the map. For a filled entry index in this
// list, there is always the corresponding key in the [keys_] list
// at the same entry index.
List<V> _values;
// The load limit is the number of entries we allow until we double
// the size of the lists.
int _loadLimit;
// The current number of entries in the map. Will never be greater
// than [_loadLimit].
int _numberOfEntries;
// The current number of deleted entries in the map.
int _numberOfDeleted;
// The sentinel when a key is deleted from the map.
static const _DeletedKeySentinel _DELETED_KEY = const _DeletedKeySentinel();
// The initial capacity of a hash map.
static const int _INITIAL_CAPACITY = 8; // must be power of 2
_HashMapImpl() {
_numberOfEntries = 0;
_numberOfDeleted = 0;
_loadLimit = _computeLoadLimit(_INITIAL_CAPACITY);
_keys = new List.fixedLength(_INITIAL_CAPACITY);
_values = new List<V>.fixedLength(_INITIAL_CAPACITY);
}
factory _HashMapImpl.from(Map<K, V> other) {
Map<K, V> result = new _HashMapImpl<K, V>();
other.forEach((K key, V value) { result[key] = value; });
return result;
}
static int _computeLoadLimit(int capacity) {
return (capacity * 3) ~/ 4;
}
static int _firstProbe(int hashCode, int length) {
return hashCode & (length - 1);
}
static int _nextProbe(int currentProbe, int numberOfProbes, int length) {
return (currentProbe + numberOfProbes) & (length - 1);
}
int _probeForAdding(K key) {
if (key == null) throw new ArgumentError(null);
int hash = _firstProbe(key.hashCode, _keys.length);
int numberOfProbes = 1;
int initialHash = hash;
// insertionIndex points to a slot where a key was deleted.
int insertionIndex = -1;
while (true) {
// [existingKey] can be either of type [K] or [_DeletedKeySentinel].
Object existingKey = _keys[hash];
if (existingKey == null) {
// We are sure the key is not already in the set.
// If the current slot is empty and we didn't find any
// insertion slot before, return this slot.
if (insertionIndex < 0) return hash;
// If we did find an insertion slot before, return it.
return insertionIndex;
} else if (existingKey == key) {
// The key is already in the map. Return its slot.
return hash;
} else if ((insertionIndex < 0) &&
(identical(existingKey, _DELETED_KEY))) {
// The slot contains a deleted element. Because previous calls to this
// method may not have had this slot deleted, we must continue to iterate
// to find out if there is a slot with the given key.
insertionIndex = hash;
}
// We did not find an insertion slot. Look at the next one.
hash = _nextProbe(hash, numberOfProbes++, _keys.length);
// _ensureCapacity has guaranteed the following cannot happen.
// assert(hash != initialHash);
}
}
int _probeForLookup(K key) {
if (key == null) throw new ArgumentError(null);
int hash = _firstProbe(key.hashCode, _keys.length);
int numberOfProbes = 1;
int initialHash = hash;
while (true) {
// [existingKey] can be either of type [K] or [_DeletedKeySentinel].
Object existingKey = _keys[hash];
// If the slot does not contain anything (in particular, it does not
// contain a deleted key), we know the key is not in the map.
if (existingKey == null) return -1;
// The key is in the map, return its index.
if (existingKey == key) return hash;
// Go to the next probe.
hash = _nextProbe(hash, numberOfProbes++, _keys.length);
// _ensureCapacity has guaranteed the following cannot happen.
// assert(hash != initialHash);
}
}
void _ensureCapacity() {
int newNumberOfEntries = _numberOfEntries + 1;
// Test if adding an element will reach the load limit.
if (newNumberOfEntries >= _loadLimit) {
_grow(_keys.length * 2);
return;
}
// Make sure that we don't have poor performance when a map
// contains lots of deleted entries: we _grow if
// there are more deleted entries than free entries.
int capacity = _keys.length;
int numberOfFreeOrDeleted = capacity - newNumberOfEntries;
int numberOfFree = numberOfFreeOrDeleted - _numberOfDeleted;
// assert(numberOfFree > 0);
if (_numberOfDeleted > numberOfFree) {
_grow(_keys.length);
}
}
static bool _isPowerOfTwo(int x) {
return ((x & (x - 1)) == 0);
}
void _grow(int newCapacity) {
assert(_isPowerOfTwo(newCapacity));
int capacity = _keys.length;
_loadLimit = _computeLoadLimit(newCapacity);
List oldKeys = _keys;
List<V> oldValues = _values;
_keys = new List.fixedLength(newCapacity);
_values = new List<V>.fixedLength(newCapacity);
for (int i = 0; i < capacity; i++) {
// [key] can be either of type [K] or [_DeletedKeySentinel].
Object key = oldKeys[i];
// If there is no key, we don't need to deal with the current slot.
if (key == null || identical(key, _DELETED_KEY)) {
continue;
}
V value = oldValues[i];
// Insert the {key, value} pair in their new slot.
int newIndex = _probeForAdding(key);
_keys[newIndex] = key;
_values[newIndex] = value;
}
_numberOfDeleted = 0;
}
void clear() {
_numberOfEntries = 0;
_numberOfDeleted = 0;
int length = _keys.length;
for (int i = 0; i < length; i++) {
_keys[i] = null;
_values[i] = null;
}
}
void operator []=(K key, V value) {
_ensureCapacity();
int index = _probeForAdding(key);
if ((_keys[index] == null) || (identical(_keys[index], _DELETED_KEY))) {
_numberOfEntries++;
}
_keys[index] = key;
_values[index] = value;
}
V operator [](K key) {
int index = _probeForLookup(key);
if (index < 0) return null;
return _values[index];
}
V putIfAbsent(K key, V ifAbsent()) {
int index = _probeForLookup(key);
if (index >= 0) return _values[index];
V value = ifAbsent();
this[key] = value;
return value;
}
V remove(K key) {
int index = _probeForLookup(key);
if (index >= 0) {
_numberOfEntries--;
V value = _values[index];
_values[index] = null;
// Set the key to the sentinel to not break the probing chain.
_keys[index] = _DELETED_KEY;
_numberOfDeleted++;
return value;
}
return null;
}
bool get isEmpty {
return _numberOfEntries == 0;
}
int get length {
return _numberOfEntries;
}
void forEach(void f(K key, V value)) {
Iterator<int> it = new _HashMapImplIndexIterator(this);
while (it.moveNext()) {
f(_keys[it.current], _values[it.current]);
}
}
Iterable<K> get keys => new _HashMapImplKeyIterable<K>(this);
Iterable<V> get values => new _HashMapImplValueIterable<V>(this);
bool containsKey(K key) {
return (_probeForLookup(key) != -1);
}
bool containsValue(V value) => values.contains(value);
String toString() {
return Maps.mapToString(this);
}
}
class _HashMapImplKeyIterable<E> extends Iterable<E> {
final _HashMapImpl _map;
_HashMapImplKeyIterable(this._map);
Iterator<E> get iterator => new _HashMapImplKeyIterator<E>(_map);
}
class _HashMapImplValueIterable<E> extends Iterable<E> {
final _HashMapImpl _map;
_HashMapImplValueIterable(this._map);
Iterator<E> get iterator => new _HashMapImplValueIterator<E>(_map);
}
abstract class _HashMapImplIterator<E> implements Iterator<E> {
final _HashMapImpl _map;
int _index = -1;
E _current;
_HashMapImplIterator(this._map);
E _computeCurrentFromIndex(int index, List keys, List values);
bool moveNext() {
int length = _map._keys.length;
int newIndex = _index + 1;
while (newIndex < length) {
var key = _map._keys[newIndex];
if ((key != null) && (!identical(key, _HashMapImpl._DELETED_KEY))) {
_current = _computeCurrentFromIndex(newIndex, _map._keys, _map._values);
_index = newIndex;
return true;
}
newIndex++;
}
_index = length;
_current = null;
return false;
}
E get current => _current;
}
class _HashMapImplKeyIterator<E> extends _HashMapImplIterator<E> {
_HashMapImplKeyIterator(_HashMapImpl map) : super(map);
E _computeCurrentFromIndex(int index, List keys, List values) {
return keys[index];
}
}
class _HashMapImplValueIterator<E> extends _HashMapImplIterator<E> {
_HashMapImplValueIterator(_HashMapImpl map) : super(map);
E _computeCurrentFromIndex(int index, List keys, List values) {
return values[index];
}
}
class _HashMapImplIndexIterator extends _HashMapImplIterator<int> {
_HashMapImplIndexIterator(_HashMapImpl map) : super(map);
int _computeCurrentFromIndex(int index, List keys, List values) {
return index;
}
}
/**
* A singleton sentinel used to represent when a key is deleted from the map.
* We can't use [: const Object() :] as a sentinel because it would end up
* canonicalized and then we cannot distinguish the deleted key from the
* canonicalized [: Object() :].
*/
class _DeletedKeySentinel {
const _DeletedKeySentinel();
}
/**
* This class represents a pair of two objects, used by LinkedHashMap
* to store a {key, value} in a list.
*/
class _KeyValuePair<K, V> {
_KeyValuePair(this.key, this.value) {}
final K key;
V value;
}
/**
* A LinkedHashMap is a hash map that preserves the insertion order
* when iterating over the keys or the values. Updating the value of a
* key does not change the order.
*/
class _LinkedHashMapImpl<K, V> implements LinkedHashMap<K, V> {
DoubleLinkedQueue<_KeyValuePair<K, V>> _list;
HashMap<K, DoubleLinkedQueueEntry<_KeyValuePair<K, V>>> _map;
_LinkedHashMapImpl() {
_map = new HashMap<K, DoubleLinkedQueueEntry<_KeyValuePair<K, V>>>();
_list = new DoubleLinkedQueue<_KeyValuePair<K, V>>();
}
factory _LinkedHashMapImpl.from(Map<K, V> other) {
Map<K, V> result = new _LinkedHashMapImpl<K, V>();
other.forEach((K key, V value) { result[key] = value; });
return result;
}
void operator []=(K key, V value) {
if (_map.containsKey(key)) {
_map[key].element.value = value;
} else {
_list.addLast(new _KeyValuePair<K, V>(key, value));
_map[key] = _list.lastEntry();
}
}
V operator [](K key) {
DoubleLinkedQueueEntry<_KeyValuePair<K, V>> entry = _map[key];
if (entry == null) return null;
return entry.element.value;
}
V remove(K key) {
DoubleLinkedQueueEntry<_KeyValuePair<K, V>> entry = _map.remove(key);
if (entry == null) return null;
entry.remove();
return entry.element.value;
}
V putIfAbsent(K key, V ifAbsent()) {
V value = this[key];
if ((this[key] == null) && !(containsKey(key))) {
value = ifAbsent();
this[key] = value;
}
return value;
}
Iterable<K> get keys {
return new MappedIterable<_KeyValuePair<K, V>, K>(
_list, (_KeyValuePair<K, V> entry) => entry.key);
}
Iterable<V> get values {
return new MappedIterable<_KeyValuePair<K, V>, V>(
_list, (_KeyValuePair<K, V> entry) => entry.value);
}
void forEach(void f(K key, V value)) {
_list.forEach((_KeyValuePair<K, V> entry) {
f(entry.key, entry.value);
});
}
bool containsKey(K key) {
return _map.containsKey(key);
}
bool containsValue(V value) {
return _list.any((_KeyValuePair<K, V> entry) {
return (entry.value == value);
});
}
int get length {
return _map.length;
}
bool get isEmpty {
return length == 0;
}
void clear() {
_map.clear();
_list.clear();
}
String toString() {
return Maps.mapToString(this);
}
}

View file

@@ -1,110 +0,0 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
part of dart.collection;
class HashSet<E> extends Collection<E> implements Set<E> {
// The map backing this set. The associations in this map are all
// of the form element -> element. If a value is not in the map,
// then it is not in the set.
final _HashMapImpl<E, E> _backingMap;
HashSet() : _backingMap = new _HashMapImpl<E, E>();
/**
* Creates a [Set] that contains all elements of [other].
*/
factory HashSet.from(Iterable<E> other) {
Set<E> set = new HashSet<E>();
for (final e in other) {
set.add(e);
}
return set;
}
void clear() {
_backingMap.clear();
}
void add(E value) {
_backingMap[value] = value;
}
bool remove(Object value) {
if (!_backingMap.containsKey(value)) return false;
_backingMap.remove(value);
return true;
}
bool contains(E value) {
return _backingMap.containsKey(value);
}
Set<E> intersection(Collection<E> collection) {
Set<E> result = new Set<E>();
collection.forEach((E value) {
if (contains(value)) result.add(value);
});
return result;
}
bool isSubsetOf(Collection<E> other) {
return new Set<E>.from(other).containsAll(this);
}
bool containsAll(Collection<E> collection) {
return collection.every((E value) {
return contains(value);
});
}
void forEach(void f(E element)) {
_backingMap.forEach((E key, E value) {
f(key);
});
}
bool get isEmpty {
return _backingMap.isEmpty;
}
int get length {
return _backingMap.length;
}
Iterator<E> get iterator => new _HashSetIterator<E>(this);
String toString() {
return Collections.collectionToString(this);
}
}
class _HashSetIterator<E> implements Iterator<E> {
_HashSetIterator(HashSet<E> set)
: _keysIterator = set._backingMap._keys.iterator;
E get current {
var result = _keysIterator.current;
if (identical(result, _HashMapImpl._DELETED_KEY)) {
// TODO(floitsch): improve the error reporting.
throw new StateError("Concurrent modification.");
}
return result;
}
bool moveNext() {
bool result;
do {
result = _keysIterator.moveNext();
} while (result &&
(_keysIterator.current == null ||
identical(_keysIterator.current, _HashMapImpl._DELETED_KEY)));
return result;
}
Iterator _keysIterator;
}

View file

@@ -47,6 +47,9 @@ abstract class Map<K, V> {
* If [key] is not associated to a value, calls [ifAbsent] and
* updates the map by mapping [key] to the value returned by
* [ifAbsent]. Returns the value in the map.
*
* It is an error to add or remove keys from the map during the call to
* [ifAbsent].
*/
V putIfAbsent(K key, V ifAbsent());
@@ -65,6 +68,8 @@ abstract class Map<K, V> {
/**
* Applies [f] to each {key, value} pair of the map.
*
* It is an error to add or remove keys from the map during iteration.
*/
void forEach(void f(K key, V value));
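
The stricter contract documented above is what the new implementations enforce through their modification counters. A minimal sketch of what the error looks like in practice (the keys are made up):

import 'dart:collection';

void main() {
  var map = new HashMap<int, int>()..[1] = 1..[2] = 2;
  try {
    map.forEach((key, value) {
      map.remove(key); // Structural change during iteration.
    });
  } on ConcurrentModificationError {
    print('adding or removing keys during forEach throws');
  }
}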

View file

@@ -1917,14 +1917,15 @@ class _HttpClient implements HttpClient {
void shutdown({bool force: false}) {
if (force) _closeQueue.shutdown();
_openSockets.forEach((String key, Queue<_SocketConnection> connections) {
new Map.from(_openSockets).forEach(
(String key, Queue<_SocketConnection> connections) {
while (!connections.isEmpty) {
_SocketConnection socketConn = connections.removeFirst();
socketConn._socket.close();
}
});
if (force) {
_activeSockets.forEach((_SocketConnection socketConn) {
_activeSockets.toList().forEach((_SocketConnection socketConn) {
socketConn._httpClientConnection._onClientShutdown();
socketConn._close();
});

View file

@@ -657,3 +657,20 @@ LibTest/core/Queue/removeLast_A01_t01: Fail # Moved collection classes from core
LibTest/core/Queue/removeLast_A02_t01: Fail # Moved collection classes from core to collection. co19 issue 371.
LibTest/core/Set/Set.from_A01_t01: Fail # Moved collection classes from core to collection. co19 issue 371.
Language/14_Types/4_Interface_Types_A08_t06: Fail # Moved collection classes from core to collection. co19 issue 371.
# Doesn't expect null to be allowed in Set or Map keys (issue 377).
LibTest/core/Map/containsKey_A01_t02: Fail
LibTest/core/Map/operator_subscript_A01_t02: Fail
LibTest/core/Map/operator_subscripted_assignment_A01_t02: Fail
LibTest/core/Map/remove_A01_t02: Fail
LibTest/core/Set/add_A01_t02: Fail
LibTest/core/Set/addAll_A01_t02: Fail
LibTest/core/Set/contains_A01_t02: Fail
LibTest/core/Set/containsAll_A01_t02: Fail
LibTest/core/Set/intersection_A03_t01: Fail
LibTest/core/Set/isSubsetOf_A01_t02: Fail
LibTest/core/Set/remove_A01_t02: Fail
LibTest/core/Set/removeAll_A01_t02: Fail
# Doesn't expect concurrent modification error (issue 376).
LibTest/core/Map/forEach_A01_t07: Fail

View file

@@ -787,3 +787,20 @@ Language/14_Types/5_Function_Types_A02_t10: Fail # dartbug.com/7342
Language/14_Types/5_Function_Types_A03_t06: Fail # dartbug.com/7342
Language/14_Types/5_Function_Types_A03_t10: Fail # dartbug.com/7342
Language/14_Types/5_Function_Types_A03_t11: Fail # dartbug.com/7342
# Doesn't expect null to be allowed in Set or Map keys (issue 377).
LibTest/core/Map/containsKey_A01_t02: Fail
LibTest/core/Map/operator_subscript_A01_t02: Fail
LibTest/core/Map/operator_subscripted_assignment_A01_t02: Fail
LibTest/core/Map/remove_A01_t02: Fail
LibTest/core/Set/add_A01_t02: Fail
LibTest/core/Set/addAll_A01_t02: Fail
LibTest/core/Set/contains_A01_t02: Fail
LibTest/core/Set/containsAll_A01_t02: Fail
LibTest/core/Set/intersection_A03_t01: Fail
LibTest/core/Set/isSubsetOf_A01_t02: Fail
LibTest/core/Set/remove_A01_t02: Fail
LibTest/core/Set/removeAll_A01_t02: Fail
# Doesn't expect concurrent modification error (issue 376).
LibTest/core/Map/forEach_A01_t07: Fail

View file

@@ -504,6 +504,23 @@ LibTest/core/Queue/removeLast_A01_t01: Fail # Moved collection classes from core
LibTest/core/Queue/removeLast_A02_t01: Fail # Moved collection classes from core to collection. co19 issue 371.
LibTest/core/Set/Set.from_A01_t01: Fail # Moved collection classes from core to collection. co19 issue 371.
# Doesn't expect null to be allowed in Set or Map keys (issue 377).
LibTest/core/Map/containsKey_A01_t02: Fail
LibTest/core/Map/operator_subscript_A01_t02: Fail
LibTest/core/Map/operator_subscripted_assignment_A01_t02: Fail
LibTest/core/Map/remove_A01_t02: Fail
LibTest/core/Set/add_A01_t02: Fail
LibTest/core/Set/addAll_A01_t02: Fail
LibTest/core/Set/contains_A01_t02: Fail
LibTest/core/Set/containsAll_A01_t02: Fail
LibTest/core/Set/intersection_A03_t01: Fail
LibTest/core/Set/isSubsetOf_A01_t02: Fail
LibTest/core/Set/remove_A01_t02: Fail
LibTest/core/Set/removeAll_A01_t02: Fail
# Doesn't expect concurrent modification error (issue 376).
LibTest/core/Map/forEach_A01_t07: Fail
[ $compiler == none && $runtime == vm && $unchecked ]
LibTest/core/Future/chain_A02_t05: Fail # Future is in async library. co19 issue 367
LibTest/core/Future/transform_A02_t04: Fail # Future is in async library. co19 issue 367

View file

@@ -0,0 +1,289 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
// Tests of hash map behavior, with a focus on iteration and concurrent
// modification errors.
library hash_map2_test;
import 'dart:collection';
testMap(Map newMap(), Map newMapFrom(Map map)) {
Map gen(int from, int to) {
Map map = new LinkedHashMap();
for (int i = from; i < to; i++) map[i] = i;
return map;
}
bool odd(int n) => (n & 1) == 1;
bool even(int n) => (n & 1) == 0;
void addAll(Map toMap, Map fromMap) {
fromMap.forEach((k, v) { toMap[k] = v; });
}
{ // Test growing to largish capacity.
Map map = newMap();
for (int i = 0; i < 256; i++) {
map[i] = i;
}
addAll(map, gen(256, 512));
addAll(map, newMapFrom(gen(512, 1000)));
Expect.equals(1000, map.length);
// Remove half.
for (int i = 0; i < 1000; i += 2) map.remove(i);
Expect.equals(500, map.length);
Expect.isFalse(map.keys.any(even));
Expect.isTrue(map.keys.every(odd));
// Re-add all.
addAll(map, gen(0, 1000));
Expect.equals(1000, map.length);
}
{ // Test having many deleted elements.
Map map = newMap();
map[0] = 0;
for (int i = 0; i < 1000; i++) {
map[i + 1] = i + 1;
map.remove(i);
Expect.equals(1, map.length);
}
}
{ // Test having many elements with same hashCode
Map map = newMap();
for (int i = 0; i < 1000; i++) {
map[new BadHashCode()] = 0;
}
Expect.equals(1000, map.length);
}
{ // Check concurrent modification
Map map = newMap()..[0] = 0..[1] = 1;
{ // Test adding before a moveNext.
Iterator iter = map.keys.iterator;
iter.moveNext();
map[1] = 9; // Updating existing key isn't a modification.
iter.moveNext();
map[2] = 2;
Expect.throws(iter.moveNext, (e) => e is Error);
}
{ // Test adding after last element.
Iterator iter = map.keys.iterator;
Expect.equals(3, map.length);
iter.moveNext();
iter.moveNext();
iter.moveNext();
map[3] = 3;
Expect.throws(iter.moveNext, (e) => e is Error);
}
{ // Test removing during iteration.
Iterator iter = map.keys.iterator;
iter.moveNext();
map.remove(1000); // Not a modification if it's not there.
iter.moveNext();
int n = iter.current;
map.remove(n);
// Removing doesn't change current.
Expect.equals(n, iter.current);
Expect.throws(iter.moveNext, (e) => e is Error);
}
{ // Test removing after last element.
Iterator iter = map.keys.iterator;
Expect.equals(3, map.length);
iter.moveNext();
iter.moveNext();
iter.moveNext();
int n = iter.current;
map.remove(n);
// Removing doesn't change current.
Expect.equals(n, iter.current);
Expect.throws(iter.moveNext, (e) => e is Error);
}
{ // Test that updating value of existing key doesn't cause concurrent
// modification error.
Iterator iter = map.keys.iterator;
Expect.equals(2, map.length);
iter.moveNext();
int n = iter.current;
map[n] = n * 2;
iter.moveNext();
Expect.equals(map[iter.current], iter.current);
}
{ // Test that modification during putIfAbsent is not an error.
map.putIfAbsent(4, () {
map[5] = 5;
map[4] = -1;
return 4;
});
Expect.equals(4, map[4]);
Expect.equals(5, map[5]);
}
{ // Check adding many existing keys isn't considered modification.
Map map2 = newMap();
for (var key in map.keys) {
map2[key] = map[key] + 1;
}
Iterator iter = map.keys.iterator;
addAll(map, map2);
// Shouldn't throw.
iter.moveNext();
}
}
{ // Regression test for bug in putIfAbsent where adding an element
// that makes the table grow can be lost.
Map map = newMap();
map.putIfAbsent("S", () => 0);
map.putIfAbsent("T", () => 0);
map.putIfAbsent("U", () => 0);
map.putIfAbsent("C", () => 0);
map.putIfAbsent("a", () => 0);
map.putIfAbsent("b", () => 0);
map.putIfAbsent("n", () => 0);
Expect.isTrue(map.containsKey("n"));
}
{ // Check that putIfAbsent works just as well as put.
Map map = newMap();
for (int i = 0; i < 128; i++) {
map.putIfAbsent(i, () => i);
Expect.isTrue(map.containsKey(i));
map.putIfAbsent(i >> 1, () => -1); // Never triggers.
}
for (int i = 0; i < 128; i++) {
Expect.equals(i, map[i]);
}
}
{ // Check that updating existing elements is not a modification.
// This must be the case even if the underlying data structure is
// nearly full.
for (int i = 1; i < 128; i++) {
// Create maps of different sizes, some of which should be
// at a limit of the underlying data structure.
Map map = newMapFrom(gen(0, i));
// ForEach-iteration.
map.forEach((key, v) {
Expect.equals(key, map[key]);
map[key] = key + 1;
map.remove(1000); // Removing something not there.
map.putIfAbsent(key, () => Expect.fail("SHOULD NOT BE ABSENT"));
// Doesn't cause ConcurrentModificationError.
});
// for-in iteration.
for (int key in map.keys) {
Expect.equals(key + 1, map[key]);
map[key] = map[key] + 1;
map.remove(1000); // Removing something not there.
map.putIfAbsent(key, () => Expect.fail("SHOULD NOT BE ABSENT"));
// Doesn't cause ConcurrentModificationError.
}
// Raw iterator.
Iterator iter = map.keys.iterator;
for (int key = 0; key < i; key++) {
Expect.equals(key + 2, map[key]);
map[key] = key + 3;
map.remove(1000); // Removing something not there.
map.putIfAbsent(key, () => Expect.fail("SHOULD NOT BE ABSENT"));
// Doesn't cause ConcurrentModificationError on the moveNext.
}
iter.moveNext(); // Should not throw.
// Remove a lot of elements, which can cause a re-tabulation.
for (int key = 1; key < i; key++) {
Expect.equals(key + 3, map[key]);
map.remove(key);
}
iter = map.keys.iterator;
map[0] = 2;
iter.moveNext(); // Should not throw.
}
}
{ // Check that null can be in the map.
Map map = newMap();
map[null] = 0;
Expect.equals(1, map.length);
Expect.isTrue(map.containsKey(null));
Expect.isNull(map.keys.first);
Expect.isNull(map.keys.last);
map[null] = 1;
Expect.equals(1, map.length);
Expect.isTrue(map.containsKey(null));
map.remove(null);
Expect.isTrue(map.isEmpty);
Expect.isFalse(map.containsKey(null));
// Created using Map.from.
map = newMapFrom(new Map()..[null] = 0);
Expect.equals(1, map.length);
Expect.isTrue(map.containsKey(null));
Expect.isNull(map.keys.first);
Expect.isNull(map.keys.last);
map[null] = 1;
Expect.equals(1, map.length);
Expect.isTrue(map.containsKey(null));
map.remove(null);
Expect.isTrue(map.isEmpty);
Expect.isFalse(map.containsKey(null));
Map fromMap = new Map();
fromMap[1] = 0;
fromMap[2] = 0;
fromMap[3] = 0;
fromMap[null] = 0;
fromMap[4] = 0;
fromMap[5] = 0;
fromMap[6] = 0;
Expect.equals(7, fromMap.length);
// Map that grows with null in it.
map = newMapFrom(fromMap);
Expect.equals(7, map.length);
for (int i = 7; i < 128; i++) {
map[i] = 0;
}
Expect.equals(128, map.length);
Expect.isTrue(map.containsKey(null));
map[null] = 1;
Expect.equals(128, map.length);
Expect.isTrue(map.containsKey(null));
map.remove(null);
Expect.equals(127, map.length);
Expect.isFalse(map.containsKey(null));
}
}
void main() {
Expect.isTrue(new HashMap<int, String>() is Map<int, String>);
Expect.isTrue(new LinkedHashMap<int, String>() is Map<int, String>);
Expect.isTrue(new HashMap<String, int>.from({}) is Map<String, int>);
Expect.isTrue(new LinkedHashMap<String, int>.from({}) is Map<String, int>);
Expect.isTrue(<String, int>{} is Map<String, int>);
Expect.isTrue(const <String, int>{} is Map<String, int>);
testMap(() => new HashMap(), (m) => new HashMap.from(m));
testMap(() => new LinkedHashMap(), (m) => new LinkedHashMap.from(m));
}
class BadHashCode {
static int idCounter = 0;
final int id;
BadHashCode() : id = idCounter++;
int get hashCode => 42;
}
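
The concurrent-modification checks exercised by the map test above are typically implemented with a modification counter: structural changes (adding a new key or removing one) bump the counter, and live iterators compare it on every moveNext. The sketch below illustrates that technique only; it is not the dart:collection implementation, and the class and member names are invented for the example.

import 'dart:collection';

// Toy container demonstrating the modification-counter guard.
class CountingBag<E> extends IterableBase<E> {
  final List<E> _elements = <E>[];
  int _modCount = 0;

  void add(E value) {
    // A real map or set only counts *structural* changes, which is why
    // re-adding an existing key in the tests above does not invalidate
    // live iterators.
    _elements.add(value);
    _modCount++;
  }

  Iterator<E> get iterator => new _CountingBagIterator<E>(this);
}

class _CountingBagIterator<E> implements Iterator<E> {
  final CountingBag<E> _bag;
  final int _expectedModCount;
  int _index = -1;
  E _current;

  _CountingBagIterator(CountingBag<E> bag)
      : _bag = bag,
        _expectedModCount = bag._modCount;

  E get current => _current;

  bool moveNext() {
    if (_expectedModCount != _bag._modCount) {
      throw new ConcurrentModificationError(_bag);
    }
    if (++_index >= _bag._elements.length) {
      _current = null;
      return false;
    }
    _current = _bag._elements[_index];
    return true;
  }
}

main() {
  var bag = new CountingBag<int>()..add(1)..add(2);
  var it = bag.iterator;
  it.moveNext();
  bag.add(3);       // Structural change while iterating.
  // it.moveNext(); // Would now throw ConcurrentModificationError.
}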

@ -0,0 +1,206 @@
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
// Tests of hash set behavior, with focus on iteration and concurrent
// modification errors.
library hash_set2_test;
import 'dart:collection';
testSet(Set newSet(), Set newSetFrom(Set from)) {
Set gen(int from, int to) =>
new Set.from(new Iterable.generate(to - from, (n) => n + from));
bool odd(int n) => (n & 1) == 1;
bool even(int n) => (n & 1) == 0;
{ // Test growing to largish capacity.
Set set = newSet();
for (int i = 0; i < 256; i++) {
set.add(i);
}
set.addAll(gen(256, 512));
set.addAll(newSetFrom(gen(512, 1000)));
Expect.equals(1000, set.length);
// Remove half.
for (int i = 0; i < 1000; i += 2) set.remove(i);
Expect.equals(500, set.length);
Expect.isFalse(set.any(even));
Expect.isTrue(set.every(odd));
// Re-add all.
set.addAll(gen(0, 1000));
Expect.equals(1000, set.length);
}
{ // Test having many deleted elements.
Set set = newSet();
set.add(0);
for (int i = 0; i < 1000; i++) {
set.add(i + 1);
set.remove(i);
Expect.equals(1, set.length);
}
}
{ // Test having many elements with same hashCode
Set set = newSet();
for (int i = 0; i < 1000; i++) {
set.add(new BadHashCode());
}
Expect.equals(1000, set.length);
}
{ // Check concurrent modification
Set set = newSet()..add(0)..add(1);
{ // Test adding before a moveNext.
Iterator iter = set.iterator;
iter.moveNext();
set.add(1); // Re-adding an existing element isn't a modification.
iter.moveNext();
set.add(2);
Expect.throws(iter.moveNext, (e) => e is Error);
}
{ // Test adding after last element.
Iterator iter = set.iterator;
Expect.equals(3, set.length);
iter.moveNext();
iter.moveNext();
iter.moveNext();
set.add(3);
Expect.throws(iter.moveNext, (e) => e is Error);
}
{ // Test removing during iteration.
Iterator iter = set.iterator;
iter.moveNext();
set.remove(1000); // Not a modification if it's not there.
iter.moveNext();
int n = iter.current;
set.remove(n);
// Removing doesn't change current.
Expect.equals(n, iter.current);
Expect.throws(iter.moveNext, (e) => e is Error);
}
{ // Test removing after last element.
Iterator iter = set.iterator;
Expect.equals(3, set.length);
iter.moveNext();
iter.moveNext();
iter.moveNext();
int n = iter.current;
set.remove(n);
// Removing doesn't change current.
Expect.equals(n, iter.current);
Expect.throws(iter.moveNext, (e) => e is Error);
}
{ // Test that re-adding an existing element doesn't cause an error.
Iterator iter = set.iterator;
Expect.equals(2, set.length);
iter.moveNext();
int n = iter.current;
set.add(n);
iter.moveNext();
Expect.isTrue(set.contains(iter.current));
}
{ // Check adding many existing values isn't considered modification.
Set set2 = newSet();
for (var value in set) {
set2.add(value);
}
Iterator iter = set.iterator;
set.addAll(set2);
// Shouldn't throw.
iter.moveNext();
}
}
{ // Check that updating existing elements is not a modification.
// This must be the case even if the underlying data structure is
// nearly full.
for (int i = 1; i < 128; i++) {
// Create sets of different sizes, some of which should be
// at a limit of the underlying data structure.
Set set = newSetFrom(gen(0, i));
Iterator iter = set.iterator;
for (int j = 0; j < i; j++) {
set.add(j);
}
iter.moveNext(); // Should not throw.
for (int j = 1; j < i; j++) {
set.remove(j);
}
iter = set.iterator;
set.add(0);
iter.moveNext(); // Should not throw.
}
}
{ // Check that null can be in the set.
Set set = newSet();
set.add(null);
Expect.equals(1, set.length);
Expect.isTrue(set.contains(null));
Expect.isNull(set.first);
Expect.isNull(set.last);
set.add(null);
Expect.equals(1, set.length);
Expect.isTrue(set.contains(null));
set.remove(null);
Expect.isTrue(set.isEmpty);
Expect.isFalse(set.contains(null));
// Created using Set.from.
set = newSetFrom([null]);
Expect.equals(1, set.length);
Expect.isTrue(set.contains(null));
Expect.isNull(set.first);
Expect.isNull(set.last);
set.add(null);
Expect.equals(1, set.length);
Expect.isTrue(set.contains(null));
set.remove(null);
Expect.isTrue(set.isEmpty);
Expect.isFalse(set.contains(null));
// Set that grows with null in it.
set = newSetFrom([1, 2, 3, null, 4, 5, 6]);
Expect.equals(7, set.length);
for (int i = 7; i < 128; i++) {
set.add(i);
}
Expect.equals(128, set.length);
Expect.isTrue(set.contains(null));
set.add(null);
Expect.equals(128, set.length);
Expect.isTrue(set.contains(null));
set.remove(null);
Expect.equals(127, set.length);
Expect.isFalse(set.contains(null));
}
}
void main() {
testSet(() => new HashSet(), (m) => new HashSet.from(m));
testSet(() => new LinkedHashSet(), (m) => new LinkedHashSet.from(m));
}
class BadHashCode {
static int idCounter = 0;
final int id;
BadHashCode() : id = idCounter++;
int get hashCode => 42;
// operator == is identity.
// A "bad" compareTo (one inconsistent with ==) would simply be invalid,
// so this one just orders by id.
int compareTo(BadHashCode other) => id - other.id;
}
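
One ordering consequence of the reimplementation that the set test does not spell out: LinkedHashSet iterates in insertion order, while HashSet leaves the order unspecified. A small standalone illustration (the HashSet order is deliberately not asserted):

import 'dart:collection';

main() {
  var linked = new LinkedHashSet<int>()..add(3)..add(1)..add(2);
  print(linked.toList()); // [3, 1, 2]: insertion order is preserved.

  var hashed = new HashSet<int>()..add(3)..add(1)..add(2);
  print(hashed.toList()); // Some permutation of 1, 2, 3; order unspecified.
}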

@ -71,7 +71,7 @@ testWithLinkedMap() {
var map = const { 'b': 1, 'a': 2, 'c': 3 };
var otherMap = new LinkedHashMap.from(map);
Expect.isTrue(otherMap is Map);
Expect.isTrue(otherMap is HashMap);
Expect.isTrue(otherMap is! HashMap);
Expect.isTrue(otherMap is LinkedHashMap);
var i = 1;
for (var val in map.values) {

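The hunk above records a user-visible consequence of the new implementation: a LinkedHashMap is no longer a HashMap subtype, so the is-check flips. A standalone repro of the same assertions, using the test's own literal:

import 'dart:collection';

main() {
  var otherMap = new LinkedHashMap.from(const {'b': 1, 'a': 2, 'c': 3});
  print(otherMap is Map);           // true
  print(otherMap is HashMap);       // false with the new implementation
  print(otherMap is LinkedHashMap); // true
}
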
@ -42,7 +42,7 @@ Map<String, PubCommand> get pubCommands {
'uploader': new UploaderCommand(),
'version': new VersionCommand()
};
for (var command in commands.values) {
for (var command in commands.values.toList()) {
for (var alias in command.aliases) {
commands[alias] = command;
}
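
The pub change above applies the same workaround seen in the compiler changes earlier in this commit: snapshot the collection with toList() before a loop that mutates the underlying map, since inserting the aliases while iterating the live view would now throw ConcurrentModificationError. A minimal standalone sketch of the pattern; the command and alias names here are invented for illustration:

main() {
  var commands = <String, String>{'version': 'VersionCommand'};
  var aliases = <String, List<String>>{'version': ['-v', '--version']};

  // Iterate a snapshot of the keys; the loop body adds new entries to
  // `commands`, which would invalidate a live keys iterator.
  for (var name in commands.keys.toList()) {
    for (var alias in aliases[name]) {
      commands[alias] = commands[name];
    }
  }
  print(commands.length); // 3
}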