[dart2js] Refactor serialization indices.

Simplifies indices management for serialization by reusing the same indexing readers/writers for each phase. This way the caches are shared across all the phases and we can decouple the indices from any specific data read.

This fixes an issue that prevented the indices from being used in certain read/write patterns. None of our current usages of these indices exhibit this pattern, but later changes I have planned do run into this issue.

This new pattern is also more resilient to sharding/parallel data files. Previously there could be collisions if two files were written in "parallel" (such as the codegen shards) and later their address spaces had to be merged. Now we do the address space merging at read time when we have full knowledge of the parallel files and can therefore avoid collisions.

Change-Id: Iff4c1461e734fc00f251d81f9fff1b9db83484d6
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/326460
Reviewed-by: Stephen Adams <sra@google.com>
This commit is contained in:
Nate Biggs 2023-10-21 00:31:34 +00:00 committed by Commit Queue
parent cbdbc19903
commit 0b1bad9ffc
12 changed files with 503 additions and 488 deletions

View file

@ -7,6 +7,7 @@ library dart2js.compiler_base;
import 'dart:async' show Future;
import 'dart:convert' show jsonEncode;
import 'package:compiler/src/serialization/indexed_sink_source.dart';
import 'package:compiler/src/universe/use.dart' show StaticUse;
import 'package:front_end/src/api_unstable/dart2js.dart' as fe;
import 'package:kernel/ast.dart' as ir;
@ -100,7 +101,6 @@ class Compiler {
late ir.Component componentForTesting;
late JClosedWorld? backendClosedWorldForTesting;
late DataSourceIndices? closedWorldIndicesForTesting;
late ResolutionEnqueuer resolutionEnqueuerForTesting;
late CodegenEnqueuer codegenEnqueuerForTesting;
late DumpInfoStateData dumpInfoStateForTesting;
@ -559,40 +559,43 @@ class Compiler {
return programSize;
}
DataAndIndices<GlobalTypeInferenceResults> globalTypeInferenceResultsTestMode(
DataAndIndices<GlobalTypeInferenceResults> results) {
GlobalTypeInferenceResults globalTypeInferenceResultsTestMode(
GlobalTypeInferenceResults results) {
SerializationIndices indices = SerializationIndices(testMode: true);
final strategy =
const BytesInMemorySerializationStrategy(useDataKinds: true);
final resultData = results.data!;
List<int> irData = strategy.unpackAndSerializeComponent(resultData);
List<int> irData = strategy.unpackAndSerializeComponent(results);
List<int> closedWorldData =
strategy.serializeClosedWorld(resultData.closedWorld, options);
var component = strategy.deserializeComponent(irData);
var closedWorldAndIndices = strategy.deserializeClosedWorld(
strategy.serializeClosedWorld(results.closedWorld, options, indices);
final component = strategy.deserializeComponent(irData);
final closedWorld = strategy.deserializeClosedWorld(
options,
reporter,
environment,
abstractValueStrategy,
component,
closedWorldData);
closedWorldData,
indices);
// Reset indices to clear references to old Kernel entities.
indices = SerializationIndices(testMode: true);
List<int> globalTypeInferenceResultsData =
strategy.serializeGlobalTypeInferenceResults(
closedWorldAndIndices.indices!, resultData, options);
strategy.serializeGlobalTypeInferenceResults(results, options, indices);
return strategy.deserializeGlobalTypeInferenceResults(
options,
reporter,
environment,
abstractValueStrategy,
component,
closedWorldAndIndices.data!,
closedWorldAndIndices.indices!,
globalTypeInferenceResultsData);
closedWorld,
globalTypeInferenceResultsData,
indices);
}
Future<DataAndIndices<JClosedWorld>?> produceClosedWorld(
load_kernel.Output output, ModuleData? moduleData) async {
Future<JClosedWorld?> produceClosedWorld(load_kernel.Output output,
ModuleData? moduleData, SerializationIndices indices) async {
ir.Component component = output.component;
DataAndIndices<JClosedWorld> closedWorldAndIndices;
JClosedWorld? closedWorld;
if (!stage.shouldReadClosedWorld) {
if (!usingModularAnalysis) {
// If we're deserializing the closed world, the input .dill already
@ -611,48 +614,40 @@ class Compiler {
Uri rootLibraryUri = output.rootLibraryUri!;
List<Uri> libraries = output.libraries!;
final closedWorld =
closedWorld =
computeClosedWorld(component, moduleData, rootLibraryUri, libraries);
closedWorldAndIndices = DataAndIndices<JClosedWorld>(closedWorld, null);
if (stage == Dart2JSStage.closedWorld && closedWorld != null) {
serializationTask.serializeComponent(
closedWorld.elementMap.programEnv.mainComponent,
includeSourceBytes: false);
serializationTask.serializeClosedWorld(closedWorld);
serializationTask.serializeClosedWorld(closedWorld, indices);
}
} else {
closedWorldAndIndices = await serializationTask.deserializeClosedWorld(
environment,
abstractValueStrategy,
component,
useDeferredSourceReads);
closedWorld = await serializationTask.deserializeClosedWorld(environment,
abstractValueStrategy, component, useDeferredSourceReads, indices);
}
if (retainDataForTesting) {
backendClosedWorldForTesting = closedWorldAndIndices.data;
closedWorldIndicesForTesting = closedWorldAndIndices.indices;
backendClosedWorldForTesting = closedWorld;
}
return closedWorldAndIndices;
return closedWorld;
}
bool shouldStopAfterClosedWorld(
DataAndIndices<JClosedWorld>? closedWorldAndIndices) =>
closedWorldAndIndices == null ||
closedWorldAndIndices.data == null ||
bool shouldStopAfterClosedWorld(JClosedWorld? closedWorld) =>
closedWorld == null ||
stage == Dart2JSStage.closedWorld ||
stage == Dart2JSStage.deferredLoadIds ||
stopAfterClosedWorldForTesting;
Future<DataAndIndices<GlobalTypeInferenceResults>>
produceGlobalTypeInferenceResults(
DataAndIndices<JClosedWorld> closedWorldAndIndices) async {
JClosedWorld closedWorld = closedWorldAndIndices.data!;
DataAndIndices<GlobalTypeInferenceResults> globalTypeInferenceResults;
Future<GlobalTypeInferenceResults> produceGlobalTypeInferenceResults(
JClosedWorld closedWorld,
ir.Component component,
SerializationIndices indices) async {
GlobalTypeInferenceResults globalTypeInferenceResults;
if (!stage.shouldReadGlobalInference) {
globalTypeInferenceResults =
DataAndIndices(performGlobalTypeInference(closedWorld), null);
globalTypeInferenceResults = performGlobalTypeInference(closedWorld);
if (stage == Dart2JSStage.globalInference) {
serializationTask.serializeGlobalTypeInference(
globalTypeInferenceResults.data!, closedWorldAndIndices.indices!);
globalTypeInferenceResults, indices);
} else if (options.testMode) {
globalTypeInferenceResults =
globalTypeInferenceResultsTestMode(globalTypeInferenceResults);
@ -663,8 +658,9 @@ class Compiler {
environment,
abstractValueStrategy,
closedWorld.elementMap.programEnv.mainComponent,
closedWorldAndIndices,
useDeferredSourceReads);
closedWorld,
useDeferredSourceReads,
indices);
}
return globalTypeInferenceResults;
}
@ -682,26 +678,26 @@ class Compiler {
}
Future<CodegenResults> produceCodegenResults(
DataAndIndices<GlobalTypeInferenceResults> globalTypeInferenceResults,
SourceLookup sourceLookup) async {
final globalTypeInferenceData = globalTypeInferenceResults.data!;
CodegenInputs codegenInputs = initializeCodegen(globalTypeInferenceData);
GlobalTypeInferenceResults globalTypeInferenceResults,
SourceLookup sourceLookup,
SerializationIndices indices) async {
CodegenInputs codegenInputs = initializeCodegen(globalTypeInferenceResults);
CodegenResults codegenResults;
if (!stage.shouldReadCodegenShards) {
codegenResults = OnDemandCodegenResults(globalTypeInferenceData,
codegenResults = OnDemandCodegenResults(globalTypeInferenceResults,
codegenInputs, backendStrategy.functionCompiler);
if (stage == Dart2JSStage.codegenSharded) {
serializationTask.serializeCodegen(backendStrategy, codegenResults,
globalTypeInferenceResults.indices!);
serializationTask.serializeCodegen(
backendStrategy, codegenResults, indices);
}
} else {
codegenResults = await serializationTask.deserializeCodegen(
backendStrategy,
globalTypeInferenceData,
globalTypeInferenceResults,
codegenInputs,
globalTypeInferenceResults.indices!,
useDeferredSourceReads,
sourceLookup);
sourceLookup,
indices);
}
return codegenResults;
}
@ -725,38 +721,38 @@ class Compiler {
moduleData = await produceModuleData(output!);
}
if (shouldStopAfterModularAnalysis) return;
final indices = SerializationIndices();
// Compute closed world.
DataAndIndices<JClosedWorld>? closedWorldAndIndices =
await produceClosedWorld(output!, moduleData);
if (shouldStopAfterClosedWorld(closedWorldAndIndices)) return;
JClosedWorld? closedWorld =
await produceClosedWorld(output!, moduleData, indices);
if (shouldStopAfterClosedWorld(closedWorld)) return;
// Run global analysis.
DataAndIndices<GlobalTypeInferenceResults> globalTypeInferenceResults =
await produceGlobalTypeInferenceResults(closedWorldAndIndices!);
GlobalTypeInferenceResults globalTypeInferenceResults =
await produceGlobalTypeInferenceResults(
closedWorld!, output.component, indices);
if (shouldStopAfterGlobalTypeInference) return;
// Allow the original references to these to be GCed and only hold
// references to them if we are actually running the dump info task later.
JClosedWorld? closedWorldForDumpInfo;
DataSourceIndices? globalInferenceIndicesForDumpInfo;
SerializationIndices? indicesForDumpInfo;
if (options.dumpInfoWriteUri != null || options.dumpInfoReadUri != null) {
closedWorldForDumpInfo = closedWorldAndIndices.data;
globalInferenceIndicesForDumpInfo = globalTypeInferenceResults.indices;
closedWorldForDumpInfo = closedWorld;
indicesForDumpInfo = indices;
}
// Run codegen.
final sourceLookup = SourceLookup(output.component);
CodegenResults codegenResults =
await produceCodegenResults(globalTypeInferenceResults, sourceLookup);
CodegenResults codegenResults = await produceCodegenResults(
globalTypeInferenceResults, sourceLookup, indices);
if (shouldStopAfterCodegen) return;
if (options.dumpInfoReadUri != null) {
final dumpInfoData =
await serializationTask.deserializeDumpInfoProgramData(
backendStrategy,
closedWorldForDumpInfo!,
globalInferenceIndicesForDumpInfo);
backendStrategy, closedWorldForDumpInfo!, indicesForDumpInfo!);
await runDumpInfo(codegenResults, dumpInfoData);
} else {
// Link.
@ -768,11 +764,8 @@ class Compiler {
if (options.dumpInfo) {
await runDumpInfo(codegenResults, dumpInfoData);
} else {
serializationTask.serializeDumpInfoProgramData(
backendStrategy,
dumpInfoData,
closedWorldForDumpInfo!,
globalInferenceIndicesForDumpInfo);
serializationTask.serializeDumpInfoProgramData(backendStrategy,
dumpInfoData, closedWorldForDumpInfo!, indicesForDumpInfo!);
}
}
}

View file

@ -42,8 +42,8 @@ class PositionSourceInformation extends SourceInformation {
() => SourceLocation.readFromDataSource(source));
SourceLocation? innerPosition = source.readCachedOrNull<SourceLocation>(
() => SourceLocation.readFromDataSource(source));
List<FrameContext>? inliningContext = source
.readCachedOrNull<List<FrameContext>?>(() => source.readListOrNull(() =>
List<FrameContext>? inliningContext =
source.readCachedOrNull<List<FrameContext>>(() => source.readList(() =>
source.readCached(() => FrameContext.readFromDataSource(source))));
source.end(tag);
return PositionSourceInformation(

View file

@ -333,17 +333,18 @@ class JsBackendStrategy {
MemberEntity member = work.element;
CodegenResult result = codegenResults.getCodegenResults(member);
if (_compiler.options.testMode) {
final indices = SerializationIndices(testMode: true);
bool useDataKinds = true;
List<Object> data = [];
DataSinkWriter sink = DataSinkWriter(
ObjectDataSink(data), _compiler.options,
ObjectDataSink(data), _compiler.options, indices,
useDataKinds: useDataKinds);
sink.registerCodegenWriter(
CodegenWriterImpl(closedWorld, result.deferredExpressionData));
result.writeToDataSink(sink);
sink.close();
DataSourceReader source = DataSourceReader(
ObjectDataSource(data), _compiler.options,
ObjectDataSource(data), _compiler.options, indices,
useDataKinds: useDataKinds);
source.registerCodegenReader(CodegenReaderImpl(closedWorld));
source.registerEntityLookup(entityLookup);

View file

@ -111,6 +111,8 @@ class BinaryDataSource implements DataSource {
@override
int get length => _bytes.length;
@override
int get currentOffset => _byteOffset;
@override
String get errorContext => ' Offset $_byteOffset in ${_bytes.length}.';

View file

@ -4,18 +4,106 @@
import 'serialization.dart';
abstract class IndexedSource<E> {
E? read(E readValue());
abstract class IndexedSource<E extends Object> {
Map<int, E> get cache;
/// Reshapes the cache to a [Map<E, int>] using [_getValue] if provided or
/// leaving the cache entry as is otherwise.
Map<T?, int> reshapeCacheAsMap<T>([T Function(E? value)? getValue]);
E? read(DataSourceReader source, E readValue());
}
abstract class IndexedSink<E> {
void write(E value, void writeValue(E value));
abstract class IndexedSink<E extends Object> {
Map<E, int> get cache;
void write(DataSinkWriter sink, E? value, void writeValue(E value));
}
const int _dataInPlaceIndicator = 0;
const int _nullIndicator = 1;
const int _indicatorOffset = 2;
/// Facilitates indexed reads and writes for [IndexedSource] and [IndexedSink].
///
/// Created and stores shared [IndexedSource] and [IndexedSink] instances for
/// cached types. Copies indices from sources to sinks when a sink is requested
/// so that the indices are shared across data files.
///
/// [DataSourceReader] instances must be registered so that contiguous start
/// offsets can be set on each reader. This allows global offsets to be
/// correctly calculated by the indices. See [UnorderedIndexedSource] for more
/// info.
class SerializationIndices {
final Map<Type, IndexedSource> _indexedSources = {};
final Map<Type, IndexedSink> _indexedSinks = {};
final List<DataSourceReader> _sources = [];
final bool testMode;
SerializationIndices({this.testMode = false});
int registerSource(DataSourceReader source) {
int startOffset;
if (_sources.isEmpty) {
startOffset = 0;
} else {
final lastSource = _sources.last;
startOffset = lastSource.startOffset + lastSource.length;
}
_sources.add(source);
return startOffset;
}
IndexedSource<E> getIndexedSource<E extends Object>() {
final source = (_indexedSources[E] ??= UnorderedIndexedSource<E>(this))
as IndexedSource<E>;
if (testMode) {
/// In test mode we ensure that the values we read out are identical to
/// the values we write in. When copying the elements we turn the local
/// offsets to global offsets so that the source cache will hit.
/// Note: Mapped sinks will not get copied over since the mapped write
/// type will be different from the read type.
final sink = _indexedSinks[E] as IndexedSink<E>?;
sink?.cache.forEach((value, offset) {
source.cache[offset] = value;
});
}
return source;
}
IndexedSink<E> getIndexedSink<E extends Object>({bool identity = false}) {
return _getIndexedSink<E, E>(null, identity: identity);
}
IndexedSink<T> getMappedIndexedSink<E extends Object, T extends Object>(
T Function(E value) f) {
return _getIndexedSink<E, T>(f, identity: false);
}
IndexedSink<T> _getIndexedSink<E extends Object, T extends Object>(
T Function(E value)? f,
{required bool identity}) {
final sink = (_indexedSinks[T] ??=
UnorderedIndexedSink<T>(identity: identity)) as IndexedSink<T>;
final source = _indexedSources[E] as UnorderedIndexedSource<E>?;
source?.cache.forEach((offset, value) {
final key = (f != null ? f(value) : value) as T;
sink.cache[key] = offset;
});
return sink;
}
}
// Real offsets are the offsets into the file the data is written in.
// Local offsets are real offsets with an extra indicator bit set to 1.
// Global offsets are offsets into the address space of all files with an
// extra indicator bit set to 0.
int _realToLocalOffset(int offset) => (offset << 1) | 1;
int _realToGlobalOffset(int offset, DataSourceReader source) =>
(offset + source.startOffset) << 1;
bool _isLocalOffset(int offset) => (offset & 1) == 1;
int _offsetWithoutIndicator(int offset) => offset >> 1;
int _globalToRealOffset(int offset, DataSourceReader source) =>
(offset >> 1) - source.startOffset;
int _localToGlobalOffset(int offset, DataSourceReader source) =>
_realToGlobalOffset(offset >> 1, source);
/// Data sink helper that canonicalizes [E?] values using IDs.
///
/// Writes a unique ID in place of previously visited indexable values. This
@ -23,38 +111,37 @@ abstract class IndexedSink<E> {
/// read. The read and write order do not need to be the same because no matter
/// what occurrence of the ID we encounter, we can always recover the value.
///
/// We increment all written offsets by [_startOffset] in order to distinguish
/// which source file the offset is from on deserialization.
/// We increment all written offsets by an adjustment value in order to
/// distinguish which source file the offset is from on deserialization.
/// See [UnorderedIndexedSource] for more info.
class UnorderedIndexedSink<E> implements IndexedSink<E> {
final DataSinkWriter _sinkWriter;
final Map<E?, int> _cache;
final int _startOffset;
class UnorderedIndexedSink<E extends Object> implements IndexedSink<E> {
final Map<E, int> _cache;
UnorderedIndexedSink(this._sinkWriter,
{Map<E?, int>? cache, int? startOffset, bool identity = false})
: // [cache] slot 1 is pre-allocated to `null`.
this._cache = cache != null
? (identity ? (Map.identity()..addAll(cache)) : cache)
: ((identity ? Map.identity() : {})..[null] = 1),
this._startOffset = startOffset ?? 0;
UnorderedIndexedSink({bool identity = false})
: this._cache = identity ? Map.identity() : {};
@override
Map<E, int> get cache => _cache;
/// Write a reference to [value] to the data sink.
///
/// If [value] has not been canonicalized yet, [writeValue] is called to
/// serialize the [value] itself.
@override
void write(E? value, void writeValue(E value)) {
void write(DataSinkWriter sink, E? value, void writeValue(E value)) {
if (value == null) {
// We reserve 1 as an indicator for `null`.
sink.writeInt(_nullIndicator);
return;
}
final offset = _cache[value];
if (offset == null) {
// We reserve 0 as an indicator that the data is written 'here'.
_sinkWriter.writeInt(0);
final adjustedOffset = _sinkWriter.length + _startOffset;
_sinkWriter.writeInt(adjustedOffset);
_cache[value] = adjustedOffset;
writeValue(value!); // null would have been found in slot 1
sink.writeInt(_dataInPlaceIndicator);
_cache[value] = _realToLocalOffset(sink.length);
writeValue(value);
} else {
_sinkWriter.writeInt(offset);
sink.writeInt(offset + _indicatorOffset);
}
}
}
@ -73,72 +160,77 @@ class UnorderedIndexedSink<E> implements IndexedSink<E> {
/// offset K1 .. K2 --- source S2
/// offset K2 .. K3 --- source S3
///
/// This effectively treats all the file as a contiguous address space with
/// offsets being relative to the start of the first source.
/// This effectively treats all the files as a contiguous address space with
/// offsets being global to the start of the first source.
///
/// Offsets are written in one of two forms. Either as a local offset, an offset
/// relative to the start of the same file, or as a global offset, an offset
/// relative to the start of the concatenated address space of all sources. The
/// two forms are indicated via the lowest bit, the former has that bit set,
/// the latter does not. Local offsets are turned into global offsets when they
/// are written into a later file.
///
/// If an offset is encountered outside the block accessible to current source,
/// [previousSource] provides a pointer to the next source to check (i.e. the
/// previous block in the address space).
class UnorderedIndexedSource<E> implements IndexedSource<E> {
final DataSourceReader _sourceReader;
final Map<int, E?> _cache;
final UnorderedIndexedSource<E>? previousSource;
/// [SerializationIndices] provides pointers to the previous sources to check
/// (i.e. previous blocks in the address space).
class UnorderedIndexedSource<E extends Object> implements IndexedSource<E> {
final Map<int, E> _cache = {};
final SerializationIndices _indices;
UnorderedIndexedSource(this._sourceReader, {this.previousSource})
// [cache] slot 1 is pre-allocated to `null`.
: _cache =
previousSource != null ? {...previousSource._cache} : {1: null};
UnorderedIndexedSource(this._indices);
@override
Map<int, E> get cache => _cache;
/// Reads a reference to an [E?] value from the data source.
///
/// If the value hasn't yet been read, [readValue] is called to deserialize
/// the value itself.
@override
E? read(E readValue()) {
final markerOrOffset = _sourceReader.readInt();
E? read(DataSourceReader source, E readValue()) {
final markerOrOffset = source.readInt();
// We reserve 0 as an indicator that the data is written 'here'.
if (markerOrOffset == 0) {
final offset = _sourceReader.readInt();
if (markerOrOffset == _dataInPlaceIndicator) {
final globalOffset = _realToGlobalOffset(source.currentOffset, source);
// We have to read the value regardless of whether or not it's cached to
// move the reader passed it.
// move the reader past it.
final value = readValue();
final cachedValue = _cache[offset];
final cachedValue = _cache[globalOffset];
if (cachedValue != null) return cachedValue;
_cache[offset] = value;
_cache[globalOffset] = value;
return value;
} else if (markerOrOffset == _nullIndicator) {
return null;
} else {
final offset = markerOrOffset - _indicatorOffset;
bool isLocal = _isLocalOffset(offset);
final globalOffset =
isLocal ? _localToGlobalOffset(offset, source) : offset;
final cachedValue = _cache[globalOffset];
if (cachedValue != null) return cachedValue;
return _readAtOffset(source, readValue, globalOffset, isLocal);
}
if (markerOrOffset == 1) return null;
final cachedValue = _cache[markerOrOffset];
if (cachedValue != null) return cachedValue;
return _readAtOffset(readValue, markerOrOffset);
}
UnorderedIndexedSource<E> _findSource(int offset) {
return offset >= _sourceReader.startOffset
? this
: previousSource!._findSource(offset);
DataSourceReader findSource(int globalOffset) {
final offset = _offsetWithoutIndicator(globalOffset);
final sources = _indices._sources;
for (int i = sources.length - 1; i >= 0; i--) {
final source = sources[i];
if (source.startOffset <= offset) return source;
}
throw StateError('Could not find source for $offset.');
}
E? _readAtOffset(E readValue(), int offset) {
final realSource = _findSource(offset);
var adjustedOffset = offset - realSource._sourceReader.startOffset;
final reader = () {
_sourceReader.readInt();
return readValue();
};
final value = realSource == this
? _sourceReader.readWithOffset(adjustedOffset, reader)
: _sourceReader.readWithSource(realSource._sourceReader,
() => _sourceReader.readWithOffset(adjustedOffset, reader));
_cache[offset] = value;
E _readAtOffset(
DataSourceReader source, E readValue(), int globalOffset, bool isLocal) {
final realSource = isLocal ? source : findSource(globalOffset);
final realOffset = _globalToRealOffset(globalOffset, realSource);
final value = isLocal
? source.readWithOffset(realOffset, readValue)
: source.readWithSource(
realSource, () => source.readWithOffset(realOffset, readValue));
_cache[globalOffset] = value;
return value;
}
@override
Map<T?, int> reshapeCacheAsMap<T>([T Function(E? value)? getValue]) {
return _cache.map((key, value) =>
MapEntry(getValue == null ? value as T? : getValue(value), key));
}
}

View file

@ -76,6 +76,9 @@ class ObjectDataSource implements DataSource {
@override
int get length => _data.length;
@override
int get currentOffset => _index;
@override
String get errorContext {
StringBuffer sb = StringBuffer();

View file

@ -32,6 +32,7 @@ import 'tags.dart';
export 'binary_sink.dart';
export 'binary_source.dart';
export 'indexed_sink_source.dart' show SerializationIndices;
export 'member_data.dart' show ComponentLookup, computeMemberName;
export 'object_sink.dart';
export 'object_source.dart';
@ -58,32 +59,6 @@ class ValueInterner {
}
}
/// Data class representing cache information for a given [T] which can be
/// passed from a [DataSourceReader] to other [DataSourceReader]s and [DataSinkWriter]s.
class DataSourceTypeIndices<E, T> {
Map<E?, int> get cache => _cache ??= source.reshapeCacheAsMap(_getValue);
final E Function(T? value)? _getValue;
Map<E?, int>? _cache;
final IndexedSource<T> source;
/// Uses the cache from the provided [source] and reshapes it if necessary
/// to create a lookup map of cached entities. If [_getValue] is provided,
/// the function will be used to map the cached entities into lookup keys.
DataSourceTypeIndices(this.source, [this._getValue]) {
assert(_getValue != null || T == E);
}
}
/// Data class representing the sum of all cache information for a given
/// [DataSourceReader].
class DataSourceIndices {
final Map<Type, DataSourceTypeIndices> caches = {};
final DataSourceReader? previousSourceReader;
DataSourceIndices(this.previousSourceReader);
}
/// Interface used for looking up locals by index during deserialization.
abstract class LocalLookup {
Local getLocalByIndex(MemberEntity memberContext, int index);

View file

@ -48,7 +48,7 @@ class DataSinkWriter {
/// and deserialization.
final bool useDataKinds;
DataSourceIndices? importedIndices;
final SerializationIndices importedIndices;
/// Visitor used for serializing [ir.DartType]s.
late final DartTypeNodeWriter _dartTypeNodeWriter;
@ -66,9 +66,9 @@ class DataSinkWriter {
late final IndexedSink<ImportEntity> _importIndex;
late final IndexedSink<ConstantValue> _constantIndex;
EntityWriter _entityWriter = const EntityWriter();
final Map<Type, IndexedSink> _generalCaches = {};
EntityWriter _entityWriter = const EntityWriter();
late CodegenWriter _codegenWriter;
final Map<String, int>? tagFrequencyMap;
@ -76,30 +76,16 @@ class DataSinkWriter {
ir.Member? _currentMemberContext;
MemberData? _currentMemberData;
IndexedSink<T> _createSink<T>({bool identity = false}) {
final indices = importedIndices;
if (indices == null)
return UnorderedIndexedSink<T>(this, identity: identity);
final sourceInfo = indices.caches[T];
if (sourceInfo == null) {
return UnorderedIndexedSink<T>(this,
startOffset: indices.previousSourceReader?.endOffset,
identity: identity);
}
return UnorderedIndexedSink<T>(this,
cache: Map.from(sourceInfo.cache),
startOffset: indices.previousSourceReader?.endOffset,
identity: identity);
}
DataSinkWriter(this._sinkWriter, CompilerOptions options,
{this.useDataKinds = false, this.tagFrequencyMap, this.importedIndices}) {
DataSinkWriter(
this._sinkWriter, CompilerOptions options, this.importedIndices,
{this.useDataKinds = false, this.tagFrequencyMap}) {
_dartTypeNodeWriter = DartTypeNodeWriter(this);
_stringIndex = _createSink<String>();
_uriIndex = _createSink<Uri>();
_memberNodeIndex = _createSink<ir.Member>();
_importIndex = _createSink<ImportEntity>();
_constantIndex = _createSink<ConstantValue>();
_stringIndex = importedIndices.getIndexedSink<String>();
_uriIndex = importedIndices.getIndexedSink<Uri>();
_memberNodeIndex = importedIndices
.getMappedIndexedSink<MemberData, ir.Member>((data) => data.node);
_importIndex = importedIndices.getIndexedSink<ImportEntity>();
_constantIndex = importedIndices.getIndexedSink<ConstantValue>();
}
/// The amount of data written to this data sink.
@ -150,9 +136,12 @@ class DataSinkWriter {
/// [identity] is true then the cache is backed by a [Map] created using
/// [Map.identity]. (i.e. comparisons are done using [identical] rather than
/// `==`)
void writeCached<E>(E? value, void f(E value), {bool identity = false}) {
IndexedSink sink = _generalCaches[E] ??= _createSink<E>(identity: identity);
sink.write(value, (v) => f(v));
void writeCached<E extends Object>(E? value, void f(E value),
{bool identity = false}) {
IndexedSink<E> sink = (_generalCaches[E] ??=
importedIndices.getIndexedSink<E>(identity: identity))
as IndexedSink<E>;
sink.write(this, value, f);
}
/// Writes the potentially `null` [value] to this data sink. If [value] is
@ -218,7 +207,7 @@ class DataSinkWriter {
}
void _writeString(String value) {
_stringIndex.write(value, _sinkWriter.writeString);
_stringIndex.write(this, value, _sinkWriter.writeString);
}
/// Writes the potentially `null` string [value] to this data sink.
@ -306,7 +295,7 @@ class DataSinkWriter {
}
void _writeUri(Uri value) {
_uriIndex.write(value, _doWriteUri);
_uriIndex.write(this, value, _doWriteUri);
}
void _doWriteUri(Uri value) {
@ -364,7 +353,7 @@ class DataSinkWriter {
}
void _writeMemberNode(ir.Member value) {
_memberNodeIndex.write(value, _writeMemberNodeInternal);
_memberNodeIndex.write(this, value, _writeMemberNodeInternal);
}
void _writeMemberNodeInternal(ir.Member value) {
@ -958,7 +947,7 @@ class DataSinkWriter {
}
void _writeConstant(ConstantValue value) {
_constantIndex.write(value, _writeConstantInternal);
_constantIndex.write(this, value, _writeConstantInternal);
}
void _writeConstantInternal(ConstantValue value) {
@ -1137,7 +1126,7 @@ class DataSinkWriter {
}
void _writeImport(ImportEntity value) {
_importIndex.write(value, _writeImportInternal);
_importIndex.write(this, value, _writeImportInternal);
}
void _writeImportInternal(ImportEntity value) {

View file

@ -38,6 +38,9 @@ abstract class DataSource {
/// The length of the underlying data source.
int get length;
/// The current offset being read from.
int get currentOffset;
/// Returns a string representation of the current state of the data source
/// useful for debugging in consistencies between serialization and
/// deserialization.
@ -60,7 +63,7 @@ class DataSourceReader {
final bool useDeferredStrategy;
final bool useDataKinds;
final ValueInterner? interner;
DataSourceIndices? importedIndices;
final SerializationIndices importedIndices;
EntityReader _entityReader = const EntityReader();
ComponentLookup? _componentLookup;
EntityLookup? _entityLookup;
@ -79,6 +82,7 @@ class DataSourceReader {
ir.Member? _currentMemberContext;
MemberData? _currentMemberData;
int get currentOffset => _sourceReader.currentOffset;
int get length => _sourceReader.length;
/// Defines the beginning of this block in the address space created by all
@ -88,76 +92,19 @@ class DataSourceReader {
/// shifted. That is the length of all the sources read before this one.
///
/// See [UnorderedIndexedSource] for more info.
int get startOffset => importedIndices?.previousSourceReader?.endOffset ?? 0;
late int startOffset;
/// Defines the end of this block in the address space created by all
/// instances of [DataSourceReader].
///
/// Indexed values read from this source will all have offsets less than this
/// value.
///
/// See [UnorderedIndexedSource] for more info.
final int endOffset;
UnorderedIndexedSource<T>? _getPreviousUncreatedSource<T>() {
final previousSourceReader = importedIndices?.previousSourceReader;
if (previousSourceReader == null) return null;
return UnorderedIndexedSource<T>(previousSourceReader,
previousSource: previousSourceReader._getPreviousUncreatedSource<T>());
}
IndexedSource<T> _createSource<T>() {
final indices = importedIndices;
if (indices != null) {
if (indices.caches.containsKey(T)) {
final index = indices.caches.remove(T);
return UnorderedIndexedSource<T>(this,
previousSource: index!.source as UnorderedIndexedSource<T>);
}
final newPreviousSource = _getPreviousUncreatedSource<T>();
if (newPreviousSource != null) {
return UnorderedIndexedSource<T>(this,
previousSource: newPreviousSource);
}
}
return UnorderedIndexedSource<T>(this);
}
DataSourceReader(this._sourceReader, CompilerOptions options,
DataSourceReader(
this._sourceReader, CompilerOptions options, this.importedIndices,
{this.useDataKinds = false,
DataSourceIndices? importedIndices,
this.interner,
this.useDeferredStrategy = false})
: this.importedIndices = importedIndices == null
? null
: (DataSourceIndices(importedIndices.previousSourceReader)
..caches.addAll(importedIndices.caches)),
endOffset = (importedIndices?.previousSourceReader?.endOffset ?? 0) +
_sourceReader.length {
_stringIndex = _createSource<String>();
_uriIndex = _createSource<Uri>();
_importIndex = _createSource<ImportEntity>();
_memberNodeIndex = _createSource<MemberData>();
_constantIndex = _createSource<ConstantValue>();
}
/// Exports [DataSourceIndices] for use in other [DataSourceReader]s and
/// [DataSinkWriter]s.
DataSourceIndices exportIndices() {
final indices = DataSourceIndices(this);
indices.caches[String] = DataSourceTypeIndices(_stringIndex);
indices.caches[Uri] = DataSourceTypeIndices(_uriIndex);
indices.caches[ImportEntity] = DataSourceTypeIndices(_importIndex);
// _memberNodeIndex needs two entries depending on if the indices will be
// consumed by a [DataSource] or [DataSink].
indices.caches[MemberData] = DataSourceTypeIndices(_memberNodeIndex);
indices.caches[ir.Member] = DataSourceTypeIndices<ir.Member?, MemberData>(
_memberNodeIndex, (MemberData? data) => data?.node);
indices.caches[ConstantValue] = DataSourceTypeIndices(_constantIndex);
_generalCaches.forEach((type, indexedSource) {
indices.caches[type] = DataSourceTypeIndices(indexedSource);
});
return indices;
this.useDeferredStrategy = false}) {
startOffset = importedIndices.registerSource(this);
_stringIndex = importedIndices.getIndexedSource<String>();
_uriIndex = importedIndices.getIndexedSource<Uri>();
_importIndex = importedIndices.getIndexedSource<ImportEntity>();
_memberNodeIndex = importedIndices.getIndexedSource<MemberData>();
_constantIndex = importedIndices.getIndexedSource<ConstantValue>();
}
/// Registers that the section [tag] starts.
@ -237,12 +184,14 @@ class DataSourceReader {
final lastLocalLookup = _localLookup;
final lastComponentLookup = _componentLookup;
final lastCodegenReader = _codegenReader;
final lastStartOffset = startOffset;
_sourceReader = source._sourceReader;
_entityReader = source._entityReader;
_entityLookup = source._entityLookup;
_localLookup = source._localLookup;
_componentLookup = source._componentLookup;
_codegenReader = source._codegenReader;
startOffset = source.startOffset;
final value = f();
_sourceReader = lastSource;
_entityReader = lastEntityReader;
@ -250,6 +199,7 @@ class DataSourceReader {
_localLookup = lastLocalLookup;
_componentLookup = lastComponentLookup;
_codegenReader = lastCodegenReader;
startOffset = lastStartOffset;
return value;
}
@ -297,7 +247,7 @@ class DataSourceReader {
/// Reads a reference to an [E] value from this data source. If the value has
/// not yet been deserialized, [f] is called to deserialize the value itself.
E readCached<E>(E f()) {
E readCached<E extends Object>(E f()) {
E? value = readCachedOrNull(f);
if (value == null) throw StateError("Unexpected 'null' for $E");
return value;
@ -305,10 +255,10 @@ class DataSourceReader {
/// Reads a reference to an [E] value from this data source. If the value has
/// not yet been deserialized, [f] is called to deserialize the value itself.
E? readCachedOrNull<E>(E f()) {
IndexedSource<E> source =
(_generalCaches[E] ??= _createSource<E>()) as IndexedSource<E>;
return source.read(f);
E? readCachedOrNull<E extends Object>(E f()) {
IndexedSource<E> source = (_generalCaches[E] ??=
importedIndices.getIndexedSource<E>()) as IndexedSource<E>;
return source.read(this, f);
}
/// Reads a potentially `null` [E] value from this data source, calling [f] to
@ -386,7 +336,9 @@ class DataSourceReader {
}
String _readString() {
return _stringIndex.read(() => _sourceReader.readString())!;
// Cannot use a tear-off for `_sourceReader.readString` because the data
// source may be different at the time of reading.
return _stringIndex.read(this, () => _sourceReader.readString())!;
}
/// Reads a potentially `null` string value from this data source.
@ -473,7 +425,7 @@ class DataSourceReader {
}
Uri _readUri() {
return _uriIndex.read(_doReadUri)!;
return _uriIndex.read(this, _doReadUri)!;
}
Uri _doReadUri() {
@ -535,7 +487,7 @@ class DataSourceReader {
}
MemberData _readMemberData() {
return _memberNodeIndex.read(_readMemberDataInternal)!;
return _memberNodeIndex.read(this, _readMemberDataInternal)!;
}
MemberData _readMemberDataInternal() {
@ -1269,7 +1221,7 @@ class DataSourceReader {
}
ConstantValue _readConstant() {
return _constantIndex.read(_readConstantInternal)!;
return _constantIndex.read(this, _readConstantInternal)!;
}
ConstantValue _readConstantInternal() {
@ -1438,7 +1390,7 @@ class DataSourceReader {
/// Reads a import from this data source.
ImportEntity _readImport() {
return _importIndex.read(_readImportInternal)!;
return _importIndex.read(this, _readImportInternal)!;
}
ImportEntity _readImportInternal() {

View file

@ -29,8 +29,10 @@ abstract class SerializationStrategy<T> {
return serializeComponent(component);
}
List<T> serializeGlobalTypeInferenceResults(DataSourceIndices? indices,
GlobalTypeInferenceResults results, CompilerOptions options);
List<T> serializeGlobalTypeInferenceResults(
GlobalTypeInferenceResults results,
CompilerOptions options,
SerializationIndices indices);
List<int> serializeComponent(ir.Component component) {
return ir.serializeComponent(component);
@ -42,27 +44,27 @@ abstract class SerializationStrategy<T> {
return component;
}
DataAndIndices<GlobalTypeInferenceResults>
deserializeGlobalTypeInferenceResults(
CompilerOptions options,
DiagnosticReporter reporter,
Environment environment,
AbstractValueStrategy abstractValueStrategy,
ir.Component component,
JClosedWorld closedWorld,
DataSourceIndices? indices,
List<T> globalTypeInferenceResultsData);
List<T> serializeClosedWorld(
JClosedWorld closedWorld, CompilerOptions options);
DataAndIndices<JClosedWorld> deserializeClosedWorld(
GlobalTypeInferenceResults deserializeGlobalTypeInferenceResults(
CompilerOptions options,
DiagnosticReporter reporter,
Environment environment,
AbstractValueStrategy abstractValueStrategy,
ir.Component component,
List<T> data);
JClosedWorld closedWorld,
List<T> globalTypeInferenceResultsData,
SerializationIndices indices);
List<T> serializeClosedWorld(JClosedWorld closedWorld,
CompilerOptions options, SerializationIndices indices);
JClosedWorld deserializeClosedWorld(
CompilerOptions options,
DiagnosticReporter reporter,
Environment environment,
AbstractValueStrategy abstractValueStrategy,
ir.Component component,
List<T> data,
SerializationIndices indices);
}
class BytesInMemorySerializationStrategy extends SerializationStrategy<int> {
@ -71,29 +73,31 @@ class BytesInMemorySerializationStrategy extends SerializationStrategy<int> {
const BytesInMemorySerializationStrategy({this.useDataKinds = false});
@override
List<int> serializeGlobalTypeInferenceResults(DataSourceIndices? indices,
GlobalTypeInferenceResults results, CompilerOptions options) {
List<int> serializeGlobalTypeInferenceResults(
GlobalTypeInferenceResults results,
CompilerOptions options,
SerializationIndices indices) {
ByteSink byteSink = ByteSink();
DataSinkWriter sink = DataSinkWriter(BinaryDataSink(byteSink), options,
useDataKinds: useDataKinds, importedIndices: indices);
DataSinkWriter sink = DataSinkWriter(
BinaryDataSink(byteSink), options, indices,
useDataKinds: useDataKinds);
serializeGlobalTypeInferenceResultsToSink(results, sink);
return byteSink.builder.takeBytes();
}
@override
DataAndIndices<GlobalTypeInferenceResults>
deserializeGlobalTypeInferenceResults(
CompilerOptions options,
DiagnosticReporter reporter,
Environment environment,
AbstractValueStrategy abstractValueStrategy,
ir.Component component,
JClosedWorld closedWorld,
DataSourceIndices? indices,
List<int> globalTypeInferenceResultsData) {
GlobalTypeInferenceResults deserializeGlobalTypeInferenceResults(
CompilerOptions options,
DiagnosticReporter reporter,
Environment environment,
AbstractValueStrategy abstractValueStrategy,
ir.Component component,
JClosedWorld closedWorld,
List<int> globalTypeInferenceResultsData,
SerializationIndices indices) {
DataSourceReader globalTypeInferenceResultsSource = DataSourceReader(
BinaryDataSource(globalTypeInferenceResultsData), options,
useDataKinds: useDataKinds, importedIndices: indices);
BinaryDataSource(globalTypeInferenceResultsData), options, indices,
useDataKinds: useDataKinds);
final results = deserializeGlobalTypeInferenceResultsFromSource(
options,
reporter,
@ -102,33 +106,35 @@ class BytesInMemorySerializationStrategy extends SerializationStrategy<int> {
component,
closedWorld,
globalTypeInferenceResultsSource);
return DataAndIndices(
results, globalTypeInferenceResultsSource.exportIndices());
return results;
}
@override
List<int> serializeClosedWorld(
JClosedWorld closedWorld, CompilerOptions options) {
List<int> serializeClosedWorld(JClosedWorld closedWorld,
CompilerOptions options, SerializationIndices indices) {
ByteSink byteSink = ByteSink();
DataSinkWriter sink = DataSinkWriter(BinaryDataSink(byteSink), options,
DataSinkWriter sink = DataSinkWriter(
BinaryDataSink(byteSink), options, indices,
useDataKinds: useDataKinds);
serializeClosedWorldToSink(closedWorld, sink);
return byteSink.builder.takeBytes();
}
@override
DataAndIndices<JClosedWorld> deserializeClosedWorld(
JClosedWorld deserializeClosedWorld(
CompilerOptions options,
DiagnosticReporter reporter,
Environment environment,
AbstractValueStrategy abstractValueStrategy,
ir.Component component,
List<int> data) {
DataSourceReader source = DataSourceReader(BinaryDataSource(data), options,
List<int> data,
SerializationIndices indices) {
DataSourceReader source = DataSourceReader(
BinaryDataSource(data), options, indices,
useDataKinds: useDataKinds);
var closedWorld = deserializeClosedWorldFromSource(options, reporter,
environment, abstractValueStrategy, component, source);
return DataAndIndices<JClosedWorld>(closedWorld, source.exportIndices());
return closedWorld;
}
}
@ -138,66 +144,70 @@ class BytesOnDiskSerializationStrategy extends SerializationStrategy<int> {
const BytesOnDiskSerializationStrategy({this.useDataKinds = false});
@override
List<int> serializeGlobalTypeInferenceResults(DataSourceIndices? indices,
GlobalTypeInferenceResults results, CompilerOptions options) {
List<int> serializeGlobalTypeInferenceResults(
GlobalTypeInferenceResults results,
CompilerOptions options,
SerializationIndices indices) {
Uri uri = Uri.base.resolve('world.data');
DataSinkWriter sink = DataSinkWriter(
BinaryDataSink(RandomAccessBinaryOutputSink(uri)), options,
useDataKinds: useDataKinds, importedIndices: indices);
BinaryDataSink(RandomAccessBinaryOutputSink(uri)), options, indices,
useDataKinds: useDataKinds);
serializeGlobalTypeInferenceResultsToSink(results, sink);
return File.fromUri(uri).readAsBytesSync();
}
@override
DataAndIndices<GlobalTypeInferenceResults>
deserializeGlobalTypeInferenceResults(
CompilerOptions options,
DiagnosticReporter reporter,
Environment environment,
AbstractValueStrategy abstractValueStrategy,
ir.Component component,
JClosedWorld closedWorld,
DataSourceIndices? indices,
List<int> globalTypeInferenceResultsData) {
GlobalTypeInferenceResults deserializeGlobalTypeInferenceResults(
CompilerOptions options,
DiagnosticReporter reporter,
Environment environment,
AbstractValueStrategy abstractValueStrategy,
ir.Component component,
JClosedWorld closedWorld,
List<int> globalTypeInferenceResultsData,
SerializationIndices indices) {
DataSourceReader globalTypeInferenceResultsSource = DataSourceReader(
BinaryDataSource(globalTypeInferenceResultsData), options,
useDataKinds: useDataKinds, importedIndices: indices);
return DataAndIndices(
deserializeGlobalTypeInferenceResultsFromSource(
options,
reporter,
environment,
abstractValueStrategy,
component,
closedWorld,
globalTypeInferenceResultsSource),
globalTypeInferenceResultsSource.exportIndices());
BinaryDataSource(globalTypeInferenceResultsData),
options,
indices,
useDataKinds: useDataKinds,
);
return deserializeGlobalTypeInferenceResultsFromSource(
options,
reporter,
environment,
abstractValueStrategy,
component,
closedWorld,
globalTypeInferenceResultsSource);
}
@override
List<int> serializeClosedWorld(
JClosedWorld closedWorld, CompilerOptions options) {
List<int> serializeClosedWorld(JClosedWorld closedWorld,
CompilerOptions options, SerializationIndices indices) {
Uri uri = Uri.base.resolve('closed_world.data');
DataSinkWriter sink = DataSinkWriter(
BinaryDataSink(RandomAccessBinaryOutputSink(uri)), options,
BinaryDataSink(RandomAccessBinaryOutputSink(uri)), options, indices,
useDataKinds: useDataKinds);
serializeClosedWorldToSink(closedWorld, sink);
return File.fromUri(uri).readAsBytesSync();
}
@override
DataAndIndices<JClosedWorld> deserializeClosedWorld(
JClosedWorld deserializeClosedWorld(
CompilerOptions options,
DiagnosticReporter reporter,
Environment environment,
AbstractValueStrategy abstractValueStrategy,
ir.Component component,
List<int> data) {
DataSourceReader source = DataSourceReader(BinaryDataSource(data), options,
List<int> data,
SerializationIndices indices) {
DataSourceReader source = DataSourceReader(
BinaryDataSource(data), options, indices,
useDataKinds: useDataKinds);
var closedWorld = deserializeClosedWorldFromSource(options, reporter,
environment, abstractValueStrategy, component, source);
return DataAndIndices<JClosedWorld>(closedWorld, source.exportIndices());
return closedWorld;
}
}
@ -208,63 +218,64 @@ class ObjectsInMemorySerializationStrategy
const ObjectsInMemorySerializationStrategy({this.useDataKinds = true});
@override
List<Object> serializeGlobalTypeInferenceResults(DataSourceIndices? indices,
GlobalTypeInferenceResults results, CompilerOptions options) {
List<Object> serializeGlobalTypeInferenceResults(
GlobalTypeInferenceResults results,
CompilerOptions options,
SerializationIndices indices) {
List<Object> data = [];
DataSinkWriter sink = DataSinkWriter(ObjectDataSink(data), options,
useDataKinds: useDataKinds, importedIndices: indices);
DataSinkWriter sink = DataSinkWriter(ObjectDataSink(data), options, indices,
useDataKinds: useDataKinds);
serializeGlobalTypeInferenceResultsToSink(results, sink);
return data;
}
@override
DataAndIndices<GlobalTypeInferenceResults>
deserializeGlobalTypeInferenceResults(
CompilerOptions options,
DiagnosticReporter reporter,
Environment environment,
AbstractValueStrategy abstractValueStrategy,
ir.Component component,
JClosedWorld closedWorld,
DataSourceIndices? indices,
List<Object> globalTypeInferenceResultsData) {
GlobalTypeInferenceResults deserializeGlobalTypeInferenceResults(
CompilerOptions options,
DiagnosticReporter reporter,
Environment environment,
AbstractValueStrategy abstractValueStrategy,
ir.Component component,
JClosedWorld closedWorld,
List<Object> globalTypeInferenceResultsData,
SerializationIndices indices) {
DataSourceReader globalTypeInferenceResultsSource = DataSourceReader(
ObjectDataSource(globalTypeInferenceResultsData), options,
ObjectDataSource(globalTypeInferenceResultsData), options, indices,
useDataKinds: useDataKinds);
return DataAndIndices(
deserializeGlobalTypeInferenceResultsFromSource(
options,
reporter,
environment,
abstractValueStrategy,
component,
closedWorld,
globalTypeInferenceResultsSource),
globalTypeInferenceResultsSource.exportIndices());
return deserializeGlobalTypeInferenceResultsFromSource(
options,
reporter,
environment,
abstractValueStrategy,
component,
closedWorld,
globalTypeInferenceResultsSource);
}
@override
List<Object> serializeClosedWorld(
JClosedWorld closedWorld, CompilerOptions options) {
List<Object> serializeClosedWorld(JClosedWorld closedWorld,
CompilerOptions options, SerializationIndices indices) {
List<Object> data = [];
DataSinkWriter sink = DataSinkWriter(ObjectDataSink(data), options,
DataSinkWriter sink = DataSinkWriter(ObjectDataSink(data), options, indices,
useDataKinds: useDataKinds);
serializeClosedWorldToSink(closedWorld, sink);
return data;
}
@override
DataAndIndices<JClosedWorld> deserializeClosedWorld(
JClosedWorld deserializeClosedWorld(
CompilerOptions options,
DiagnosticReporter reporter,
Environment environment,
AbstractValueStrategy abstractValueStrategy,
ir.Component component,
List<Object> data) {
DataSourceReader source = DataSourceReader(ObjectDataSource(data), options,
List<Object> data,
SerializationIndices indices) {
DataSourceReader source = DataSourceReader(
ObjectDataSource(data), options, indices,
useDataKinds: useDataKinds);
var closedWorld = deserializeClosedWorldFromSource(options, reporter,
environment, abstractValueStrategy, component, source);
return DataAndIndices<JClosedWorld>(closedWorld, source.exportIndices());
return closedWorld;
}
}

View file

@ -39,14 +39,6 @@ class _StringInterner implements ir.StringInterner, StringInterner {
}
}
/// A data class holding some data [T] and the associated [DataSourceIndices].
class DataAndIndices<T> {
final T? data;
final DataSourceIndices? indices;
DataAndIndices(this.data, this.indices);
}
class SerializationTask extends CompilerTask {
final CompilerOptions _options;
final DiagnosticReporter _reporter;
@ -138,8 +130,10 @@ class SerializationTask extends CompilerTask {
_reporter.log('Writing data to $outputUri');
api.BinaryOutputSink dataOutput =
_outputProvider.createBinarySink(outputUri);
DataSinkWriter sink =
DataSinkWriter(BinaryDataSink(dataOutput), _options);
// Use empty indices since module data is ephemeral, later phases should
// not depend on data indexed in this file.
DataSinkWriter sink = DataSinkWriter(
BinaryDataSink(dataOutput), _options, SerializationIndices());
data.toDataSink(sink);
sink.close();
});
@ -156,13 +150,17 @@ class SerializationTask extends CompilerTask {
// DataSource source = ObjectSource(encoding, useDataKinds: true);
// source.registerComponentLookup(new ComponentLookup(component));
// ModuleData.fromDataSource(source);
// Use empty indices since module data is ephemeral, later phases should
// not depend on data indexed in this file.
BytesSink bytes = BytesSink();
DataSinkWriter binarySink =
DataSinkWriter(BinaryDataSink(bytes), _options, useDataKinds: true);
DataSinkWriter binarySink = DataSinkWriter(
BinaryDataSink(bytes), _options, SerializationIndices(),
useDataKinds: true);
data.toDataSink(binarySink);
binarySink.close();
var source = DataSourceReader(
BinaryDataSource(bytes.builder.toBytes()), _options,
var source = DataSourceReader(BinaryDataSource(bytes.builder.toBytes()),
_options, SerializationIndices(),
useDataKinds: true, interner: _valueInterner);
source.registerComponentLookup(ComponentLookup(component));
ModuleData.fromDataSource(source);
@ -176,8 +174,10 @@ class SerializationTask extends CompilerTask {
for (Uri uri in _options.modularAnalysisInputs!) {
final dataInput =
await _provider.readFromUri(uri, inputKind: api.InputKind.binary);
// Use empty indices since module data is ephemeral, later phases should
// not depend on data indexed in this file.
DataSourceReader source = DataSourceReader(
BinaryDataSource(dataInput.data), _options,
BinaryDataSource(dataInput.data), _options, SerializationIndices(),
interner: _valueInterner);
source.registerComponentLookup(ComponentLookup(component));
results.readMoreFromDataSource(source);
@ -186,7 +186,8 @@ class SerializationTask extends CompilerTask {
});
}
void serializeClosedWorld(JClosedWorld closedWorld) {
void serializeClosedWorld(
JClosedWorld closedWorld, SerializationIndices indices) {
measureSubtask('serialize closed world', () {
final outputUri =
_options.dataOutputUriForStage(Dart2JSStage.closedWorld);
@ -194,16 +195,17 @@ class SerializationTask extends CompilerTask {
api.BinaryOutputSink dataOutput =
_outputProvider.createBinarySink(outputUri);
DataSinkWriter sink =
DataSinkWriter(BinaryDataSink(dataOutput), _options);
DataSinkWriter(BinaryDataSink(dataOutput), _options, indices);
serializeClosedWorldToSink(closedWorld, sink);
});
}
Future<DataAndIndices<JClosedWorld>> deserializeClosedWorld(
Future<JClosedWorld> deserializeClosedWorld(
Environment environment,
AbstractValueStrategy abstractValueStrategy,
ir.Component component,
bool useDeferredSourceReads) async {
bool useDeferredSourceReads,
SerializationIndices indices) async {
return await measureIoSubtask('deserialize closed world', () async {
final uri = _options.dataInputUriForStage(Dart2JSStage.closedWorld);
_reporter.log('Reading data from $uri');
@ -212,35 +214,36 @@ class SerializationTask extends CompilerTask {
DataSourceReader source = DataSourceReader(
BinaryDataSource(dataInput.data, stringInterner: _stringInterner),
_options,
indices,
interner: _valueInterner,
useDeferredStrategy: useDeferredSourceReads);
var closedWorld = deserializeClosedWorldFromSource(_options, _reporter,
environment, abstractValueStrategy, component, source);
return DataAndIndices(closedWorld, source.exportIndices());
return closedWorld;
});
}
void serializeGlobalTypeInference(
GlobalTypeInferenceResults results, DataSourceIndices indices) {
GlobalTypeInferenceResults results, SerializationIndices indices) {
measureSubtask('serialize data', () {
final outputUri =
_options.dataOutputUriForStage(Dart2JSStage.globalInference);
_reporter.log('Writing data to $outputUri');
api.BinaryOutputSink dataOutput =
_outputProvider.createBinarySink(outputUri);
DataSinkWriter sink = DataSinkWriter(BinaryDataSink(dataOutput), _options,
importedIndices: indices);
DataSinkWriter sink =
DataSinkWriter(BinaryDataSink(dataOutput), _options, indices);
serializeGlobalTypeInferenceResultsToSink(results, sink);
});
}
Future<DataAndIndices<GlobalTypeInferenceResults>>
deserializeGlobalTypeInferenceResults(
Environment environment,
AbstractValueStrategy abstractValueStrategy,
ir.Component component,
DataAndIndices<JClosedWorld> closedWorldAndIndices,
bool useDeferredSourceReads) async {
Future<GlobalTypeInferenceResults> deserializeGlobalTypeInferenceResults(
Environment environment,
AbstractValueStrategy abstractValueStrategy,
ir.Component component,
JClosedWorld closedWorld,
bool useDeferredSourceReads,
SerializationIndices indices) async {
return await measureIoSubtask('deserialize data', () async {
final uri = _options.dataInputUriForStage(Dart2JSStage.globalInference);
_reporter.log('Reading data from $uri');
@ -249,24 +252,22 @@ class SerializationTask extends CompilerTask {
DataSourceReader source = DataSourceReader(
BinaryDataSource(dataInput.data, stringInterner: _stringInterner),
_options,
indices,
interner: _valueInterner,
importedIndices: closedWorldAndIndices.indices,
useDeferredStrategy: useDeferredSourceReads);
return DataAndIndices(
deserializeGlobalTypeInferenceResultsFromSource(
_options,
_reporter,
environment,
abstractValueStrategy,
component,
closedWorldAndIndices.data!,
source),
source.exportIndices());
return deserializeGlobalTypeInferenceResultsFromSource(
_options,
_reporter,
environment,
abstractValueStrategy,
component,
closedWorld,
source);
});
}
void serializeCodegen(JsBackendStrategy backendStrategy,
CodegenResults codegenResults, DataSourceIndices indices) {
CodegenResults codegenResults, SerializationIndices indices) {
GlobalTypeInferenceResults globalTypeInferenceResults =
codegenResults.globalTypeInferenceResults;
JClosedWorld closedWorld = globalTypeInferenceResults.closedWorld;
@ -287,8 +288,8 @@ class SerializationTask extends CompilerTask {
_options.dataOutputUriForStage(Dart2JSStage.codegenSharded);
Uri uri = Uri.parse('$outputUri$shard');
api.BinaryOutputSink dataOutput = _outputProvider.createBinarySink(uri);
DataSinkWriter sink = DataSinkWriter(BinaryDataSink(dataOutput), _options,
importedIndices: indices);
DataSinkWriter sink =
DataSinkWriter(BinaryDataSink(dataOutput), _options, indices);
_reporter.log('Writing data to ${uri}');
sink.registerEntityWriter(entityWriter);
sink.writeMemberMap(results, (MemberEntity member, CodegenResult result) {
@ -304,9 +305,9 @@ class SerializationTask extends CompilerTask {
JsBackendStrategy backendStrategy,
GlobalTypeInferenceResults globalTypeInferenceResults,
CodegenInputs codegenInputs,
DataSourceIndices indices,
bool useDeferredSourceReads,
SourceLookup sourceLookup) async {
SourceLookup sourceLookup,
SerializationIndices indices) async {
int shards = _options.codegenShards!;
JClosedWorld closedWorld = globalTypeInferenceResults.closedWorld;
Map<MemberEntity, Deferrable<CodegenResult>> results = {};
@ -320,7 +321,7 @@ class SerializationTask extends CompilerTask {
// TODO(36983): This code is extracted because there appeared to be a
// memory leak for large buffer held by `source`.
_deserializeCodegenInput(backendStrategy, closedWorld, uri, dataInput,
indices, results, useDeferredSourceReads, sourceLookup);
results, useDeferredSourceReads, sourceLookup, indices);
dataInput.release();
});
}
@ -341,15 +342,15 @@ class SerializationTask extends CompilerTask {
JClosedWorld closedWorld,
Uri uri,
api.Input<List<int>> dataInput,
DataSourceIndices importedIndices,
Map<MemberEntity, Deferrable<CodegenResult>> results,
bool useDeferredSourceReads,
SourceLookup sourceLookup) {
SourceLookup sourceLookup,
SerializationIndices indices) {
DataSourceReader source = DataSourceReader(
BinaryDataSource(dataInput.data, stringInterner: _stringInterner),
_options,
indices,
interner: _valueInterner,
importedIndices: importedIndices,
useDeferredStrategy: useDeferredSourceReads);
backendStrategy.prepareCodegenReader(source);
source.registerSourceLookup(sourceLookup);
@ -366,12 +367,11 @@ class SerializationTask extends CompilerTask {
JsBackendStrategy backendStrategy,
DumpInfoProgramData dumpInfoProgramData,
JClosedWorld closedWorld,
DataSourceIndices? importedIndices) {
SerializationIndices indices) {
final outputUri = _options.dumpInfoWriteUri!;
api.BinaryOutputSink dataOutput =
_outputProvider.createBinarySink(outputUri);
final sink = DataSinkWriter(BinaryDataSink(dataOutput), _options,
importedIndices: importedIndices);
final sink = DataSinkWriter(BinaryDataSink(dataOutput), _options, indices);
EntityWriter entityWriter = backendStrategy.forEachCodegenMember((_) {});
sink.registerEntityWriter(entityWriter);
sink.registerCodegenWriter(
@ -383,14 +383,14 @@ class SerializationTask extends CompilerTask {
Future<DumpInfoProgramData> deserializeDumpInfoProgramData(
JsBackendStrategy backendStrategy,
JClosedWorld closedWorld,
DataSourceIndices? importedIndices) async {
SerializationIndices indices) async {
final inputUri = _options.dumpInfoReadUri!;
final dataInput =
await _provider.readFromUri(inputUri, inputKind: api.InputKind.binary);
final source = DataSourceReader(
BinaryDataSource(dataInput.data, stringInterner: _stringInterner),
_options,
importedIndices: importedIndices);
indices);
backendStrategy.prepareCodegenReader(source);
source.registerCodegenReader(CodegenReaderImpl(closedWorld));
return DumpInfoProgramData.readFromDataSource(source, closedWorld,

View file

@ -14,7 +14,6 @@ import 'package:compiler/src/js_model/js_world.dart';
import 'package:compiler/src/inferrer/types.dart';
import 'package:compiler/src/serialization/serialization.dart';
import 'package:compiler/src/serialization/strategies.dart';
import 'package:compiler/src/serialization/task.dart';
import 'package:expect/expect.dart';
import 'package:kernel/ast.dart' as ir;
import 'package:compiler/src/util/memory_compiler.dart';
@ -52,18 +51,15 @@ Future<void> finishCompileAndCompare(
bool stoppedAfterTypeInference = false}) async {
if (stoppedAfterClosedWorld) {
JClosedWorld closedWorld = compiler.backendClosedWorldForTesting!;
var newClosedWorldAndIndices =
cloneClosedWorld(compiler, closedWorld, strategy);
compiler.performGlobalTypeInference(newClosedWorldAndIndices.data!);
var newClosedWorld = cloneClosedWorld(compiler, closedWorld, strategy);
compiler.performGlobalTypeInference(newClosedWorld);
}
if (stoppedAfterClosedWorld || stoppedAfterTypeInference) {
GlobalTypeInferenceResults globalInferenceResults =
compiler.globalInference.resultsForTesting!;
var indices = compiler.closedWorldIndicesForTesting;
GlobalTypeInferenceResults newGlobalInferenceResults =
cloneInferenceResults(
indices, compiler, globalInferenceResults, strategy);
cloneInferenceResults(compiler, globalInferenceResults, strategy);
await generateJavaScriptCode(compiler, newGlobalInferenceResults);
}
var actualOutput = actualOutputCollector.clear();
@ -269,26 +265,29 @@ void checkData(List<int> data, List<int> newData) {
Expect.listEquals(data, newData);
}
DataAndIndices<JClosedWorld> cloneClosedWorld(Compiler compiler,
JClosedWorld closedWorld, SerializationStrategy strategy) {
JClosedWorld cloneClosedWorld(Compiler compiler, JClosedWorld closedWorld,
SerializationStrategy strategy) {
SerializationIndices indices = SerializationIndices();
ir.Component component = closedWorld.elementMap.programEnv.mainComponent;
List<int> irData = strategy.serializeComponent(component);
final closedWorldData =
strategy.serializeClosedWorld(closedWorld, compiler.options) as List<int>;
final closedWorldData = strategy.serializeClosedWorld(
closedWorld, compiler.options, indices) as List<int>;
print('data size: ${closedWorldData.length}');
ir.Component newComponent = strategy.deserializeComponent(irData);
var newClosedWorldAndIndices = strategy.deserializeClosedWorld(
var newClosedWorld = strategy.deserializeClosedWorld(
compiler.options,
compiler.reporter,
compiler.environment,
compiler.abstractValueStrategy,
newComponent,
closedWorldData);
closedWorldData,
indices);
indices = SerializationIndices();
final newClosedWorldData = strategy.serializeClosedWorld(
newClosedWorldAndIndices.data!, compiler.options) as List<int>;
newClosedWorld, compiler.options, indices) as List<int>;
checkData(closedWorldData, newClosedWorldData);
return newClosedWorldAndIndices;
return newClosedWorld;
}
/// Tests that cloned inference results serialize to the same data.
@ -297,53 +296,51 @@ DataAndIndices<JClosedWorld> cloneClosedWorld(Compiler compiler,
/// round normalizes the data as some information might be dropped in the
/// serialization/deserialization process. The second and third rounds are
/// compared for consistency.
GlobalTypeInferenceResults cloneInferenceResults(
DataSourceIndices? indices,
Compiler compiler,
GlobalTypeInferenceResults results,
SerializationStrategy strategy) {
GlobalTypeInferenceResults cloneInferenceResults(Compiler compiler,
GlobalTypeInferenceResults results, SerializationStrategy strategy) {
SerializationIndices indices = SerializationIndices();
List<int> irData = strategy.unpackAndSerializeComponent(results);
final closedWorldData = strategy.serializeClosedWorld(
results.closedWorld, compiler.options) as List<int>;
results.closedWorld, compiler.options, indices) as List<int>;
indices = SerializationIndices();
final worldData = strategy.serializeGlobalTypeInferenceResults(
indices, results, compiler.options) as List<int>;
results, compiler.options, indices) as List<int>;
print('data size: ${worldData.length}');
ir.Component newComponent = strategy.deserializeComponent(irData);
var newClosedWorldAndIndices = strategy.deserializeClosedWorld(
var newClosedWorld = strategy.deserializeClosedWorld(
compiler.options,
compiler.reporter,
compiler.environment,
compiler.abstractValueStrategy,
newComponent,
closedWorldData);
var newIndices = indices == null ? null : newClosedWorldAndIndices.indices;
GlobalTypeInferenceResults initialResults = strategy
.deserializeGlobalTypeInferenceResults(
closedWorldData,
indices);
GlobalTypeInferenceResults initialResults =
strategy.deserializeGlobalTypeInferenceResults(
compiler.options,
compiler.reporter,
compiler.environment,
compiler.abstractValueStrategy,
newComponent,
newClosedWorldAndIndices.data!,
newIndices,
worldData)
.data!;
newClosedWorld,
worldData,
indices);
indices = SerializationIndices();
final initialWorldData = strategy.serializeGlobalTypeInferenceResults(
newIndices, initialResults, compiler.options) as List<int>;
GlobalTypeInferenceResults finalResults = strategy
.deserializeGlobalTypeInferenceResults(
initialResults, compiler.options, indices) as List<int>;
GlobalTypeInferenceResults finalResults =
strategy.deserializeGlobalTypeInferenceResults(
compiler.options,
compiler.reporter,
compiler.environment,
compiler.abstractValueStrategy,
newComponent,
newClosedWorldAndIndices.data!,
newIndices,
worldData)
.data!;
newClosedWorld,
worldData,
indices);
indices = SerializationIndices();
final finalWorldData = strategy.serializeGlobalTypeInferenceResults(
newIndices, finalResults, compiler.options) as List<int>;
finalResults, compiler.options, indices) as List<int>;
checkData(initialWorldData, finalWorldData);
return finalResults;
}