Reland "migrate part of painting to nullsafety (#62696)" (#62872)

This commit is contained in:
Michael Goderbauer 2020-08-04 16:06:04 -07:00 committed by GitHub
parent 428be90768
commit 3b887bec47
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
10 changed files with 159 additions and 176 deletions

View file

@ -1,7 +1,7 @@
<<skip until matching line>>
══╡ EXCEPTION CAUGHT BY WIDGETS LIBRARY ╞═══════════════════════════════════════════════════════════
The following assertion was thrown building Listener
'package:flutter\/src\/painting\/basic_types\.dart': Failed assertion: line 225 pos 10: 'textDirection
'package:flutter\/src\/painting\/basic_types\.dart': Failed assertion: line 222 pos 10: 'textDirection
!= null': is not true\.
Either the assertion indicates an error in the framework itself, or we should provide substantially

View file

@ -1,7 +1,7 @@
<<skip until matching line>>
══╡ EXCEPTION CAUGHT BY WIDGETS LIBRARY ╞═══════════════════════════════════════════════════════════
The following assertion was thrown building Listener
'package:flutter\/src\/painting\/basic_types\.dart': Failed assertion: line 225 pos 10: 'textDirection
'package:flutter\/src\/painting\/basic_types\.dart': Failed assertion: line 222 pos 10: 'textDirection
!= null': is not true\.
Either the assertion indicates an error in the framework itself, or we should provide substantially

View file

@ -2,7 +2,6 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// @dart = 2.8
import 'dart:ui' as ui show lerpDouble;
@ -88,12 +87,12 @@ abstract class AlignmentGeometry {
/// into a concrete [Alignment] using [resolve].
///
/// {@macro dart.ui.shadow.lerp}
static AlignmentGeometry lerp(AlignmentGeometry a, AlignmentGeometry b, double t) {
static AlignmentGeometry? lerp(AlignmentGeometry? a, AlignmentGeometry? b, double t) {
assert(t != null);
if (a == null && b == null)
return null;
if (a == null)
return b * t;
return b! * t;
if (b == null)
return a * (1.0 - t);
if (a is Alignment && b is Alignment)
@ -101,9 +100,9 @@ abstract class AlignmentGeometry {
if (a is AlignmentDirectional && b is AlignmentDirectional)
return AlignmentDirectional.lerp(a, b, t);
return _MixedAlignment(
ui.lerpDouble(a._x, b._x, t),
ui.lerpDouble(a._start, b._start, t),
ui.lerpDouble(a._y, b._y, t),
ui.lerpDouble(a._x, b._x, t)!,
ui.lerpDouble(a._start, b._start, t)!,
ui.lerpDouble(a._y, b._y, t)!,
);
}
@ -116,7 +115,7 @@ abstract class AlignmentGeometry {
/// * [Alignment], for which this is a no-op (returns itself).
/// * [AlignmentDirectional], which flips the horizontal direction
/// based on the `direction` argument.
Alignment resolve(TextDirection direction);
Alignment resolve(TextDirection? direction);
@override
String toString() {
@ -333,19 +332,19 @@ class Alignment extends AlignmentGeometry {
/// If either is null, this function interpolates from [Alignment.center].
///
/// {@macro dart.ui.shadow.lerp}
static Alignment lerp(Alignment a, Alignment b, double t) {
static Alignment? lerp(Alignment? a, Alignment? b, double t) {
assert(t != null);
if (a == null && b == null)
return null;
if (a == null)
return Alignment(ui.lerpDouble(0.0, b.x, t), ui.lerpDouble(0.0, b.y, t));
return Alignment(ui.lerpDouble(0.0, b!.x, t)!, ui.lerpDouble(0.0, b.y, t)!);
if (b == null)
return Alignment(ui.lerpDouble(a.x, 0.0, t), ui.lerpDouble(a.y, 0.0, t));
return Alignment(ui.lerpDouble(a.x, b.x, t), ui.lerpDouble(a.y, b.y, t));
return Alignment(ui.lerpDouble(a.x, 0.0, t)!, ui.lerpDouble(a.y, 0.0, t)!);
return Alignment(ui.lerpDouble(a.x, b.x, t)!, ui.lerpDouble(a.y, b.y, t)!);
}
@override
Alignment resolve(TextDirection direction) => this;
Alignment resolve(TextDirection? direction) => this;
static String _stringify(double x, double y) {
if (x == -1.0 && y == -1.0)
@ -514,27 +513,26 @@ class AlignmentDirectional extends AlignmentGeometry {
/// If either is null, this function interpolates from [AlignmentDirectional.center].
///
/// {@macro dart.ui.shadow.lerp}
static AlignmentDirectional lerp(AlignmentDirectional a, AlignmentDirectional b, double t) {
static AlignmentDirectional? lerp(AlignmentDirectional? a, AlignmentDirectional? b, double t) {
assert(t != null);
if (a == null && b == null)
return null;
if (a == null)
return AlignmentDirectional(ui.lerpDouble(0.0, b.start, t), ui.lerpDouble(0.0, b.y, t));
return AlignmentDirectional(ui.lerpDouble(0.0, b!.start, t)!, ui.lerpDouble(0.0, b.y, t)!);
if (b == null)
return AlignmentDirectional(ui.lerpDouble(a.start, 0.0, t), ui.lerpDouble(a.y, 0.0, t));
return AlignmentDirectional(ui.lerpDouble(a.start, b.start, t), ui.lerpDouble(a.y, b.y, t));
return AlignmentDirectional(ui.lerpDouble(a.start, 0.0, t)!, ui.lerpDouble(a.y, 0.0, t)!);
return AlignmentDirectional(ui.lerpDouble(a.start, b.start, t)!, ui.lerpDouble(a.y, b.y, t)!);
}
@override
Alignment resolve(TextDirection direction) {
Alignment resolve(TextDirection? direction) {
assert(direction != null, 'Cannot resolve $runtimeType without a TextDirection.');
switch (direction) {
switch (direction!) {
case TextDirection.rtl:
return Alignment(-start, y);
case TextDirection.ltr:
return Alignment(start, y);
}
return null;
}
static String _stringify(double start, double y) {
@ -622,15 +620,14 @@ class _MixedAlignment extends AlignmentGeometry {
}
@override
Alignment resolve(TextDirection direction) {
Alignment resolve(TextDirection? direction) {
assert(direction != null, 'Cannot resolve $runtimeType without a TextDirection.');
switch (direction) {
switch (direction!) {
case TextDirection.rtl:
return Alignment(_x - _start, _y);
case TextDirection.ltr:
return Alignment(_x + _start, _y);
}
return null;
}
}
@ -652,7 +649,7 @@ class _MixedAlignment extends AlignmentGeometry {
class TextAlignVertical {
/// Creates a TextAlignVertical from any y value between -1.0 and 1.0.
const TextAlignVertical({
@required this.y,
required this.y,
}) : assert(y != null),
assert(y >= -1.0 && y <= 1.0);

View file

@ -2,7 +2,6 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// @dart = 2.8
import 'dart:ui' show TextDirection;
@ -143,7 +142,6 @@ Axis flipAxis(Axis direction) {
case Axis.vertical:
return Axis.horizontal;
}
return null;
}
/// A direction in which boxes flow vertically.
@ -214,7 +212,6 @@ Axis axisDirectionToAxis(AxisDirection axisDirection) {
case AxisDirection.right:
return Axis.horizontal;
}
return null;
}
/// Returns the [AxisDirection] in which reading occurs in the given [TextDirection].
@ -229,7 +226,6 @@ AxisDirection textDirectionToAxisDirection(TextDirection textDirection) {
case TextDirection.ltr:
return AxisDirection.right;
}
return null;
}
/// Returns the opposite of the given [AxisDirection].
@ -253,7 +249,6 @@ AxisDirection flipAxisDirection(AxisDirection axisDirection) {
case AxisDirection.left:
return AxisDirection.right;
}
return null;
}
/// Returns whether traveling along the given axis direction visits coordinates
@ -271,5 +266,4 @@ bool axisDirectionIsReversed(AxisDirection axisDirection) {
case AxisDirection.right:
return false;
}
return null;
}

View file

@ -2,7 +2,6 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// @dart = 2.8
import 'dart:typed_data' show Uint8List;
import 'dart:ui' as ui show instantiateImageCodec, Codec;
@ -23,14 +22,12 @@ mixin PaintingBinding on BindingBase, ServicesBinding {
super.initInstances();
_instance = this;
_imageCache = createImageCache();
if (shaderWarmUp != null) {
shaderWarmUp.execute();
}
shaderWarmUp?.execute();
}
/// The current [PaintingBinding], if one has been created.
static PaintingBinding get instance => _instance;
static PaintingBinding _instance;
static PaintingBinding? get instance => _instance;
static PaintingBinding? _instance;
/// [ShaderWarmUp] to be executed during [initInstances].
///
@ -53,7 +50,7 @@ mixin PaintingBinding on BindingBase, ServicesBinding {
/// See also:
///
/// * [ShaderWarmUp], the interface of how this warm up works.
static ShaderWarmUp shaderWarmUp = const DefaultShaderWarmUp();
static ShaderWarmUp? shaderWarmUp = const DefaultShaderWarmUp();
/// The singleton that implements the Flutter framework's image cache.
///
@ -62,8 +59,8 @@ mixin PaintingBinding on BindingBase, ServicesBinding {
///
/// The image cache is created during startup by the [createImageCache]
/// method.
ImageCache get imageCache => _imageCache;
ImageCache _imageCache;
ImageCache? get imageCache => _imageCache;
ImageCache? _imageCache;
/// Creates the [ImageCache] singleton (accessible via [imageCache]).
///
@ -90,8 +87,8 @@ mixin PaintingBinding on BindingBase, ServicesBinding {
/// above its native resolution should prefer scaling the canvas the image is
/// drawn into.
Future<ui.Codec> instantiateImageCodec(Uint8List bytes, {
int cacheWidth,
int cacheHeight,
int? cacheWidth,
int? cacheHeight,
bool allowUpscaling = false,
}) {
assert(cacheWidth == null || cacheWidth > 0);
@ -108,8 +105,8 @@ mixin PaintingBinding on BindingBase, ServicesBinding {
@override
void evict(String asset) {
super.evict(asset);
imageCache.clear();
imageCache.clearLiveImages();
imageCache!.clear();
imageCache!.clearLiveImages();
}
@override
@ -170,4 +167,4 @@ class _SystemFontsNotifier extends Listenable {
///
/// The image cache is created during startup by the [PaintingBinding]'s
/// [PaintingBinding.createImageCache] method.
ImageCache get imageCache => PaintingBinding.instance.imageCache;
ImageCache? get imageCache => PaintingBinding.instance!.imageCache;

View file

@ -2,7 +2,6 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// @dart = 2.8
import 'dart:math' as math;
import 'dart:ui' show Color, lerpDouble, hashValues;
@ -10,7 +9,7 @@ import 'dart:ui' show Color, lerpDouble, hashValues;
import 'package:flutter/foundation.dart';
double _getHue(double red, double green, double blue, double max, double delta) {
double hue;
late double hue;
if (max == 0.0) {
hue = 0.0;
} else if (max == red) {
@ -199,19 +198,19 @@ class HSVColor {
/// {@macro dart.ui.shadow.lerp}
///
/// Values outside of the valid range for each channel will be clamped.
static HSVColor lerp(HSVColor a, HSVColor b, double t) {
static HSVColor? lerp(HSVColor? a, HSVColor? b, double t) {
assert(t != null);
if (a == null && b == null)
return null;
if (a == null)
return b._scaleAlpha(t);
return b!._scaleAlpha(t);
if (b == null)
return a._scaleAlpha(1.0 - t);
return HSVColor.fromAHSV(
lerpDouble(a.alpha, b.alpha, t).clamp(0.0, 1.0) as double,
lerpDouble(a.hue, b.hue, t) % 360.0,
lerpDouble(a.saturation, b.saturation, t).clamp(0.0, 1.0) as double,
lerpDouble(a.value, b.value, t).clamp(0.0, 1.0) as double,
lerpDouble(a.alpha, b.alpha, t)!.clamp(0.0, 1.0) as double,
lerpDouble(a.hue, b.hue, t)! % 360.0,
lerpDouble(a.saturation, b.saturation, t)!.clamp(0.0, 1.0) as double,
lerpDouble(a.value, b.value, t)!.clamp(0.0, 1.0) as double,
);
}
@ -383,19 +382,19 @@ class HSLColor {
///
/// Values for `t` are usually obtained from an [Animation<double>], such as
/// an [AnimationController].
static HSLColor lerp(HSLColor a, HSLColor b, double t) {
static HSLColor? lerp(HSLColor? a, HSLColor? b, double t) {
assert(t != null);
if (a == null && b == null)
return null;
if (a == null)
return b._scaleAlpha(t);
return b!._scaleAlpha(t);
if (b == null)
return a._scaleAlpha(1.0 - t);
return HSLColor.fromAHSL(
lerpDouble(a.alpha, b.alpha, t).clamp(0.0, 1.0) as double,
lerpDouble(a.hue, b.hue, t) % 360.0,
lerpDouble(a.saturation, b.saturation, t).clamp(0.0, 1.0) as double,
lerpDouble(a.lightness, b.lightness, t).clamp(0.0, 1.0) as double,
lerpDouble(a.alpha, b.alpha, t)!.clamp(0.0, 1.0) as double,
lerpDouble(a.hue, b.hue, t)! % 360.0,
lerpDouble(a.saturation, b.saturation, t)!.clamp(0.0, 1.0) as double,
lerpDouble(a.lightness, b.lightness, t)!.clamp(0.0, 1.0) as double,
);
}
@ -441,7 +440,7 @@ class ColorSwatch<T> extends Color {
final Map<T, Color> _swatch;
/// Returns an element of the swatch table.
Color operator [](T index) => _swatch[index];
Color? operator [](T index) => _swatch[index];
@override
bool operator ==(Object other) {
@ -468,9 +467,9 @@ class ColorProperty extends DiagnosticsProperty<Color> {
/// The [showName], [style], and [level] arguments must not be null.
ColorProperty(
String name,
Color value, {
Color? value, {
bool showName = true,
Object defaultValue = kNoDefaultValue,
Object? defaultValue = kNoDefaultValue,
DiagnosticsTreeStyle style = DiagnosticsTreeStyle.singleLine,
DiagnosticLevel level = DiagnosticLevel.info,
}) : assert(showName != null),
@ -484,14 +483,14 @@ class ColorProperty extends DiagnosticsProperty<Color> {
);
@override
Map<String, Object> toJsonMap(DiagnosticsSerializationDelegate delegate) {
final Map<String, Object> json = super.toJsonMap(delegate);
Map<String, Object?> toJsonMap(DiagnosticsSerializationDelegate delegate) {
final Map<String, Object?> json = super.toJsonMap(delegate);
if (value != null) {
json['valueProperties'] = <String, Object>{
'red': value.red,
'green': value.green,
'blue': value.blue,
'alpha': value.alpha,
'red': value!.red,
'green': value!.green,
'blue': value!.blue,
'alpha': value!.alpha,
};
}
return json;

View file

@ -2,7 +2,6 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// @dart = 2.8
import 'dart:io';
import 'dart:ui' show Size, hashValues;
@ -30,7 +29,7 @@ typedef HttpClientProvider = HttpClient Function();
/// a mock client that hasn't been affected by other tests.
///
/// This value is ignored in non-debug builds.
HttpClientProvider debugNetworkImageHttpClientProvider;
HttpClientProvider? debugNetworkImageHttpClientProvider;
typedef PaintImageCallback = void Function(ImageSizeInfo);
@ -44,20 +43,20 @@ class ImageSizeInfo {
/// This class is used by the framework when it paints an image to a canvas
/// to report to `dart:developer`'s [postEvent], as well as to the
/// [debugOnPaintImage] callback if it is set.
const ImageSizeInfo({this.source, this.displaySize, this.imageSize});
const ImageSizeInfo({this.source, this.displaySize, required this.imageSize});
/// A unique identifier for this image, for example its asset path or network
/// URL.
final String source;
final String? source;
/// The size of the area the image will be rendered in.
final Size displaySize;
final Size? displaySize;
/// The size the image has been decoded to.
final Size imageSize;
/// The number of bytes needed to render the image without scaling it.
int get displaySizeInBytes => _sizeToBytes(displaySize);
int get displaySizeInBytes => _sizeToBytes(displaySize!);
/// The number of bytes used by the image in memory.
int get decodedSizeInBytes => _sizeToBytes(imageSize);
@ -69,14 +68,15 @@ class ImageSizeInfo {
}
/// Returns a JSON encodable representation of this object.
Map<String, Object> toJson() {
return <String, Object>{
Map<String, Object?> toJson() {
return <String, Object?>{
'source': source,
'displaySize': <String, double>{
'width': displaySize.width,
'height': displaySize.height,
},
'imageSize': <String, double>{
if (displaySize != null)
'displaySize': <String, Object?>{
'width': displaySize!.width,
'height': displaySize!.height,
},
'imageSize': <String, Object?>{
'width': imageSize.width,
'height': imageSize.height,
},
@ -125,7 +125,7 @@ class ImageSizeInfo {
/// a higher resolution while animating, but it would be problematic to have
/// a grid or list of such thumbnails all be at the full resolution at the same
/// time.
PaintImageCallback debugOnPaintImage;
PaintImageCallback? debugOnPaintImage;
/// If true, the framework will color invert and horizontally flip images that
/// have been decoded to a size taking at least [debugImageOverheadAllowance]

View file

@ -2,7 +2,6 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// @dart = 2.8
import 'dart:developer';
import 'dart:ui' show hashValues;
@ -103,7 +102,7 @@ class ImageCache {
assert(value >= 0);
if (value == maximumSize)
return;
TimelineTask timelineTask;
TimelineTask? timelineTask;
if (!kReleaseMode) {
timelineTask = TimelineTask()..start(
'ImageCache.setMaximumSize',
@ -117,7 +116,7 @@ class ImageCache {
_checkCacheSize(timelineTask);
}
if (!kReleaseMode) {
timelineTask.finish();
timelineTask!.finish();
}
}
@ -142,7 +141,7 @@ class ImageCache {
assert(value >= 0);
if (value == _maximumSizeBytes)
return;
TimelineTask timelineTask;
TimelineTask? timelineTask;
if (!kReleaseMode) {
timelineTask = TimelineTask()..start(
'ImageCache.setMaximumSizeBytes',
@ -156,7 +155,7 @@ class ImageCache {
_checkCacheSize(timelineTask);
}
if (!kReleaseMode) {
timelineTask.finish();
timelineTask!.finish();
}
}
@ -239,10 +238,10 @@ class ImageCache {
// will never complete, e.g. it was loaded in a FakeAsync zone.
// In such a case, we need to make sure subsequent calls to
// putIfAbsent don't return this image that may never complete.
final _LiveImage image = _liveImages.remove(key);
final _LiveImage? image = _liveImages.remove(key);
image?.removeListener();
}
final _PendingImage pendingImage = _pendingImages.remove(key);
final _PendingImage? pendingImage = _pendingImages.remove(key);
if (pendingImage != null) {
if (!kReleaseMode) {
Timeline.instantSync('ImageCache.evict', arguments: <String, dynamic>{
@ -252,7 +251,7 @@ class ImageCache {
pendingImage.removeListener();
return true;
}
final _CachedImage image = _cache.remove(key);
final _CachedImage? image = _cache.remove(key);
if (image != null) {
if (!kReleaseMode) {
Timeline.instantSync('ImageCache.evict', arguments: <String, dynamic>{
@ -260,7 +259,7 @@ class ImageCache {
'sizeInBytes': image.sizeBytes,
});
}
_currentSizeBytes -= image.sizeBytes;
_currentSizeBytes -= image.sizeBytes!;
return true;
}
if (!kReleaseMode) {
@ -276,13 +275,13 @@ class ImageCache {
///
/// Resizes the cache as appropriate to maintain the constraints of
/// [maximumSize] and [maximumSizeBytes].
void _touch(Object key, _CachedImage image, TimelineTask timelineTask) {
void _touch(Object key, _CachedImage image, TimelineTask? timelineTask) {
// TODO(dnfield): Some customers test in release mode with asserts enabled.
// This is bound to cause problems, b/150295238 is tracking that. For now,
// avoid this being a point of failure.
assert(kReleaseMode || timelineTask != null);
if (image.sizeBytes != null && image.sizeBytes <= maximumSizeBytes) {
_currentSizeBytes += image.sizeBytes;
if (image.sizeBytes != null && image.sizeBytes! <= maximumSizeBytes) {
_currentSizeBytes += image.sizeBytes!;
_cache[key] = image;
_checkCacheSize(timelineTask);
}
@ -310,11 +309,11 @@ class ImageCache {
/// `onError` is also provided. When an exception is caught resolving an image,
/// no completers are cached and `null` is returned instead of a new
/// completer.
ImageStreamCompleter putIfAbsent(Object key, ImageStreamCompleter loader(), { ImageErrorListener onError }) {
ImageStreamCompleter? putIfAbsent(Object key, ImageStreamCompleter loader(), { ImageErrorListener? onError }) {
assert(key != null);
assert(loader != null);
TimelineTask timelineTask;
TimelineTask listenerTask;
TimelineTask? timelineTask;
TimelineTask? listenerTask;
if (!kReleaseMode) {
timelineTask = TimelineTask()..start(
'ImageCache.putIfAbsent',
@ -323,11 +322,11 @@ class ImageCache {
},
);
}
ImageStreamCompleter result = _pendingImages[key]?.completer;
ImageStreamCompleter? result = _pendingImages[key]?.completer;
// Nothing needs to be done because the image hasn't loaded yet.
if (result != null) {
if (!kReleaseMode) {
timelineTask.finish(arguments: <String, dynamic>{'result': 'pending'});
timelineTask!.finish(arguments: <String, dynamic>{'result': 'pending'});
}
return result;
}
@ -335,10 +334,10 @@ class ImageCache {
// recently used position below.
// Don't use _touch here, which would trigger a check on cache size that is
// not needed since this is just moving an existing cache entry to the head.
final _CachedImage image = _cache.remove(key);
final _CachedImage? image = _cache.remove(key);
if (image != null) {
if (!kReleaseMode) {
timelineTask.finish(arguments: <String, dynamic>{'result': 'keepAlive'});
timelineTask!.finish(arguments: <String, dynamic>{'result': 'keepAlive'});
}
// The image might have been keptAlive but had no listeners (so not live).
// Make sure the cache starts tracking it as live again.
@ -347,11 +346,11 @@ class ImageCache {
return image.completer;
}
final _CachedImage liveImage = _liveImages[key];
final _CachedImage? liveImage = _liveImages[key];
if (liveImage != null) {
_touch(key, liveImage, timelineTask);
if (!kReleaseMode) {
timelineTask.finish(arguments: <String, dynamic>{'result': 'keepAlive'});
timelineTask!.finish(arguments: <String, dynamic>{'result': 'keepAlive'});
}
return liveImage.completer;
}
@ -361,7 +360,7 @@ class ImageCache {
_trackLiveImage(key, _LiveImage(result, null, () => _liveImages.remove(key)));
} catch (error, stackTrace) {
if (!kReleaseMode) {
timelineTask.finish(arguments: <String, dynamic>{
timelineTask!.finish(arguments: <String, dynamic>{
'result': 'error',
'error': error.toString(),
'stackTrace': stackTrace.toString(),
@ -387,12 +386,12 @@ class ImageCache {
// will have to listen to the image at least once so we don't leak it in
// the live image tracking.
// If the cache is disabled, this variable will be set.
_PendingImage untrackedPendingImage;
void listener(ImageInfo info, bool syncCall) {
_PendingImage? untrackedPendingImage;
void listener(ImageInfo? info, bool syncCall) {
// Images that fail to load don't contribute to cache size.
final int imageSize = info?.image == null ? 0 : info.image.height * info.image.width * 4;
final int imageSize = info == null || info.image == null ? 0 : info.image.height * info.image.width * 4;
final _CachedImage image = _CachedImage(result, imageSize);
final _CachedImage image = _CachedImage(result!, imageSize);
_trackLiveImage(
key,
@ -403,7 +402,7 @@ class ImageCache {
),
);
final _PendingImage pendingImage = untrackedPendingImage ?? _pendingImages.remove(key);
final _PendingImage? pendingImage = untrackedPendingImage ?? _pendingImages.remove(key);
if (pendingImage != null) {
pendingImage.removeListener();
}
@ -413,11 +412,11 @@ class ImageCache {
}
if (!kReleaseMode && !listenedOnce) {
listenerTask.finish(arguments: <String, dynamic>{
listenerTask!.finish(arguments: <String, dynamic>{
'syncCall': syncCall,
'sizeInBytes': imageSize,
});
timelineTask.finish(arguments: <String, dynamic>{
timelineTask!.finish(arguments: <String, dynamic>{
'currentSizeBytes': currentSizeBytes,
'currentSize': currentSize,
});
@ -481,9 +480,9 @@ class ImageCache {
// Remove images from the cache until both the length and bytes are below
// maximum, or the cache is empty.
void _checkCacheSize(TimelineTask timelineTask) {
void _checkCacheSize(TimelineTask? timelineTask) {
final Map<String, dynamic> finishArgs = <String, dynamic>{};
TimelineTask checkCacheTask;
TimelineTask? checkCacheTask;
if (!kReleaseMode) {
checkCacheTask = TimelineTask(parent: timelineTask)..start('checkCacheSize');
finishArgs['evictedKeys'] = <String>[];
@ -492,8 +491,8 @@ class ImageCache {
}
while (_currentSizeBytes > _maximumSizeBytes || _cache.length > _maximumSize) {
final Object key = _cache.keys.first;
final _CachedImage image = _cache[key];
_currentSizeBytes -= image.sizeBytes;
final _CachedImage image = _cache[key]!;
_currentSizeBytes -= image.sizeBytes!;
_cache.remove(key);
if (!kReleaseMode) {
finishArgs['evictedKeys'].add(key.toString());
@ -502,7 +501,7 @@ class ImageCache {
if (!kReleaseMode) {
finishArgs['endSize'] = currentSize;
finishArgs['endSizeBytes'] = currentSizeBytes;
checkCacheTask.finish(arguments: finishArgs);
checkCacheTask!.finish(arguments: finishArgs);
}
assert(_currentSizeBytes >= 0);
assert(_cache.length <= maximumSize);
@ -585,11 +584,11 @@ class _CachedImage {
_CachedImage(this.completer, this.sizeBytes);
final ImageStreamCompleter completer;
int sizeBytes;
int? sizeBytes;
}
class _LiveImage extends _CachedImage {
_LiveImage(ImageStreamCompleter completer, int sizeBytes, this.handleRemove)
_LiveImage(ImageStreamCompleter completer, int? sizeBytes, this.handleRemove)
: super(completer, sizeBytes);
final VoidCallback handleRemove;

View file

@ -2,7 +2,6 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// @dart = 2.8
import 'dart:async';
import 'dart:ui' as ui show Image, Codec, FrameInfo;
@ -22,7 +21,7 @@ class ImageInfo {
/// Both the image and the scale must not be null.
///
/// The tag may be used to identify the source of this image.
const ImageInfo({ @required this.image, this.scale = 1.0, this.debugLabel })
const ImageInfo({ required this.image, this.scale = 1.0, this.debugLabel })
: assert(image != null),
assert(scale != null);
@ -45,7 +44,7 @@ class ImageInfo {
final double scale;
/// A string used for debugging purposes to identify the source of this image.
final String debugLabel;
final String? debugLabel;
@override
String toString() => '${debugLabel != null ? '$debugLabel ' : ''}$image @ ${debugFormatDouble(scale)}x';
@ -112,13 +111,13 @@ class ImageStreamListener {
/// This callback may also continue to fire after the [onImage] callback has
/// fired (e.g. for multi-frame images that continue to load after the first
/// frame is available).
final ImageChunkListener onChunk;
final ImageChunkListener? onChunk;
/// Callback for getting notified when an error occurs while loading an image.
///
/// If an error occurs during loading, [onError] will be called instead of
/// [onImage].
final ImageErrorListener onError;
final ImageErrorListener? onError;
@override
int get hashCode => hashValues(onImage, onChunk, onError);
@ -155,7 +154,7 @@ typedef ImageChunkListener = void Function(ImageChunkEvent event);
///
/// Used in [ImageStreamListener], as well as by [ImageCache.putIfAbsent] and
/// [precacheImage], to report errors.
typedef ImageErrorListener = void Function(dynamic exception, StackTrace stackTrace);
typedef ImageErrorListener = void Function(dynamic exception, StackTrace? stackTrace);
/// An immutable notification of image bytes that have been incrementally loaded.
///
@ -170,8 +169,8 @@ typedef ImageErrorListener = void Function(dynamic exception, StackTrace stackTr
class ImageChunkEvent with Diagnosticable {
/// Creates a new chunk event.
const ImageChunkEvent({
@required this.cumulativeBytesLoaded,
@required this.expectedTotalBytes,
required this.cumulativeBytesLoaded,
required this.expectedTotalBytes,
}) : assert(cumulativeBytesLoaded >= 0),
assert(expectedTotalBytes == null || expectedTotalBytes >= 0);
@ -189,7 +188,7 @@ class ImageChunkEvent with Diagnosticable {
/// When this value is null, the chunk event may still be useful as an
/// indication that data is loading (and how much), but it cannot represent a
/// loading completion percentage.
final int expectedTotalBytes;
final int? expectedTotalBytes;
@override
void debugFillProperties(DiagnosticPropertiesBuilder properties) {
@ -229,10 +228,10 @@ class ImageStream with Diagnosticable {
/// The completer that has been assigned to this image stream.
///
/// Generally there is no need to deal with the completer directly.
ImageStreamCompleter get completer => _completer;
ImageStreamCompleter _completer;
ImageStreamCompleter? get completer => _completer;
ImageStreamCompleter? _completer;
List<ImageStreamListener> _listeners;
List<ImageStreamListener>? _listeners;
/// Assigns a particular [ImageStreamCompleter] to this [ImageStream].
///
@ -246,9 +245,9 @@ class ImageStream with Diagnosticable {
assert(_completer == null);
_completer = value;
if (_listeners != null) {
final List<ImageStreamListener> initialListeners = _listeners;
final List<ImageStreamListener> initialListeners = _listeners!;
_listeners = null;
initialListeners.forEach(_completer.addListener);
initialListeners.forEach(_completer!.addListener);
}
}
@ -272,9 +271,9 @@ class ImageStream with Diagnosticable {
/// {@endtemplate}
void addListener(ImageStreamListener listener) {
if (_completer != null)
return _completer.addListener(listener);
return _completer!.addListener(listener);
_listeners ??= <ImageStreamListener>[];
_listeners.add(listener);
_listeners!.add(listener);
}
/// Stops listening for events from this stream's [ImageStreamCompleter].
@ -283,11 +282,11 @@ class ImageStream with Diagnosticable {
/// instance of the listener.
void removeListener(ImageStreamListener listener) {
if (_completer != null)
return _completer.removeListener(listener);
return _completer!.removeListener(listener);
assert(_listeners != null);
for (int i = 0; i < _listeners.length; i += 1) {
if (_listeners[i] == listener) {
_listeners.removeAt(i);
for (int i = 0; i < _listeners!.length; i += 1) {
if (_listeners![i] == listener) {
_listeners!.removeAt(i);
break;
}
}
@ -334,11 +333,11 @@ class ImageStream with Diagnosticable {
/// configure it with the right [ImageStreamCompleter] when possible.
abstract class ImageStreamCompleter with Diagnosticable {
final List<ImageStreamListener> _listeners = <ImageStreamListener>[];
ImageInfo _currentImage;
FlutterErrorDetails _currentError;
ImageInfo? _currentImage;
FlutterErrorDetails? _currentError;
/// A string identifying the source of the underlying image.
String debugLabel;
String? debugLabel;
/// Whether any listeners are currently registered.
///
@ -372,7 +371,7 @@ abstract class ImageStreamCompleter with Diagnosticable {
_listeners.add(listener);
if (_currentImage != null) {
try {
listener.onImage(_currentImage, true);
listener.onImage(_currentImage!, true);
} catch (exception, stack) {
reportError(
context: ErrorDescription('by a synchronously-called image listener'),
@ -383,7 +382,7 @@ abstract class ImageStreamCompleter with Diagnosticable {
}
if (_currentError != null && listener.onError != null) {
try {
listener.onError(_currentError.exception, _currentError.stack);
listener.onError!(_currentError!.exception, _currentError!.stack);
} catch (exception, stack) {
FlutterError.reportError(
FlutterErrorDetails(
@ -486,10 +485,10 @@ abstract class ImageStreamCompleter with Diagnosticable {
/// See [FlutterErrorDetails] for further details on these values.
@protected
void reportError({
DiagnosticsNode context,
DiagnosticsNode? context,
dynamic exception,
StackTrace stack,
InformationCollector informationCollector,
StackTrace? stack,
InformationCollector? informationCollector,
bool silent = false,
}) {
_currentError = FlutterErrorDetails(
@ -503,12 +502,12 @@ abstract class ImageStreamCompleter with Diagnosticable {
// Make a copy to allow for concurrent modification.
final List<ImageErrorListener> localErrorListeners = _listeners
.map<ImageErrorListener>((ImageStreamListener listener) => listener.onError)
.where((ImageErrorListener errorListener) => errorListener != null)
.map<ImageErrorListener?>((ImageStreamListener listener) => listener.onError)
.whereType<ImageErrorListener>()
.toList();
if (localErrorListeners.isEmpty) {
FlutterError.reportError(_currentError);
FlutterError.reportError(_currentError!);
} else {
for (final ImageErrorListener errorListener in localErrorListeners) {
try {
@ -535,8 +534,8 @@ abstract class ImageStreamCompleter with Diagnosticable {
if (hasListeners) {
// Make a copy to allow for concurrent modification.
final List<ImageChunkListener> localListeners = _listeners
.map<ImageChunkListener>((ImageStreamListener listener) => listener.onChunk)
.where((ImageChunkListener chunkListener) => chunkListener != null)
.map<ImageChunkListener?>((ImageStreamListener listener) => listener.onChunk)
.whereType<ImageChunkListener>()
.toList();
for (final ImageChunkListener listener in localListeners) {
listener(event);
@ -553,7 +552,7 @@ abstract class ImageStreamCompleter with Diagnosticable {
description.add(ObjectFlagProperty<List<ImageStreamListener>>(
'listeners',
_listeners,
ifPresent: '${_listeners?.length} listener${_listeners?.length == 1 ? "" : "s" }',
ifPresent: '${_listeners.length} listener${_listeners.length == 1 ? "" : "s" }',
));
}
}
@ -575,7 +574,7 @@ class OneFrameImageStreamCompleter extends ImageStreamCompleter {
/// argument on [FlutterErrorDetails] set to true, meaning that by default the
/// message is only dumped to the console in debug mode (see [new
/// FlutterErrorDetails]).
OneFrameImageStreamCompleter(Future<ImageInfo> image, { InformationCollector informationCollector })
OneFrameImageStreamCompleter(Future<ImageInfo> image, { InformationCollector? informationCollector })
: assert(image != null) {
image.then<void>(setImage, onError: (dynamic error, StackTrace stack) {
reportError(
@ -640,11 +639,11 @@ class MultiFrameImageStreamCompleter extends ImageStreamCompleter {
/// produced by the stream will be delivered to registered [ImageChunkListener]s
/// (see [addListener]).
MultiFrameImageStreamCompleter({
@required Future<ui.Codec> codec,
@required double scale,
String debugLabel,
Stream<ImageChunkEvent> chunkEvents,
InformationCollector informationCollector,
required Future<ui.Codec> codec,
required double scale,
String? debugLabel,
Stream<ImageChunkEvent>? chunkEvents,
InformationCollector? informationCollector,
}) : assert(codec != null),
_informationCollector = informationCollector,
_scale = scale {
@ -673,17 +672,17 @@ class MultiFrameImageStreamCompleter extends ImageStreamCompleter {
}
}
ui.Codec _codec;
ui.Codec? _codec;
final double _scale;
final InformationCollector _informationCollector;
ui.FrameInfo _nextFrame;
final InformationCollector? _informationCollector;
ui.FrameInfo? _nextFrame;
// When the current was first shown.
Duration _shownTimestamp;
late Duration _shownTimestamp;
// The requested duration for the current frame;
Duration _frameDuration;
Duration? _frameDuration;
// How many frames have been emitted so far.
int _framesEmitted = 0;
Timer _timer;
Timer? _timer;
// Used to guard against registering multiple _handleAppFrame callbacks for the same frame.
bool _frameCallbackScheduled = false;
@ -702,17 +701,17 @@ class MultiFrameImageStreamCompleter extends ImageStreamCompleter {
if (!hasListeners)
return;
if (_isFirstFrame() || _hasFrameDurationPassed(timestamp)) {
_emitFrame(ImageInfo(image: _nextFrame.image, scale: _scale, debugLabel: debugLabel));
_emitFrame(ImageInfo(image: _nextFrame!.image, scale: _scale, debugLabel: debugLabel));
_shownTimestamp = timestamp;
_frameDuration = _nextFrame.duration;
_frameDuration = _nextFrame!.duration;
_nextFrame = null;
final int completedCycles = _framesEmitted ~/ _codec.frameCount;
if (_codec.repetitionCount == -1 || completedCycles <= _codec.repetitionCount) {
final int completedCycles = _framesEmitted ~/ _codec!.frameCount;
if (_codec!.repetitionCount == -1 || completedCycles <= _codec!.repetitionCount) {
_decodeNextFrameAndSchedule();
}
return;
}
final Duration delay = _frameDuration - (timestamp - _shownTimestamp);
final Duration delay = _frameDuration! - (timestamp - _shownTimestamp);
_timer = Timer(delay * timeDilation, () {
_scheduleAppFrame();
});
@ -723,13 +722,12 @@ class MultiFrameImageStreamCompleter extends ImageStreamCompleter {
}
bool _hasFrameDurationPassed(Duration timestamp) {
assert(_shownTimestamp != null);
return timestamp - _shownTimestamp >= _frameDuration;
return timestamp - _shownTimestamp >= _frameDuration!;
}
Future<void> _decodeNextFrameAndSchedule() async {
try {
_nextFrame = await _codec.getNextFrame();
_nextFrame = await _codec!.getNextFrame();
} catch (exception, stack) {
reportError(
context: ErrorDescription('resolving an image frame'),
@ -740,10 +738,10 @@ class MultiFrameImageStreamCompleter extends ImageStreamCompleter {
);
return;
}
if (_codec.frameCount == 1) {
if (_codec!.frameCount == 1) {
// This is not an animated image, just return it and don't schedule more
// frames.
_emitFrame(ImageInfo(image: _nextFrame.image, scale: _scale, debugLabel: debugLabel));
_emitFrame(ImageInfo(image: _nextFrame!.image, scale: _scale, debugLabel: debugLabel));
return;
}
_scheduleAppFrame();
@ -754,7 +752,7 @@ class MultiFrameImageStreamCompleter extends ImageStreamCompleter {
return;
}
_frameCallbackScheduled = true;
SchedulerBinding.instance.scheduleFrameCallback(_handleAppFrame);
SchedulerBinding.instance!.scheduleFrameCallback(_handleAppFrame);
}
void _emitFrame(ImageInfo imageInfo) {

View file

@ -2,7 +2,6 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// @dart = 2.8
import 'dart:async';
import 'dart:developer';