Fix more strong mode errors in analyzer

R=rnystrom@google.com

Review URL: https://codereview.chromium.org/1847633002 .
This commit is contained in:
Brian Wilkerson 2016-03-31 13:30:04 -07:00
parent ca100e0be2
commit e531f01e73
18 changed files with 211 additions and 156 deletions

View file

@ -40,8 +40,9 @@ void main(List<String> args) {
new PackageUriResolver([new JavaFile(packageRoot)])
]);
context.analysisOptions.strongMode = true;
context.analysisOptions.strongModeHints = true;
AnalysisOptionsImpl options = context.analysisOptions;
options.strongMode = true;
options.strongModeHints = true;
var mainSource =
new FileBasedSource(new JavaFile(p.fromUri(Platform.script)));

View file

@ -296,7 +296,7 @@ class _MemoryDummyLink extends _MemoryResource implements File {
}
@override
Resource renameSync(String newPath) {
File renameSync(String newPath) {
throw new FileSystemException(path, 'File could not be renamed');
}

View file

@ -132,7 +132,7 @@ class _PhysicalFile extends _PhysicalResource implements File {
}
@override
Resource renameSync(String newPath) {
File renameSync(String newPath) {
try {
io.File file = _entry as io.File;
io.File newFile = file.renameSync(newPath);

View file

@ -173,12 +173,13 @@ class AnalysisCache {
* It does not update the cache. If the corresponding [CacheEntry] does not
* exist, then the default value is returned.
*/
Object getValue(AnalysisTarget target, ResultDescriptor result) {
Object/*=V*/ getValue/*<V>*/(
AnalysisTarget target, ResultDescriptor/*<V>*/ result) {
CacheEntry entry = get(target);
if (entry == null) {
return result.defaultValue;
}
return entry.getValue(result);
return entry.getValue(result) as Object/*=V*/;
}
/**

View file

@ -7600,8 +7600,7 @@ class NodeListImpl<E extends AstNode> extends Object
/**
* The node that is the parent of each of the elements in the list.
*/
@override
AstNodeImpl owner;
AstNodeImpl _owner;
/**
* The elements contained in the list.
@ -7613,7 +7612,7 @@ class NodeListImpl<E extends AstNode> extends Object
* are added to the list will have their parent set to the given [owner]. The
* list will initially be populated with the given [elements].
*/
NodeListImpl(this.owner, [List<E> elements]) {
NodeListImpl(this._owner, [List<E> elements]) {
addAll(elements);
}
@ -7642,6 +7641,14 @@ class NodeListImpl<E extends AstNode> extends Object
throw new UnsupportedError("Cannot resize NodeList.");
}
@override
AstNode get owner => _owner;
@override
void set owner(AstNode value) {
_owner = value as AstNodeImpl;
}
E operator [](int index) {
if (index < 0 || index >= _elements.length) {
throw new RangeError("Index: $index, Size: ${_elements.length}");
@ -7653,7 +7660,7 @@ class NodeListImpl<E extends AstNode> extends Object
if (index < 0 || index >= _elements.length) {
throw new RangeError("Index: $index, Size: ${_elements.length}");
}
owner._becomeParentOf(node);
_owner._becomeParentOf(node);
_elements[index] = node;
}
@ -7675,7 +7682,7 @@ class NodeListImpl<E extends AstNode> extends Object
if (nodes != null && !nodes.isEmpty) {
_elements.addAll(nodes);
for (E node in nodes) {
owner._becomeParentOf(node);
_owner._becomeParentOf(node);
}
return true;
}
@ -7693,7 +7700,7 @@ class NodeListImpl<E extends AstNode> extends Object
if (index < 0 || index > length) {
throw new RangeError("Index: $index, Size: ${_elements.length}");
}
owner._becomeParentOf(node);
_owner._becomeParentOf(node);
if (length == 0) {
_elements.add(node);
} else {

View file

@ -94,6 +94,9 @@ class ClassElementHandle extends ElementHandle implements ClassElement {
@override
ConstructorElement get unnamedConstructor => actualElement.unnamedConstructor;
@override
NamedCompilationUnitMember computeNode() => super.computeNode();
@override
FieldElement getField(String fieldName) => actualElement.getField(fieldName);
@ -895,6 +898,9 @@ class ParameterElementHandle extends VariableElementHandle
@override
SourceRange get visibleRange => actualElement.visibleRange;
@override
FormalParameter computeNode() => super.computeNode();
}
/**
@ -1013,6 +1019,9 @@ class TopLevelVariableElementHandle extends PropertyInducingElementHandle
@override
ElementKind get kind => ElementKind.TOP_LEVEL_VARIABLE;
@override
VariableDeclaration computeNode() => super.computeNode();
}
/**
@ -1075,10 +1084,12 @@ abstract class VariableElementHandle extends ElementHandle
@override
bool get isFinal => actualElement.isFinal;
@deprecated
@override
bool get isPotentiallyMutatedInClosure =>
actualElement.isPotentiallyMutatedInClosure;
@deprecated
@override
bool get isPotentiallyMutatedInScope =>
actualElement.isPotentiallyMutatedInScope;

View file

@ -688,7 +688,7 @@ class FunctionTypeImpl extends TypeImpl implements FunctionType {
// base types.
assert(this.prunedTypedefs == null);
List<DartType> typeArgs = typeArguments
.map((TypeImpl t) => t.pruned(prune))
.map((DartType t) => (t as TypeImpl).pruned(prune))
.toList(growable: false);
return new FunctionTypeImpl._(
element, name, prune, typeArgs, _isInstantiated);
@ -1616,8 +1616,9 @@ class InterfaceTypeImpl extends TypeImpl implements InterfaceType {
// base types.
assert(this.prunedTypedefs == null);
InterfaceTypeImpl result = new InterfaceTypeImpl._(element, name, prune);
result.typeArguments =
typeArguments.map((TypeImpl t) => t.pruned(prune)).toList();
result.typeArguments = typeArguments
.map((DartType t) => (t as TypeImpl).pruned(prune))
.toList();
return result;
}
}

View file

@ -7537,8 +7537,10 @@ class ResolverVisitor extends ScopedVisitor {
// Same number of type formals. Instantiate the function type so its
// parameter and return type are in terms of the surrounding context.
return fnType.instantiate(
typeParameters.map((t) => t.name.staticElement.type).toList());
return fnType.instantiate(typeParameters
.map((TypeParameter t) =>
(t.name.staticElement as TypeParameterElement).type)
.toList());
}
/**

View file

@ -873,7 +873,7 @@ abstract class TypeSystem {
// Walk the superinterface hierarchy looking for [genericType].
List<DartType> candidates = <DartType>[];
HashSet<ClassElement> visitedClasses = new HashSet<ClassElement>();
void recurse(InterfaceTypeImpl interface) {
void recurse(InterfaceType interface) {
if (interface.element == genericType.element &&
interface.typeArguments.isNotEmpty) {
candidates.add(interface.typeArguments[0]);

View file

@ -723,7 +723,8 @@ class _DeferredConstructorElement extends ConstructorElementHandle {
: super(null, location);
@override
Element get actualElement => enclosingElement.getNamedConstructor(name);
ConstructorElement get actualElement =>
enclosingElement.getNamedConstructor(name);
@override
AnalysisContext get context => _definingType.element.context;
@ -1325,7 +1326,7 @@ class _ReferenceInfo {
// For a type that refers to a generic executable, the type arguments are
// not supposed to include the arguments to the executable itself.
numTypeArguments = enclosing == null ? 0 : enclosing.numTypeParameters;
computer = () => this.element;
computer = () => this.element as FunctionTypedElement;
}
// TODO(paulberry): Is it a bug that we have to pass `false` for
// isInstantiated?

View file

@ -504,10 +504,11 @@ class _CompilationUnitSerializer {
if (element.metadata.isEmpty) {
return const <UnlinkedConstBuilder>[];
}
return element.metadata.map((ElementAnnotationImpl a) {
return element.metadata.map((ElementAnnotation a) {
_ConstExprSerializer serializer =
new _ConstExprSerializer(this, element, null);
serializer.serializeAnnotation(a.annotationAst);
serializer
.serializeAnnotation((a as ElementAnnotationImpl).annotationAst);
return serializer.toBuilder();
}).toList();
}

View file

@ -45,7 +45,7 @@ import 'package:analyzer/task/model.dart';
/**
* The [ResultCachingPolicy] for ASTs.
*/
const ResultCachingPolicy AST_CACHING_POLICY =
const ResultCachingPolicy<CompilationUnit> AST_CACHING_POLICY =
const SimpleResultCachingPolicy(16384, 16384);
/**
@ -57,7 +57,7 @@ const ResultCachingPolicy ELEMENT_CACHING_POLICY =
/**
* The [ResultCachingPolicy] for [TOKEN_STREAM].
*/
const ResultCachingPolicy TOKEN_STREAM_CACHING_POLICY =
const ResultCachingPolicy<Token> TOKEN_STREAM_CACHING_POLICY =
const SimpleResultCachingPolicy(1, 1);
/**
@ -1986,7 +1986,7 @@ class ComputeLibraryCycleTask extends SourceBasedAnalysisTask {
List<LibraryElement> component = library.libraryCycle;
Set<LibraryElement> filter = new Set<LibraryElement>.from(component);
Set<CompilationUnitElement> deps = new Set<CompilationUnitElement>();
void addLibrary(l) {
void addLibrary(LibraryElement l) {
if (!filter.contains(l)) {
deps.addAll(l.units);
}
@ -2933,13 +2933,17 @@ class InferInstanceMembersInUnitTask extends SourceBasedAnalysisTask {
// Require that field re-resolution be complete for all units in the
// current library cycle.
'orderLibraryCycleTasks': LIBRARY_CYCLE_UNITS.of(unit).toList(
(CompilationUnitElementImpl unit) => CREATED_RESOLVED_UNIT8
.of(new LibrarySpecificUnit(unit.librarySource, unit.source))),
(CompilationUnitElement unit) => CREATED_RESOLVED_UNIT8.of(
new LibrarySpecificUnit(
(unit as CompilationUnitElementImpl).librarySource,
unit.source))),
// Require that full inference be complete for all dependencies of the
// current library cycle.
'orderLibraryCycles': LIBRARY_CYCLE_DEPENDENCIES.of(unit).toList(
(CompilationUnitElementImpl unit) => CREATED_RESOLVED_UNIT9
.of(new LibrarySpecificUnit(unit.librarySource, unit.source)))
(CompilationUnitElement unit) => CREATED_RESOLVED_UNIT9.of(
new LibrarySpecificUnit(
(unit as CompilationUnitElementImpl).librarySource,
unit.source)))
};
}
@ -3193,8 +3197,10 @@ class InferStaticVariableTypeTask extends InferStaticVariableTask {
// Require that full inference be complete for all dependencies of the
// current library cycle.
'orderLibraryCycles': LIBRARY_CYCLE_DEPENDENCIES.of(unit).toList(
(CompilationUnitElementImpl unit) => CREATED_RESOLVED_UNIT9
.of(new LibrarySpecificUnit(unit.librarySource, unit.source)))
(CompilationUnitElement unit) => CREATED_RESOLVED_UNIT9.of(
new LibrarySpecificUnit(
(unit as CompilationUnitElementImpl).librarySource,
unit.source)))
};
}
@ -3675,8 +3681,10 @@ class PartiallyResolveUnitReferencesTask extends SourceBasedAnalysisTask {
// Require that full inference be complete for all dependencies of the
// current library cycle.
'orderLibraryCycles': LIBRARY_CYCLE_DEPENDENCIES.of(unit).toList(
(CompilationUnitElementImpl unit) => CREATED_RESOLVED_UNIT9
.of(new LibrarySpecificUnit(unit.librarySource, unit.source)))
(CompilationUnitElement unit) => CREATED_RESOLVED_UNIT9.of(
new LibrarySpecificUnit(
(unit as CompilationUnitElementImpl).librarySource,
unit.source)))
};
}
@ -4398,13 +4406,17 @@ class ResolveInstanceFieldsInUnitTask extends SourceBasedAnalysisTask {
// Require that static variable inference be complete for all units in
// the current library cycle.
'orderLibraryCycleTasks': LIBRARY_CYCLE_UNITS.of(unit).toList(
(CompilationUnitElementImpl unit) => CREATED_RESOLVED_UNIT7
.of(new LibrarySpecificUnit(unit.librarySource, unit.source))),
(CompilationUnitElement unit) => CREATED_RESOLVED_UNIT7.of(
new LibrarySpecificUnit(
(unit as CompilationUnitElementImpl).librarySource,
unit.source))),
// Require that full inference be complete for all dependencies of the
// current library cycle.
'orderLibraryCycles': LIBRARY_CYCLE_DEPENDENCIES.of(unit).toList(
(CompilationUnitElementImpl unit) => CREATED_RESOLVED_UNIT9
.of(new LibrarySpecificUnit(unit.librarySource, unit.source)))
(CompilationUnitElement unit) => CREATED_RESOLVED_UNIT9.of(
new LibrarySpecificUnit(
(unit as CompilationUnitElementImpl).librarySource,
unit.source)))
};
}
@ -4721,8 +4733,10 @@ class ResolveUnitTask extends SourceBasedAnalysisTask {
// Require that inference be complete for all units in the
// current library cycle.
'orderLibraryCycleTasks': LIBRARY_CYCLE_UNITS.of(unit).toList(
(CompilationUnitElementImpl unit) => CREATED_RESOLVED_UNIT9
.of(new LibrarySpecificUnit(unit.librarySource, unit.source)))
(CompilationUnitElement unit) => CREATED_RESOLVED_UNIT9.of(
new LibrarySpecificUnit(
(unit as CompilationUnitElementImpl).librarySource,
unit.source)))
};
}

View file

@ -597,7 +597,7 @@ class WorkItem {
* or `null` if all of the inputs have been collected and the task can be
* created.
*/
TaskInputBuilder builder;
TopLevelTaskInputBuilder builder;
/**
* The [TargetedResult]s outputs of this task depends on.

View file

@ -166,7 +166,7 @@ class DartScriptsTask extends SourceBasedAnalysisTask {
* input descriptors describing those inputs for a task with the
* given [target].
*/
static Map<String, TaskInput> buildInputs(Source target) {
static Map<String, TaskInput> buildInputs(AnalysisTarget target) {
return <String, TaskInput>{DOCUMENT_INPUT: HTML_DOCUMENT.of(target)};
}
@ -237,7 +237,7 @@ class HtmlErrorsTask extends SourceBasedAnalysisTask {
* input descriptors describing those inputs for a task with the
* given [target].
*/
static Map<String, TaskInput> buildInputs(Source target) {
static Map<String, TaskInput> buildInputs(AnalysisTarget target) {
EnginePlugin enginePlugin = AnalysisEngine.instance.enginePlugin;
Map<String, TaskInput> inputs = <String, TaskInput>{
DART_ERRORS_INPUT: DART_SCRIPTS.of(target).toListOf(DART_ERRORS)
@ -342,7 +342,7 @@ class ParseHtmlTask extends SourceBasedAnalysisTask {
* input descriptors describing those inputs for a task with the given
* [source].
*/
static Map<String, TaskInput> buildInputs(Source source) {
static Map<String, TaskInput> buildInputs(AnalysisTarget source) {
return <String, TaskInput>{CONTENT_INPUT_NAME: CONTENT.of(source)};
}

View file

@ -39,7 +39,7 @@ class ConstantTaskInput<V> extends TaskInputImpl<V> {
* A [TaskInputBuilder] used to build an input based on a [ConstantTaskInput].
*/
class ConstantTaskInputBuilder<V> implements TaskInputBuilder<V> {
final ConstantTaskInput input;
final ConstantTaskInput<V> input;
ConstantTaskInputBuilder(this.input);
@ -159,7 +159,7 @@ class ListToFlattenListTaskInputBuilder<B, E>
@override
void _addResultElement(B baseElement, E resultElement) {
_resultValue.addAll(resultElement as Iterable);
_resultValue.addAll(resultElement as Iterable<E>);
}
@override
@ -418,6 +418,8 @@ class MapToFlattenListTaskInputBuilder<K, V, E>
class ObjectToListTaskInput<E> extends TaskInputImpl<List<E>>
with ListTaskInputMixin<E>
implements ListTaskInput<E> {
// TODO(brianwilkerson) Add another type parameter to this class that can be
// used as the type of the keys of [mapper].
/**
* The input used to compute the value to be mapped.
*/

View file

@ -168,36 +168,36 @@ class ErrorFilterOptionValidator extends OptionsValidator {
@override
void validate(ErrorReporter reporter, Map<String, YamlNode> options) {
var analyzer = options[AnalyzerOptions.analyzer];
if (analyzer is! YamlMap) {
return;
}
var filters = analyzer[AnalyzerOptions.errors];
if (filters is YamlMap) {
String value;
filters.nodes.forEach((k, v) {
if (k is YamlScalar) {
value = toUpperCase(k.value);
if (!errorCodes.contains(value)) {
reporter.reportErrorForSpan(
AnalysisOptionsWarningCode.UNRECOGNIZED_ERROR_CODE,
k.span,
[k.value?.toString()]);
if (analyzer is YamlMap) {
var filters = analyzer[AnalyzerOptions.errors];
if (filters is YamlMap) {
String value;
filters.nodes.forEach((k, v) {
if (k is YamlScalar) {
value = toUpperCase(k.value);
if (!errorCodes.contains(value)) {
reporter.reportErrorForSpan(
AnalysisOptionsWarningCode.UNRECOGNIZED_ERROR_CODE,
k.span,
[k.value?.toString()]);
}
}
}
if (v is YamlScalar) {
value = toLowerCase(v.value);
if (!legalValues.contains(value)) {
reporter.reportErrorForSpan(
AnalysisOptionsWarningCode.UNSUPPORTED_OPTION_WITH_LEGAL_VALUES,
v.span, [
AnalyzerOptions.errors,
v.value?.toString(),
legalValueString
]);
if (v is YamlScalar) {
value = toLowerCase(v.value);
if (!legalValues.contains(value)) {
reporter.reportErrorForSpan(
AnalysisOptionsWarningCode
.UNSUPPORTED_OPTION_WITH_LEGAL_VALUES,
v.span,
[
AnalyzerOptions.errors,
v.value?.toString(),
legalValueString
]);
}
}
}
});
});
}
}
}
}
@ -255,7 +255,7 @@ class GenerateOptionsErrorsTask extends SourceBasedAnalysisTask {
/// Return a map from the names of the inputs of this kind of task to the
/// task input descriptors describing those inputs for a task with the
/// given [target].
static Map<String, TaskInput> buildInputs(Source source) =>
static Map<String, TaskInput> buildInputs(AnalysisTarget source) =>
<String, TaskInput>{CONTENT_INPUT_NAME: CONTENT.of(source)};
/// Compute [LineInfo] for the given [content].
@ -289,31 +289,29 @@ class LanguageOptionValidator extends OptionsValidator {
@override
void validate(ErrorReporter reporter, Map<String, YamlNode> options) {
var analyzer = options[AnalyzerOptions.analyzer];
if (analyzer is! YamlMap) {
return;
}
var language = analyzer[AnalyzerOptions.language];
if (language is YamlMap) {
language.nodes.forEach((k, v) {
String key, value;
bool validKey = false;
if (k is YamlScalar) {
key = k.value?.toString();
if (!AnalyzerOptions.languageOptions.contains(key)) {
builder.reportError(reporter, AnalyzerOptions.language, k);
} else {
// If we have a valid key, go on and check the value.
validKey = true;
if (analyzer is YamlMap) {
var language = analyzer[AnalyzerOptions.language];
if (language is YamlMap) {
language.nodes.forEach((k, v) {
String key, value;
bool validKey = false;
if (k is YamlScalar) {
key = k.value?.toString();
if (!AnalyzerOptions.languageOptions.contains(key)) {
builder.reportError(reporter, AnalyzerOptions.language, k);
} else {
// If we have a valid key, go on and check the value.
validKey = true;
}
}
}
if (validKey && v is YamlScalar) {
value = toLowerCase(v.value);
if (!AnalyzerOptions.trueOrFalse.contains(value)) {
trueOrFalseBuilder.reportError(reporter, key, v);
if (validKey && v is YamlScalar) {
value = toLowerCase(v.value);
if (!AnalyzerOptions.trueOrFalse.contains(value)) {
trueOrFalseBuilder.reportError(reporter, key, v);
}
}
}
});
});
}
}
}
}
@ -352,16 +350,14 @@ class StrongModeOptionValueValidator extends OptionsValidator {
@override
void validate(ErrorReporter reporter, Map<String, YamlNode> options) {
var analyzer = options[AnalyzerOptions.analyzer];
if (analyzer is! YamlMap) {
return;
}
var v = analyzer.nodes[AnalyzerOptions.strong_mode];
if (v is YamlScalar) {
var value = toLowerCase(v.value);
if (!AnalyzerOptions.trueOrFalse.contains(value)) {
trueOrFalseBuilder.reportError(
reporter, AnalyzerOptions.strong_mode, v);
if (analyzer is YamlMap) {
var v = analyzer.nodes[AnalyzerOptions.strong_mode];
if (v is YamlScalar) {
var value = toLowerCase(v.value);
if (!AnalyzerOptions.trueOrFalse.contains(value)) {
trueOrFalseBuilder.reportError(
reporter, AnalyzerOptions.strong_mode, v);
}
}
}
}
@ -431,17 +427,16 @@ class _OptionsProcessor {
return;
}
var analyzer = optionMap[AnalyzerOptions.analyzer];
if (analyzer is! Map) {
return;
if (analyzer is Map) {
// Process strong mode option.
var strongMode = analyzer[AnalyzerOptions.strong_mode];
if (strongMode is bool) {
options.strongMode = strongMode;
}
// Process language options.
var language = analyzer[AnalyzerOptions.language];
_applyLanguageOptions(options, language);
}
// Process strong mode option.
var strongMode = analyzer[AnalyzerOptions.strong_mode];
if (strongMode is bool) {
options.strongMode = strongMode;
}
// Process language options.
var language = analyzer[AnalyzerOptions.language];
_applyLanguageOptions(options, language);
}
/// Configure [context] based on the given [options] (which can be `null`
@ -452,21 +447,19 @@ class _OptionsProcessor {
}
var analyzer = options[AnalyzerOptions.analyzer];
if (analyzer is! Map) {
return;
if (analyzer is Map) {
// Set strong mode (default is false).
var strongMode = analyzer[AnalyzerOptions.strong_mode];
setStrongMode(context, strongMode);
// Set filters.
var filters = analyzer[AnalyzerOptions.errors];
setProcessors(context, filters);
// Process language options.
var language = analyzer[AnalyzerOptions.language];
setLanguageOptions(context, language);
}
// Set strong mode (default is false).
var strongMode = analyzer[AnalyzerOptions.strong_mode];
setStrongMode(context, strongMode);
// Set filters.
var filters = analyzer[AnalyzerOptions.errors];
setProcessors(context, filters);
// Process language options.
var language = analyzer[AnalyzerOptions.language];
setLanguageOptions(context, language);
}
void setLanguageOption(

View file

@ -100,7 +100,7 @@ class ParseYamlTask extends SourceBasedAnalysisTask {
* input descriptors describing those inputs for a task with the given
* [source].
*/
static Map<String, TaskInput> buildInputs(Source source) {
static Map<String, TaskInput> buildInputs(AnalysisTarget source) {
return <String, TaskInput>{CONTENT_INPUT_NAME: CONTENT.of(source)};
}

View file

@ -3518,7 +3518,7 @@ class B = Object with A {}''',
void test_expressionList_multiple_end() {
List<Expression> result = parse4("parseExpressionList", ", 2, 3, 4",
[ParserErrorCode.MISSING_IDENTIFIER]);
[ParserErrorCode.MISSING_IDENTIFIER]) as List<Expression>;
expect(result, hasLength(4));
Expression syntheticExpression = result[0];
EngineTestCase.assertInstanceOf((obj) => obj is SimpleIdentifier,
@ -3528,7 +3528,7 @@ class B = Object with A {}''',
void test_expressionList_multiple_middle() {
List<Expression> result = parse4("parseExpressionList", "1, 2, , 4",
[ParserErrorCode.MISSING_IDENTIFIER]);
[ParserErrorCode.MISSING_IDENTIFIER]) as List<Expression>;
expect(result, hasLength(4));
Expression syntheticExpression = result[2];
EngineTestCase.assertInstanceOf((obj) => obj is SimpleIdentifier,
@ -3538,7 +3538,7 @@ class B = Object with A {}''',
void test_expressionList_multiple_start() {
List<Expression> result = parse4("parseExpressionList", "1, 2, 3,",
[ParserErrorCode.MISSING_IDENTIFIER]);
[ParserErrorCode.MISSING_IDENTIFIER]) as List<Expression>;
expect(result, hasLength(4));
Expression syntheticExpression = result[3];
EngineTestCase.assertInstanceOf((obj) => obj is SimpleIdentifier,
@ -6085,7 +6085,8 @@ class SimpleParserTest extends ParserTestCase {
}
void test_parseCombinators_h() {
List<Combinator> combinators = parse4("parseCombinators", "hide a;");
List<Combinator> combinators =
parse4("parseCombinators", "hide a;") as List<Combinator>;
expect(combinators, hasLength(1));
HideCombinator combinator = combinators[0] as HideCombinator;
expect(combinator, isNotNull);
@ -6094,7 +6095,8 @@ class SimpleParserTest extends ParserTestCase {
}
void test_parseCombinators_hs() {
List<Combinator> combinators = parse4("parseCombinators", "hide a show b;");
List<Combinator> combinators =
parse4("parseCombinators", "hide a show b;") as List<Combinator>;
expect(combinators, hasLength(2));
HideCombinator hideCombinator = combinators[0] as HideCombinator;
expect(hideCombinator, isNotNull);
@ -6108,12 +6110,14 @@ class SimpleParserTest extends ParserTestCase {
void test_parseCombinators_hshs() {
List<Combinator> combinators =
parse4("parseCombinators", "hide a show b hide c show d;");
parse4("parseCombinators", "hide a show b hide c show d;")
as List<Combinator>;
expect(combinators, hasLength(4));
}
void test_parseCombinators_s() {
List<Combinator> combinators = parse4("parseCombinators", "show a;");
List<Combinator> combinators =
parse4("parseCombinators", "show a;") as List<Combinator>;
expect(combinators, hasLength(1));
ShowCombinator combinator = combinators[0] as ShowCombinator;
expect(combinator, isNotNull);
@ -6274,7 +6278,8 @@ void''');
TokenType.MULTI_LINE_COMMENT, "/** xxx [a] yyy [bb] zzz */", 3);
List<DocumentationCommentToken> tokens = <DocumentationCommentToken>[token];
List<CommentReference> references =
parse("parseCommentReferences", <Object>[tokens], "");
parse("parseCommentReferences", <Object>[tokens], "")
as List<CommentReference>;
List<Token> tokenReferences = token.references;
expect(references, hasLength(2));
expect(tokenReferences, hasLength(2));
@ -6306,7 +6311,8 @@ void''');
TokenType.MULTI_LINE_COMMENT, "/** [ some text", 5)
];
List<CommentReference> references =
parse("parseCommentReferences", <Object>[tokens], "");
parse("parseCommentReferences", <Object>[tokens], "")
as List<CommentReference>;
expect(references, hasLength(1));
CommentReference reference = references[0];
expect(reference, isNotNull);
@ -6321,7 +6327,8 @@ void''');
TokenType.MULTI_LINE_COMMENT, "/** [namePrefix some text", 5)
];
List<CommentReference> references =
parse("parseCommentReferences", <Object>[tokens], "");
parse("parseCommentReferences", <Object>[tokens], "")
as List<CommentReference>;
expect(references, hasLength(1));
CommentReference reference = references[0];
expect(reference, isNotNull);
@ -6338,7 +6345,8 @@ void''');
TokenType.SINGLE_LINE_COMMENT, "/// x [c]", 28)
];
List<CommentReference> references =
parse("parseCommentReferences", <Object>[tokens], "");
parse("parseCommentReferences", <Object>[tokens], "")
as List<CommentReference>;
expect(references, hasLength(3));
CommentReference reference = references[0];
expect(reference, isNotNull);
@ -6360,7 +6368,8 @@ void''');
"/**\n * a[i]\n * non-code line\n */", 3)
];
List<CommentReference> references =
parse("parseCommentReferences", <Object>[tokens], "");
parse("parseCommentReferences", <Object>[tokens], "")
as List<CommentReference>;
expect(references, isEmpty);
}
@ -6372,7 +6381,8 @@ void''');
TokenType.SINGLE_LINE_COMMENT, "/// a[i] == b[i]", 0)
];
List<CommentReference> references =
parse("parseCommentReferences", <Object>[tokens], "");
parse("parseCommentReferences", <Object>[tokens], "")
as List<CommentReference>;
expect(references, isEmpty);
}
@ -6382,7 +6392,8 @@ void''');
TokenType.MULTI_LINE_COMMENT, "/** [:xxx [a] yyy:] [b] zzz */", 3)
];
List<CommentReference> references =
parse("parseCommentReferences", <Object>[tokens], "");
parse("parseCommentReferences", <Object>[tokens], "")
as List<CommentReference>;
expect(references, hasLength(1));
CommentReference reference = references[0];
expect(reference, isNotNull);
@ -6396,7 +6407,8 @@ void''');
TokenType.MULTI_LINE_COMMENT, "/** `a[i]` and [b] */", 0)
];
List<CommentReference> references =
parse("parseCommentReferences", <Object>[tokens], "");
parse("parseCommentReferences", <Object>[tokens], "")
as List<CommentReference>;
expect(references, hasLength(1));
CommentReference reference = references[0];
expect(reference, isNotNull);
@ -6410,7 +6422,8 @@ void''');
TokenType.MULTI_LINE_COMMENT, "/** `a[i] and [b] */", 0)
];
List<CommentReference> references =
parse("parseCommentReferences", <Object>[tokens], "");
parse("parseCommentReferences", <Object>[tokens], "")
as List<CommentReference>;
expect(references, hasLength(2));
}
@ -6420,7 +6433,8 @@ void''');
"/**\n * a[i]\n * xxx [i] zzz\n */", 3)
];
List<CommentReference> references =
parse("parseCommentReferences", <Object>[tokens], "");
parse("parseCommentReferences", <Object>[tokens], "")
as List<CommentReference>;
expect(references, hasLength(1));
CommentReference reference = references[0];
expect(reference, isNotNull);
@ -6434,7 +6448,8 @@ void''');
"/** [a]: http://www.google.com (Google) [b] zzz */", 3)
];
List<CommentReference> references =
parse("parseCommentReferences", <Object>[tokens], "");
parse("parseCommentReferences", <Object>[tokens], "")
as List<CommentReference>;
expect(references, hasLength(1));
CommentReference reference = references[0];
expect(reference, isNotNull);
@ -6448,7 +6463,8 @@ void''');
"/** [a](http://www.google.com) [b] zzz */", 3)
];
List<CommentReference> references =
parse("parseCommentReferences", <Object>[tokens], "");
parse("parseCommentReferences", <Object>[tokens], "")
as List<CommentReference>;
expect(references, hasLength(1));
CommentReference reference = references[0];
expect(reference, isNotNull);
@ -6462,7 +6478,8 @@ void''');
TokenType.MULTI_LINE_COMMENT, "/** [a][c] [b] zzz */", 3)
];
List<CommentReference> references =
parse("parseCommentReferences", <Object>[tokens], "");
parse("parseCommentReferences", <Object>[tokens], "")
as List<CommentReference>;
expect(references, hasLength(1));
CommentReference reference = references[0];
expect(reference, isNotNull);
@ -7423,12 +7440,14 @@ void''');
}
void test_parseExpressionList_multiple() {
List<Expression> result = parse4("parseExpressionList", "1, 2, 3");
List<Expression> result =
parse4("parseExpressionList", "1, 2, 3") as List<Expression>;
expect(result, hasLength(3));
}
void test_parseExpressionList_single() {
List<Expression> result = parse4("parseExpressionList", "1");
List<Expression> result =
parse4("parseExpressionList", "1") as List<Expression>;
expect(result, hasLength(1));
}
@ -8383,12 +8402,14 @@ void''');
}
void test_parseIdentifierList_multiple() {
List<SimpleIdentifier> list = parse4("parseIdentifierList", "a, b, c");
List<SimpleIdentifier> list =
parse4("parseIdentifierList", "a, b, c") as List<SimpleIdentifier>;
expect(list, hasLength(3));
}
void test_parseIdentifierList_single() {
List<SimpleIdentifier> list = parse4("parseIdentifierList", "a");
List<SimpleIdentifier> list =
parse4("parseIdentifierList", "a") as List<SimpleIdentifier>;
expect(list, hasLength(1));
}