Improve type parameter recovery

This CL replaces several calls to parseTypeVariablesOpt with calls
to computeTypeParam, which has improved recovery for a missing '>'.

In addition, this CL
* moves isOneOfOrEof to util.dart
* moves insertSyntheticIdentifierAfter into TokenStreamRewriter

Change-Id: I48080809d156dd98ccfed7aa2f67be254f4da201
Reviewed-on: https://dart-review.googlesource.com/55940
Reviewed-by: Brian Wilkerson <brianwilkerson@google.com>
Commit-Queue: Dan Rubel <danrubel@google.com>
Dan Rubel <danrubel@google.com>, 2018-05-20 12:02:56 +00:00
committed by commit-bot@chromium.org
parent 3e7eef12d4, commit 78cc8a13a2
7 changed files with 304 additions and 62 deletions
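
For example, the class-declaration call site in the parser changes from

    token = parseTypeVariablesOpt(name);

to

    token = computeTypeParam(name).parseVariables(name, this);

so that an unmatched '<' gets a synthetic '>' appended (and an EXPECTED_TOKEN
diagnostic reported) before the type variables are parsed.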

@@ -22,6 +22,66 @@ main() {
*/
@reflectiveTest
class AngleBracketsTest extends AbstractRecoveryTest {
void test_typeParameters_extraGt() {
testRecovery('''
f<T>>() => null;
''', [
ParserErrorCode.TOP_LEVEL_OPERATOR,
ParserErrorCode.MISSING_FUNCTION_PARAMETERS,
ParserErrorCode.MISSING_FUNCTION_BODY
], '''
f<T> > () => null;
''', expectedErrorsInValidCode: [
ParserErrorCode.TOP_LEVEL_OPERATOR,
ParserErrorCode.MISSING_FUNCTION_PARAMETERS,
ParserErrorCode.MISSING_FUNCTION_BODY
]);
}
void test_typeParameters_funct() {
testRecovery('''
f<T extends Function()() => null;
''', [ParserErrorCode.EXPECTED_TOKEN], '''
f<T extends Function()>() => null;
''');
}
void test_typeParameters_funct2() {
testRecovery('''
f<T extends Function<X>()() => null;
''', [ParserErrorCode.EXPECTED_TOKEN], '''
f<T extends Function<X>()>() => null;
''');
}
void test_typeParameters_gtEq() {
testRecovery('''
f<T>=() => null;
''', [
ParserErrorCode.MISSING_FUNCTION_PARAMETERS,
ParserErrorCode.MISSING_FUNCTION_BODY
], '''
f<T> = () => null;
''', expectedErrorsInValidCode: [
ParserErrorCode.MISSING_FUNCTION_PARAMETERS,
ParserErrorCode.MISSING_FUNCTION_BODY
]);
}
void test_typeParameters_gtGtEq() {
testRecovery('''
f<T extends List<int>>=() => null;
''', [
ParserErrorCode.MISSING_FUNCTION_PARAMETERS,
ParserErrorCode.MISSING_FUNCTION_BODY
], '''
f<T extends List<int>> = () => null;
''', expectedErrorsInValidCode: [
ParserErrorCode.MISSING_FUNCTION_PARAMETERS,
ParserErrorCode.MISSING_FUNCTION_BODY
]);
}
@failingTest
void test_typeArguments_inner_last() {
// Parser crashes
@@ -49,6 +109,22 @@ Map<List<int>, List<String>> _s_;
List<int
''', [ScannerErrorCode.EXPECTED_TOKEN], '''
List<int> _s_;
''');
}
void test_typeParameters_last() {
testRecovery('''
f<T() => null;
''', [ParserErrorCode.EXPECTED_TOKEN], '''
f<T>() => null;
''');
}
void test_typeParameters_outer_last() {
testRecovery('''
f<T extends List<int>() => null;
''', [ParserErrorCode.EXPECTED_TOKEN], '''
f<T extends List<int>>() => null;
''');
}
}

@@ -12,10 +12,9 @@ import 'identifier_context.dart';
import 'parser.dart' show Parser;
import 'type_info.dart'
show insertSyntheticIdentifierAfter, isValidTypeReference;
import 'type_info.dart' show isValidTypeReference;
import 'util.dart' show optional;
import 'util.dart' show isOneOfOrEof, optional;
/// See [IdentifierContext.classOrNamedMixinDeclaration].
class ClassOrNamedMixinIdentifierContext extends IdentifierContext {
@@ -46,7 +45,7 @@ class ClassOrNamedMixinIdentifierContext extends IdentifierContext {
if (!identifier.isKeywordOrIdentifier) {
// When in doubt, consume the token to ensure we make progress
// but insert a synthetic identifier to satisfy listeners.
identifier = insertSyntheticIdentifierAfter(identifier, parser);
identifier = parser.rewriter.insertSyntheticIdentifier(identifier);
}
}
return identifier;
@@ -88,7 +87,7 @@ class DottedNameIdentifierContext extends IdentifierContext {
if (!identifier.isKeywordOrIdentifier) {
// When in doubt, consume the token to ensure we make progress
// but insert a synthetic identifier to satisfy listeners.
identifier = insertSyntheticIdentifierAfter(identifier, parser);
identifier = parser.rewriter.insertSyntheticIdentifier(identifier);
}
}
return identifier;
@@ -123,7 +122,7 @@ class EnumDeclarationIdentifierContext extends IdentifierContext {
if (!identifier.isKeywordOrIdentifier) {
// When in doubt, consume the token to ensure we make progress
// but insert a synthetic identifier to satisfy listeners.
identifier = insertSyntheticIdentifierAfter(identifier, parser);
identifier = parser.rewriter.insertSyntheticIdentifier(identifier);
}
}
return identifier;
@@ -148,11 +147,11 @@ class EnumValueDeclarationIdentifierContext extends IdentifierContext {
identifier, fasta.templateExpectedIdentifier);
if (looksLikeStartOfNextTopLevelDeclaration(identifier) ||
isOneOfOrEof(identifier, const [',', '}'])) {
return insertSyntheticIdentifierAfter(token, parser);
return parser.rewriter.insertSyntheticIdentifier(token);
} else if (!identifier.isKeywordOrIdentifier) {
// When in doubt, consume the token to ensure we make progress
// but insert a synthetic identifier to satisfy listeners.
return insertSyntheticIdentifierAfter(identifier, parser);
return parser.rewriter.insertSyntheticIdentifier(identifier);
}
return identifier;
}
@@ -204,7 +203,7 @@ class ExpressionIdentifierContext extends IdentifierContext {
identifier = token.next;
}
// Insert a synthetic identifier to satisfy listeners.
return insertSyntheticIdentifierAfter(token, parser);
return parser.rewriter.insertSyntheticIdentifier(token);
}
}
@@ -257,7 +256,7 @@ class FieldInitializerIdentifierContext extends IdentifierContext {
parser.reportRecoverableErrorWithToken(
identifier, fasta.templateExpectedIdentifier);
// Insert a synthetic identifier to satisfy listeners.
return insertSyntheticIdentifierAfter(token, parser);
return parser.rewriter.insertSyntheticIdentifier(token);
}
}
@@ -291,7 +290,7 @@ class ImportPrefixIdentifierContext extends IdentifierContext {
if (!identifier.isKeywordOrIdentifier) {
// When in doubt, consume the token to ensure we make progress
// but insert a synthetic identifier to satisfy listeners.
identifier = insertSyntheticIdentifierAfter(identifier, parser);
identifier = parser.rewriter.insertSyntheticIdentifier(identifier);
}
}
return identifier;
@@ -328,7 +327,7 @@ class LocalFunctionDeclarationIdentifierContext extends IdentifierContext {
if (!identifier.isKeywordOrIdentifier) {
// When in doubt, consume the token to ensure we make progress
// but insert a synthetic identifier to satisfy listeners.
identifier = insertSyntheticIdentifierAfter(identifier, parser);
identifier = parser.rewriter.insertSyntheticIdentifier(identifier);
}
}
return identifier;
@@ -372,7 +371,7 @@ class LibraryIdentifierContext extends IdentifierContext {
if (!identifier.isKeywordOrIdentifier) {
// When in doubt, consume the token to ensure we make progress
// but insert a synthetic identifier to satisfy listeners.
identifier = insertSyntheticIdentifierAfter(identifier, parser);
identifier = parser.rewriter.insertSyntheticIdentifier(identifier);
}
}
return identifier;
@@ -404,7 +403,7 @@ class LocalVariableDeclarationIdentifierContext extends IdentifierContext {
if (!identifier.isKeywordOrIdentifier) {
// When in doubt, consume the token to ensure we make progress
// but insert a synthetic identifier to satisfy listeners.
identifier = insertSyntheticIdentifierAfter(identifier, parser);
identifier = parser.rewriter.insertSyntheticIdentifier(identifier);
}
}
return identifier;
@@ -493,7 +492,7 @@ class TopLevelDeclarationIdentifierContext extends IdentifierContext {
if (!identifier.isKeywordOrIdentifier) {
// When in doubt, consume the token to ensure we make progress
// but insert a synthetic identifier to satisfy listeners.
identifier = insertSyntheticIdentifierAfter(identifier, parser);
identifier = parser.rewriter.insertSyntheticIdentifier(identifier);
}
}
return identifier;
@@ -534,7 +533,7 @@ class TypedefDeclarationIdentifierContext extends IdentifierContext {
if (!identifier.isKeywordOrIdentifier) {
// When in doubt, consume the token to ensure we make progress
// but insert a synthetic identifier to satisfy listeners.
identifier = insertSyntheticIdentifierAfter(identifier, parser);
identifier = parser.rewriter.insertSyntheticIdentifier(identifier);
}
}
return identifier;
@@ -586,7 +585,7 @@ class TypeReferenceIdentifierContext extends IdentifierContext {
next = token.next;
}
// Insert a synthetic identifier to satisfy listeners.
return insertSyntheticIdentifierAfter(token, parser);
return parser.rewriter.insertSyntheticIdentifier(token);
}
}
@@ -612,11 +611,11 @@ class TypeVariableDeclarationIdentifierContext extends IdentifierContext {
looksLikeStartOfNextClassMember(identifier) ||
looksLikeStartOfNextStatement(identifier) ||
isOneOfOrEof(identifier, followingValues)) {
identifier = insertSyntheticIdentifierAfter(token, parser);
identifier = parser.rewriter.insertSyntheticIdentifier(token);
} else {
// When in doubt, consume the token to ensure we make progress
// but insert a synthetic identifier to satisfy listeners.
identifier = insertSyntheticIdentifierAfter(identifier, parser);
identifier = parser.rewriter.insertSyntheticIdentifier(identifier);
}
return identifier;
}
@@ -634,15 +633,6 @@ void checkAsyncAwaitYieldAsIdentifier(Token identifier, Parser parser) {
}
}
bool isOneOfOrEof(Token token, Iterable<String> followingValues) {
for (String tokenValue in followingValues) {
if (optional(tokenValue, token)) {
return true;
}
}
return token.isEof;
}
bool looksLikeStartOfNextClassMember(Token token) =>
token.isModifier || isOneOfOrEof(token, const ['get', 'set', 'void']);
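
The identifier contexts above all pick up the same mechanical change now that
the synthetic-identifier helper lives on the rewriter:

    // before
    identifier = insertSyntheticIdentifierAfter(identifier, parser);
    // after
    identifier = parser.rewriter.insertSyntheticIdentifier(identifier);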

@@ -89,6 +89,7 @@ import 'type_info.dart'
TypeParamOrArgInfo,
computeMethodTypeArguments,
computeType,
computeTypeParam,
computeTypeParamOrArg,
isGeneralizedFunctionType,
isValidTypeReference,
@@ -501,8 +502,11 @@ class Parser {
// and continue parsing as a top level function.
rewriter.insertTokenAfter(
next,
new SyntheticStringToken(TokenType.IDENTIFIER,
'#synthetic_function_${next.charOffset}', token.charOffset, 0));
new SyntheticStringToken(
TokenType.IDENTIFIER,
'#synthetic_function_${next.charOffset}',
next.next.charOffset,
0));
return parseTopLevelMemberImpl(next);
}
// Ignore any preceding modifiers and just report the unexpected token
@@ -1354,7 +1358,8 @@ class Parser {
Token endInlineFunctionType;
if (beforeInlineFunctionType != null) {
endInlineFunctionType = parseTypeVariablesOpt(beforeInlineFunctionType);
endInlineFunctionType = computeTypeParamOrArg(beforeInlineFunctionType)
.parseVariables(beforeInlineFunctionType, this);
listener.beginFunctionTypedFormalParameter(beforeInlineFunctionType.next);
token = typeInfo.parseType(beforeType, this);
endInlineFunctionType = parseFormalParametersRequiredOpt(
@@ -1654,7 +1659,7 @@ class Parser {
assert(optional('class', token));
Token name =
ensureIdentifier(token, IdentifierContext.classOrNamedMixinDeclaration);
token = parseTypeVariablesOpt(name);
token = computeTypeParam(name).parseVariables(name, this);
if (optional('=', token.next)) {
listener.beginNamedMixinApplication(begin, abstractToken, name);
return parseNamedMixinApplication(token, begin, classKeyword);
@@ -2677,7 +2682,7 @@ class Parser {
Token token;
bool isGetter = false;
if (getOrSet == null) {
token = parseTypeVariablesOpt(name);
token = computeTypeParam(name).parseVariables(name, this);
} else {
isGetter = optional("get", getOrSet);
token = name;

@@ -2,8 +2,16 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
import 'package:front_end/src/fasta/util/link.dart';
import '../../scanner/token.dart'
show BeginToken, SimpleToken, Token, TokenType;
show
BeginToken,
SimpleToken,
SyntheticStringToken,
SyntheticToken,
Token,
TokenType;
import 'util.dart' show optional;
@@ -33,6 +41,61 @@ class TokenStreamRewriter {
/// Initialize a newly created re-writer.
TokenStreamRewriter();
/// For every `<` in [unbalancedLt] append a synthetic `>`.
/// Return the first `<`.
///
/// [unbalancedLt] is a collection of `<` without closing `>` in the reverse
/// order from which they were encountered in the token stream.
Token balanceLt(final Token beforeLt, Token end, Link<Token> unbalancedLt) {
assert(optional('<', beforeLt.next));
assert(unbalancedLt.isNotEmpty);
BeginToken lt;
while (unbalancedLt.isNotEmpty) {
lt = unbalancedLt.head;
unbalancedLt = unbalancedLt.tail;
Token next = end.next;
Token gt;
if (optional('>', next)) {
gt = next;
} else if (optional('>>', next)) {
gt = new SimpleToken(TokenType.GT, next.charOffset)
..setNext(new SimpleToken(TokenType.GT, next.charOffset + 1)
..setNext(next.next));
} else if (optional('>=', next)) {
gt = new SimpleToken(TokenType.GT, next.charOffset)
..setNext(new SimpleToken(TokenType.EQ, next.charOffset + 1)
..setNext(next.next));
} else if (optional('>>=', next)) {
gt = new SimpleToken(TokenType.GT, next.charOffset)
..setNext(new SimpleToken(TokenType.GT, next.charOffset + 1)
..setNext(new SimpleToken(TokenType.EQ, next.charOffset + 2)
..setNext(next.next)));
} else {
gt = new SyntheticToken(TokenType.GT, next.charOffset)..setNext(next);
}
lt.endGroup = gt;
end.setNext(gt);
end = gt;
}
assert(beforeLt.next == lt);
return lt;
}
/// Insert a synthetic identifier after [token] and return the new identifier.
Token insertSyntheticIdentifier(Token token) {
Token identifier = new SyntheticStringToken(
TokenType.IDENTIFIER, '', token.next.charOffset, 0)
..setNext(token.next);
// A no-op rewriter could simply return the synthetic identifier here.
token.setNext(identifier);
return identifier;
}
/// Insert the chain of tokens starting at the [insertedToken] immediately
/// after the [previousToken]. Return the [previousToken].
Token insertTokenAfter(Token previousToken, Token insertedToken) {
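
The interesting case in balanceLt is a composite token after [end]: `>>`, `>=`,
and `>>=` are split so that each recovered `<` can be paired with a plain `>`.
A minimal standalone sketch of the splitting idea (hypothetical Tok class, not
the real Token API):

    // Illustration only: a stand-in for the real token chain.
    class Tok {
      final String lexeme;
      Tok? next;
      Tok(this.lexeme, [this.next]);
    }

    /// Split a single `>>` token into two chained `>` tokens,
    /// preserving whatever follows it.
    Tok splitGtGt(Tok gtGt) {
      assert(gtGt.lexeme == '>>');
      return Tok('>', Tok('>', gtGt.next));
    }

    void main() {
      // A stream ending `... >> (` where only one `>` closes an inner group.
      final tail = splitGtGt(Tok('>>', Tok('(')));
      for (Tok? t = tail; t != null; t = t.next) {
        print(t.lexeme); // prints: >  >  (
      }
    }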

@@ -4,7 +4,7 @@
library fasta.parser.type_info;
import '../../scanner/token.dart' show SyntheticStringToken, Token, TokenType;
import '../../scanner/token.dart' show Token, TokenType;
import '../scanner/token_constants.dart' show IDENTIFIER_TOKEN, KEYWORD_TOKEN;
@@ -108,13 +108,6 @@ const TypeParamOrArgInfo noTypeParamOrArg = const NoTypeParamOrArg();
/// `<` identifier `>`.
const TypeParamOrArgInfo simpleTypeArgument1 = const SimpleTypeArgument1();
Token insertSyntheticIdentifierAfter(Token token, Parser parser) {
Token identifier = new SyntheticStringToken(
TokenType.IDENTIFIER, '', token.next.charOffset, 0);
parser.rewriter.insertTokenAfter(token, identifier);
return identifier;
}
bool isGeneralizedFunctionType(Token token) {
return optional('Function', token) &&
(optional('<', token.next) || optional('(', token.next));
@@ -261,6 +254,29 @@ TypeInfo computeType(final Token token, bool required, [Token innerEndGroup]) {
return noType;
}
/// Called by the parser to obtain information about a possible group of type
/// parameters that follow [token] in a top level or class member declaration.
/// It assumes that a leading `<` cannot be part of an expression, and thus
/// tries to more aggressively recover given an unmatched '<'.
TypeParamOrArgInfo computeTypeParam(Token token) {
if (!optional('<', token.next)) {
return noTypeParamOrArg;
}
TypeParamOrArgInfo typeParam = computeTypeParamOrArgImpl(token);
if (typeParam != noTypeParamOrArg) {
return typeParam;
}
// Recovery
if (token.next.endGroup == null) {
// Attempt to find where the missing `>` should be inserted.
return new ComplexTypeParamOrArgInfo(token).computeRecovery();
} else {
// The `<` `>` are balanced but there's still a problem.
return noTypeParamOrArg;
}
}
/// Called by the parser to obtain information about a possible group of type
/// parameters or type arguments that follow [token].
/// This does not modify the token stream.
@@ -269,18 +285,23 @@ TypeInfo computeType(final Token token, bool required, [Token innerEndGroup]) {
/// with `>>`, then [innerEndGroup] is set to either `>>` if the token
/// has not been split or the first `>` if the `>>` token has been split.
TypeParamOrArgInfo computeTypeParamOrArg(Token token, [Token innerEndGroup]) {
return optional('<', token.next)
? computeTypeParamOrArgImpl(token, innerEndGroup)
: noTypeParamOrArg;
}
TypeParamOrArgInfo computeTypeParamOrArgImpl(Token token,
[Token innerEndGroup]) {
Token next = token.next;
if (!optional('<', next)) {
return noTypeParamOrArg;
}
assert(optional('<', next));
Token endGroup = next.endGroup ?? innerEndGroup;
if (endGroup == null) {
return noTypeParamOrArg;
}
Token identifier = next.next;
// identifier `<` `void` `>` is handled by ComplexTypeInfo.
if (isValidTypeReference(identifier) &&
!optional('void', identifier) &&
// identifier `<` `void` `>` and `<` `dynamic` `>`
// are handled by ComplexTypeInfo.
if ((identifier.kind == IDENTIFIER_TOKEN || identifier.type.isPseudo) &&
identifier.next == endGroup) {
return simpleTypeArgument1;
}
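
The new tests at the top of this CL pin down the recovery this enables:

    f<T() => null;                      =>  f<T>() => null;
    f<T extends List<int>() => null;    =>  f<T extends List<int>>() => null;
    f<T extends Function()() => null;   =>  f<T extends Function()>() => null;
    f<T>=() => null;                    =>  f<T> = () => null;
    f<T extends List<int>>=() => null;  =>  f<T extends List<int>> = () => null;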

@@ -22,7 +22,7 @@ import 'parser.dart' show Parser;
import 'type_info.dart';
import 'util.dart' show optional, skipMetadata;
import 'util.dart' show isOneOf, optional, skipMetadata;
/// See documentation on the [noType] const.
class NoType implements TypeInfo {
@@ -35,7 +35,7 @@ class NoType implements TypeInfo {
Token ensureTypeNotVoid(Token token, Parser parser) {
parser.reportRecoverableErrorWithToken(
token.next, fasta.templateExpectedType);
insertSyntheticIdentifierAfter(token, parser);
parser.rewriter.insertSyntheticIdentifier(token);
return simpleType.parseType(token, parser);
}
@@ -563,6 +563,10 @@ class ComplexTypeParamOrArgInfo implements TypeParamOrArgInfo {
/// `>>` for the outer group and the token before `>>` for the inner group.
Token end;
/// A collection of `<` without closing `>` in the reverse order from which
/// they were encountered in the token stream.
Link<Token> unbalancedLt = const Link<Token>();
ComplexTypeParamOrArgInfo(Token token)
: assert(optional('<', token.next)),
start = token.next;
@@ -618,12 +622,60 @@ class ComplexTypeParamOrArgInfo implements TypeParamOrArgInfo {
return this;
}
/// Parse the tokens and return the receiver or [noTypeParamOrArg] if there
/// are no type parameters or arguments. This does not modify the token
/// stream.
///
/// This is called when parsing type parameters in a top level
/// or class member declaration. It assumes that a leading `<` cannot be part
/// of an expression, and thus tries to more aggressively recover
/// given an unmatched '<'.
///
TypeParamOrArgInfo computeRecovery() {
assert(start.endGroup == null);
unbalancedLt = unbalancedLt.prepend(start);
Token token = start;
Token next = token.next;
while (!next.isEof) {
if (optional('Function', next)) {
next = next.next;
if (optional('<', next)) {
next = skipTypeVariables(next);
if (next == null) {
break;
}
next = next.next;
}
if (optional('(', next)) {
next = next.endGroup;
} else {
break;
}
} else if (optional('<', next)) {
Token endGroup = skipTypeVariables(next);
if (endGroup != null) {
next = endGroup;
} else {
unbalancedLt = unbalancedLt.prepend(next);
}
} else if (!isValidTypeReference(next) &&
!isOneOf(next, const ['.', ',', 'extends', 'super'])) {
break;
}
token = next;
next = token.next;
}
end = token;
return this;
}
@override
Token parseArguments(Token token, Parser parser) {
assert(identical(start, token.next));
Token next = start;
Token innerEndGroup = processBeginGroup(start, parser);
parser.listener.beginTypeArguments(start);
Token begin = balanceLt(token, parser);
Token next = begin;
Token innerEndGroup = processBeginGroup(begin, parser);
parser.listener.beginTypeArguments(begin);
int count = 0;
do {
TypeInfo typeInfo = computeType(next, true, innerEndGroup);
@@ -641,17 +693,17 @@ class ComplexTypeParamOrArgInfo implements TypeParamOrArgInfo {
next = token.next;
++count;
} while (optional(',', next));
end = processEndGroup(token, start, parser);
parser.listener.endTypeArguments(count, start, end);
end = processEndGroup(token, begin, parser);
parser.listener.endTypeArguments(count, begin, end);
return end;
}
@override
Token parseVariables(Token token, Parser parser) {
assert(identical(start, token.next));
Token next = start;
Token innerEndGroup = processBeginGroup(start, parser);
parser.listener.beginTypeVariables(start);
Token begin = balanceLt(token, parser);
Token next = begin;
Token innerEndGroup = processBeginGroup(begin, parser);
parser.listener.beginTypeVariables(begin);
int count = 0;
do {
parser.listener.beginTypeVariable(next.next);
@@ -671,13 +723,28 @@ class ComplexTypeParamOrArgInfo implements TypeParamOrArgInfo {
parser.listener.endTypeVariable(next, extendsOrSuper);
++count;
} while (optional(',', next));
end = processEndGroup(token, start, parser);
parser.listener.endTypeVariables(count, start, end);
end = processEndGroup(token, begin, parser);
parser.listener.endTypeVariables(count, begin, end);
return end;
}
@override
Token skip(Token token) => end;
/// For every unbalanced `<` append a synthetic `>`. Return the first `<`.
Token balanceLt(Token token, Parser parser) {
assert(identical(start, token.next));
if (unbalancedLt.isEmpty) {
return start;
}
Token begin = parser.rewriter.balanceLt(token, end, unbalancedLt);
assert(begin.endGroup != null);
if (begin.endGroup.isSynthetic) {
parser.reportRecoverableError(
begin.endGroup.next, fasta.templateExpectedButGot.withArguments('>'));
}
return begin;
}
}
Token processBeginGroup(BeginToken start, Parser parser) {
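
As a worked example, for the new test input `f<T extends List<int>() => null;`
the computeRecovery loop above scans:

    <         start; recorded in unbalancedLt
    T         valid type reference, keep going
    extends   in the allowed set ('.', ',', 'extends', 'super'), keep going
    List      valid type reference, keep going
    <         skipTypeVariables finds the matching '>', continue from there
    (         none of the above: stop, so end is the '>' of List<int>

balanceLt then appends a synthetic '>' after end, makes it the endGroup of the
recorded '<', and reports the missing '>', which is the single EXPECTED_TOKEN
error that test_typeParameters_outer_last expects.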

@@ -39,6 +39,26 @@ int offsetForToken(Token token) {
return token == null ? TreeNode.noOffset : token.offset;
}
/// Return true if the given token matches one of the given values.
bool isOneOf(Token token, Iterable<String> values) {
for (String tokenValue in values) {
if (optional(tokenValue, token)) {
return true;
}
}
return false;
}
/// Return true if the given token matches one of the given values or is EOF.
bool isOneOfOrEof(Token token, Iterable<String> values) {
for (String tokenValue in values) {
if (optional(tokenValue, token)) {
return true;
}
}
return token.isEof;
}
/// A null-aware alternative to `token.length`. If [token] is `null`, returns
/// [noLength].
int lengthForToken(Token token) {
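
Both helpers are simple lexeme checks used during recovery; typical call sites
from the changes above:

    // enum value identifier recovery
    isOneOfOrEof(identifier, const [',', '}'])

    // computeRecovery, deciding whether to keep scanning
    !isValidTypeReference(next) &&
        !isOneOf(next, const ['.', ',', 'extends', 'super'])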