mirror of
https://github.com/dart-lang/sdk
synced 2024-09-15 22:19:49 +00:00
Ensure that partial inference results aren't refined by later partial inference stages.
As part of the implementation of https://github.com/dart-lang/language/issues/731 (improved inference for fold etc.), I expanded the front end's type inference logic so that instead of just having a downward phase and an upward phase, it could have 3 or more phases. The function that previously did downward inference became repurposed to do "partial inference" (which could either be the first, downward stage, or a later, horizontal stage). However, I failed to generalize the logic that prevents types assigned by one inference stage from being refined by later stages--previously this logic was only needed for upward inference, but now it's needed for horizontal inference stages as well. (This logic is needed because of Dart's "runtime checked covariance" behavior--it means that we want to stick with the type from downward inference, even if a later horizontal inference stage is able to find a more precise type, because that more precise type may lead to runtime failures). As part of this change I've re-architected the inference methods so that they are responsible for creating and returning the list of inferred types. This makes the inference logic more similar between the front end and analyzer, and is easier to read IMHO. The total number of list allocations is the same as before. Change-Id: I19bfcede9c2968e50f110b571164549f16495217 Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/243707 Reviewed-by: Chloe Stefantsova <cstefantsova@google.com> Commit-Queue: Paul Berry <paulberry@google.com>
This commit is contained in:
parent
51d578cf91
commit
0d636e5543
|
@ -2030,23 +2030,15 @@ class InferenceVisitor
|
|||
}
|
||||
TypeConstraintGatherer? gatherer;
|
||||
if (inferenceNeeded) {
|
||||
inferredTypes = [const UnknownType()];
|
||||
gatherer = inferrer.typeSchemaEnvironment.setupGenericTypeInference(
|
||||
listType,
|
||||
listClass.typeParameters,
|
||||
typeContext,
|
||||
inferrer.libraryBuilder.library,
|
||||
isConst: node.isConst);
|
||||
inferrer.typeSchemaEnvironment.partialInfer(
|
||||
gatherer,
|
||||
listClass.typeParameters,
|
||||
inferredTypes,
|
||||
inferrer.libraryBuilder.library);
|
||||
inferredTypes = inferrer.typeSchemaEnvironment.partialInfer(gatherer,
|
||||
listClass.typeParameters, null, inferrer.libraryBuilder.library);
|
||||
inferredTypeArgument = inferredTypes[0];
|
||||
if (inferrer.dataForTesting != null) {
|
||||
inferrer.dataForTesting!.typeInferenceResult
|
||||
.inferredTypeArguments[node] = inferredTypes;
|
||||
}
|
||||
} else {
|
||||
inferredTypeArgument = node.typeArgument;
|
||||
}
|
||||
|
@ -2068,11 +2060,15 @@ class InferenceVisitor
|
|||
}
|
||||
if (inferenceNeeded) {
|
||||
gatherer!.constrainArguments(formalTypes!, actualTypes!);
|
||||
inferrer.typeSchemaEnvironment.upwardsInfer(
|
||||
inferredTypes = inferrer.typeSchemaEnvironment.upwardsInfer(
|
||||
gatherer,
|
||||
listClass.typeParameters,
|
||||
inferredTypes!,
|
||||
inferrer.libraryBuilder.library);
|
||||
if (inferrer.dataForTesting != null) {
|
||||
inferrer.dataForTesting!.typeInferenceResult
|
||||
.inferredTypeArguments[node] = inferredTypes;
|
||||
}
|
||||
inferredTypeArgument = inferredTypes[0];
|
||||
inferrer.instrumentation?.record(
|
||||
inferrer.uriForInstrumentation,
|
||||
|
@ -2715,24 +2711,16 @@ class InferenceVisitor
|
|||
}
|
||||
TypeConstraintGatherer? gatherer;
|
||||
if (inferenceNeeded) {
|
||||
inferredTypes = [noInferredType, noInferredType];
|
||||
gatherer = inferrer.typeSchemaEnvironment.setupGenericTypeInference(
|
||||
mapType,
|
||||
mapClass.typeParameters,
|
||||
typeContext,
|
||||
inferrer.libraryBuilder.library,
|
||||
isConst: node.isConst);
|
||||
inferrer.typeSchemaEnvironment.partialInfer(
|
||||
gatherer,
|
||||
mapClass.typeParameters,
|
||||
inferredTypes,
|
||||
inferrer.libraryBuilder.library);
|
||||
inferredTypes = inferrer.typeSchemaEnvironment.partialInfer(gatherer,
|
||||
mapClass.typeParameters, null, inferrer.libraryBuilder.library);
|
||||
inferredKeyType = inferredTypes[0];
|
||||
inferredValueType = inferredTypes[1];
|
||||
if (inferrer.dataForTesting != null) {
|
||||
inferrer.dataForTesting!.typeInferenceResult
|
||||
.inferredTypeArguments[node] = inferredTypes;
|
||||
}
|
||||
} else {
|
||||
inferredKeyType = node.keyType;
|
||||
inferredValueType = node.valueType;
|
||||
|
@ -2796,7 +2784,6 @@ class InferenceVisitor
|
|||
formalTypesForSet.add(setType.typeArguments[0]);
|
||||
}
|
||||
|
||||
List<DartType> inferredTypesForSet = <DartType>[noInferredType];
|
||||
// Note: we don't use the previously created gatherer because it was set
|
||||
// up presuming that the literal would be a map; we now know that it
|
||||
// needs to be a set.
|
||||
|
@ -2807,13 +2794,11 @@ class InferenceVisitor
|
|||
typeContext,
|
||||
inferrer.libraryBuilder.library,
|
||||
isConst: node.isConst);
|
||||
inferrer.typeSchemaEnvironment.partialInfer(
|
||||
gatherer,
|
||||
inferrer.coreTypes.setClass.typeParameters,
|
||||
inferredTypesForSet,
|
||||
inferrer.libraryBuilder.library);
|
||||
List<DartType> inferredTypesForSet = inferrer.typeSchemaEnvironment
|
||||
.partialInfer(gatherer, inferrer.coreTypes.setClass.typeParameters,
|
||||
null, inferrer.libraryBuilder.library);
|
||||
gatherer.constrainArguments(formalTypesForSet, actualTypesForSet!);
|
||||
inferrer.typeSchemaEnvironment.upwardsInfer(
|
||||
inferredTypesForSet = inferrer.typeSchemaEnvironment.upwardsInfer(
|
||||
gatherer,
|
||||
inferrer.coreTypes.setClass.typeParameters,
|
||||
inferredTypesForSet,
|
||||
|
@ -2864,11 +2849,15 @@ class InferenceVisitor
|
|||
replacement);
|
||||
}
|
||||
gatherer!.constrainArguments(formalTypes!, actualTypes!);
|
||||
inferrer.typeSchemaEnvironment.upwardsInfer(
|
||||
inferredTypes = inferrer.typeSchemaEnvironment.upwardsInfer(
|
||||
gatherer,
|
||||
mapClass.typeParameters,
|
||||
inferredTypes!,
|
||||
inferrer.libraryBuilder.library);
|
||||
if (inferrer.dataForTesting != null) {
|
||||
inferrer.dataForTesting!.typeInferenceResult
|
||||
.inferredTypeArguments[node] = inferredTypes;
|
||||
}
|
||||
inferredKeyType = inferredTypes[0];
|
||||
inferredValueType = inferredTypes[1];
|
||||
inferrer.instrumentation?.record(
|
||||
|
@ -5985,23 +5974,15 @@ class InferenceVisitor
|
|||
}
|
||||
TypeConstraintGatherer? gatherer;
|
||||
if (inferenceNeeded) {
|
||||
inferredTypes = [const UnknownType()];
|
||||
gatherer = inferrer.typeSchemaEnvironment.setupGenericTypeInference(
|
||||
setType,
|
||||
setClass.typeParameters,
|
||||
typeContext,
|
||||
inferrer.libraryBuilder.library,
|
||||
isConst: node.isConst);
|
||||
inferrer.typeSchemaEnvironment.partialInfer(
|
||||
gatherer,
|
||||
setClass.typeParameters,
|
||||
inferredTypes,
|
||||
inferrer.libraryBuilder.library);
|
||||
inferredTypes = inferrer.typeSchemaEnvironment.partialInfer(gatherer,
|
||||
setClass.typeParameters, null, inferrer.libraryBuilder.library);
|
||||
inferredTypeArgument = inferredTypes[0];
|
||||
if (inferrer.dataForTesting != null) {
|
||||
inferrer.dataForTesting!.typeInferenceResult
|
||||
.inferredTypeArguments[node] = inferredTypes;
|
||||
}
|
||||
} else {
|
||||
inferredTypeArgument = node.typeArgument;
|
||||
}
|
||||
|
@ -6023,11 +6004,15 @@ class InferenceVisitor
|
|||
}
|
||||
if (inferenceNeeded) {
|
||||
gatherer!.constrainArguments(formalTypes!, actualTypes!);
|
||||
inferrer.typeSchemaEnvironment.upwardsInfer(
|
||||
inferredTypes = inferrer.typeSchemaEnvironment.upwardsInfer(
|
||||
gatherer,
|
||||
setClass.typeParameters,
|
||||
inferredTypes!,
|
||||
inferrer.libraryBuilder.library);
|
||||
if (inferrer.dataForTesting != null) {
|
||||
inferrer.dataForTesting!.typeInferenceResult
|
||||
.inferredTypeArguments[node] = inferredTypes;
|
||||
}
|
||||
inferredTypeArgument = inferredTypes[0];
|
||||
inferrer.instrumentation?.record(
|
||||
inferrer.uriForInstrumentation,
|
||||
|
|
|
@ -978,7 +978,7 @@ class TypeInferrerImpl implements TypeInferrer {
|
|||
typeSchemaEnvironment.setupGenericTypeInference(
|
||||
null, typeParameters, null, libraryBuilder.library);
|
||||
gatherer.constrainArguments([onType], [receiverType]);
|
||||
typeSchemaEnvironment.upwardsInfer(
|
||||
inferredTypes = typeSchemaEnvironment.upwardsInfer(
|
||||
gatherer, typeParameters, inferredTypes, libraryBuilder.library);
|
||||
return inferredTypes;
|
||||
}
|
||||
|
@ -2378,8 +2378,6 @@ class TypeInferrerImpl implements TypeInferrer {
|
|||
: coreTypes.objectLegacyRawType)
|
||||
.substituteType(typeContext);
|
||||
}
|
||||
inferredTypes = new List<DartType>.filled(
|
||||
calleeTypeParameters.length, const UnknownType());
|
||||
gatherer = typeSchemaEnvironment.setupGenericTypeInference(
|
||||
isNonNullableByDefault
|
||||
? calleeType.returnType
|
||||
|
@ -2387,8 +2385,8 @@ class TypeInferrerImpl implements TypeInferrer {
|
|||
calleeTypeParameters,
|
||||
typeContext,
|
||||
libraryBuilder.library);
|
||||
typeSchemaEnvironment.partialInfer(gatherer, calleeTypeParameters,
|
||||
inferredTypes, libraryBuilder.library);
|
||||
inferredTypes = typeSchemaEnvironment.partialInfer(
|
||||
gatherer, calleeTypeParameters, null, libraryBuilder.library);
|
||||
substitution =
|
||||
Substitution.fromPairs(calleeTypeParameters, inferredTypes);
|
||||
} else if (explicitTypeArguments != null &&
|
||||
|
@ -2579,8 +2577,8 @@ class TypeInferrerImpl implements TypeInferrer {
|
|||
: const [])
|
||||
.planReconciliationStages()) {
|
||||
if (gatherer != null && !isFirstStage) {
|
||||
typeSchemaEnvironment.partialInfer(gatherer, calleeTypeParameters,
|
||||
inferredTypes!, libraryBuilder.library);
|
||||
inferredTypes = typeSchemaEnvironment.partialInfer(gatherer,
|
||||
calleeTypeParameters, inferredTypes, libraryBuilder.library);
|
||||
substitution =
|
||||
Substitution.fromPairs(calleeTypeParameters, inferredTypes);
|
||||
}
|
||||
|
@ -2706,8 +2704,8 @@ class TypeInferrerImpl implements TypeInferrer {
|
|||
}
|
||||
|
||||
if (inferenceNeeded) {
|
||||
typeSchemaEnvironment.upwardsInfer(gatherer!, calleeTypeParameters,
|
||||
inferredTypes!, libraryBuilder.library);
|
||||
inferredTypes = typeSchemaEnvironment.upwardsInfer(gatherer!,
|
||||
calleeTypeParameters, inferredTypes!, libraryBuilder.library);
|
||||
assert(inferredTypes.every((type) => isKnown(type)),
|
||||
"Unknown type(s) in inferred types: $inferredTypes.");
|
||||
assert(inferredTypes.every((type) => !hasPromotedTypeVariable(type)),
|
||||
|
@ -4255,7 +4253,7 @@ class TypeInferrerImpl implements TypeInferrer {
|
|||
TypeConstraintGatherer gatherer =
|
||||
typeSchemaEnvironment.setupGenericTypeInference(instantiatedType,
|
||||
typeParameters, context, libraryBuilder.library);
|
||||
typeSchemaEnvironment.upwardsInfer(
|
||||
inferredTypes = typeSchemaEnvironment.upwardsInfer(
|
||||
gatherer, typeParameters, inferredTypes, libraryBuilder.library);
|
||||
Substitution substitution =
|
||||
Substitution.fromPairs(typeParameters, inferredTypes);
|
||||
|
|
|
@ -130,14 +130,14 @@ class TypeSchemaEnvironment extends HierarchyBasedTypeEnvironment
|
|||
|
||||
/// Performs partial (either downwards or horizontal) inference, producing a
|
||||
/// set of inferred types that may contain references to the "unknown type".
|
||||
void partialInfer(
|
||||
List<DartType> partialInfer(
|
||||
TypeConstraintGatherer gatherer,
|
||||
List<TypeParameter> typeParametersToInfer,
|
||||
List<DartType> inferredTypes,
|
||||
List<DartType>? previouslyInferredTypes,
|
||||
Library clientLibrary) =>
|
||||
_chooseTypes(
|
||||
gatherer, typeParametersToInfer, inferredTypes, clientLibrary,
|
||||
downwardsInferPhase: true);
|
||||
_chooseTypes(gatherer, typeParametersToInfer, previouslyInferredTypes,
|
||||
clientLibrary,
|
||||
partial: true);
|
||||
|
||||
@override
|
||||
DartType getTypeOfSpecialCasedBinaryOperator(DartType type1, DartType type2,
|
||||
|
@ -255,26 +255,24 @@ class TypeSchemaEnvironment extends HierarchyBasedTypeEnvironment
|
|||
/// Use the given [constraints] to substitute for type variables.
|
||||
///
|
||||
/// [typeParametersToInfer] is the set of type parameters that should be
|
||||
/// substituted for. [inferredTypes] should be a list of the same length.
|
||||
/// substituted for. [previouslyInferredTypes], if present, should be the set
|
||||
/// of types inferred by the last call to this method; it should be a list of
|
||||
/// the same length.
|
||||
///
|
||||
/// If [downwardsInferPhase] is `true`, then we are in the first pass of
|
||||
/// inference, pushing context types down. This means we are allowed to push
|
||||
/// down `?` to precisely represent an unknown type. [inferredTypes] should
|
||||
/// be initially populated with `?`. These `?`s will be replaced, if
|
||||
/// appropriate, with the types that were inferred by downwards inference.
|
||||
/// If [partial] is `true`, then we are not in the final pass of inference. This
|
||||
/// means we are allowed to return `?` to precisely represent an unknown type.
|
||||
///
|
||||
/// If [downwardsInferPhase] is `false`, then we are in the second pass of
|
||||
/// inference, and must not conclude `?` for any type formal. In this pass,
|
||||
/// [inferredTypes] should contain the values from the first pass. They will
|
||||
/// be replaced with the final inferred types.
|
||||
void inferTypeFromConstraints(
|
||||
/// If [partial] is `false`, then we are in the final pass of inference, and
|
||||
/// must not conclude `?` for any type formal.
|
||||
List<DartType> inferTypeFromConstraints(
|
||||
Map<TypeParameter, TypeConstraint> constraints,
|
||||
List<TypeParameter> typeParametersToInfer,
|
||||
List<DartType> inferredTypes,
|
||||
List<DartType>? previouslyInferredTypes,
|
||||
Library clientLibrary,
|
||||
{bool downwardsInferPhase: false}) {
|
||||
List<DartType>? typesFromDownwardsInference =
|
||||
downwardsInferPhase ? null : inferredTypes.toList(growable: false);
|
||||
{bool partial: false}) {
|
||||
List<DartType> inferredTypes =
|
||||
previouslyInferredTypes?.toList(growable: false) ??
|
||||
new List.filled(typeParametersToInfer.length, const UnknownType());
|
||||
|
||||
for (int i = 0; i < typeParametersToInfer.length; i++) {
|
||||
TypeParameter typeParam = typeParametersToInfer[i];
|
||||
|
@ -288,21 +286,25 @@ class TypeSchemaEnvironment extends HierarchyBasedTypeEnvironment
|
|||
}
|
||||
|
||||
TypeConstraint constraint = constraints[typeParam]!;
|
||||
if (downwardsInferPhase) {
|
||||
if (partial) {
|
||||
inferredTypes[i] = _inferTypeParameterFromContext(
|
||||
constraint, extendsConstraint, clientLibrary);
|
||||
previouslyInferredTypes?[i],
|
||||
constraint,
|
||||
extendsConstraint,
|
||||
clientLibrary,
|
||||
isLegacyCovariant: typeParam.isLegacyCovariant);
|
||||
} else {
|
||||
inferredTypes[i] = _inferTypeParameterFromAll(
|
||||
typesFromDownwardsInference![i],
|
||||
previouslyInferredTypes![i],
|
||||
constraint,
|
||||
extendsConstraint,
|
||||
clientLibrary,
|
||||
isContravariant: typeParam.variance == Variance.contravariant,
|
||||
preferUpwardsInference: !typeParam.isLegacyCovariant);
|
||||
isLegacyCovariant: typeParam.isLegacyCovariant);
|
||||
}
|
||||
}
|
||||
|
||||
if (!downwardsInferPhase) {
|
||||
if (!partial) {
|
||||
assert(typeParametersToInfer.length == inferredTypes.length);
|
||||
FreshTypeParameters freshTypeParameters =
|
||||
getFreshTypeParameters(typeParametersToInfer);
|
||||
|
@ -337,6 +339,8 @@ class TypeSchemaEnvironment extends HierarchyBasedTypeEnvironment
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
return inferredTypes;
|
||||
}
|
||||
|
||||
@override
|
||||
|
@ -502,44 +506,48 @@ class TypeSchemaEnvironment extends HierarchyBasedTypeEnvironment
|
|||
|
||||
/// Performs upwards inference, producing a final set of inferred types that
|
||||
/// does not contain references to the "unknown type".
|
||||
void upwardsInfer(
|
||||
List<DartType> upwardsInfer(
|
||||
TypeConstraintGatherer gatherer,
|
||||
List<TypeParameter> typeParametersToInfer,
|
||||
List<DartType> inferredTypes,
|
||||
List<DartType> previouslyInferredTypes,
|
||||
Library clientLibrary) =>
|
||||
_chooseTypes(
|
||||
gatherer, typeParametersToInfer, inferredTypes, clientLibrary,
|
||||
downwardsInferPhase: false);
|
||||
_chooseTypes(gatherer, typeParametersToInfer, previouslyInferredTypes,
|
||||
clientLibrary,
|
||||
partial: false);
|
||||
|
||||
/// Computes (or recomputes) a set of [inferredTypes] based on the constraints
|
||||
/// that have been recorded so far.
|
||||
void _chooseTypes(
|
||||
List<DartType> _chooseTypes(
|
||||
TypeConstraintGatherer gatherer,
|
||||
List<TypeParameter> typeParametersToInfer,
|
||||
List<DartType> inferredTypes,
|
||||
List<DartType>? previouslyInferredTypes,
|
||||
Library clientLibrary,
|
||||
{required bool downwardsInferPhase}) {
|
||||
inferTypeFromConstraints(gatherer.computeConstraints(clientLibrary),
|
||||
typeParametersToInfer, inferredTypes, clientLibrary,
|
||||
downwardsInferPhase: downwardsInferPhase);
|
||||
{required bool partial}) {
|
||||
List<DartType> inferredTypes = inferTypeFromConstraints(
|
||||
gatherer.computeConstraints(clientLibrary),
|
||||
typeParametersToInfer,
|
||||
previouslyInferredTypes,
|
||||
clientLibrary,
|
||||
partial: partial);
|
||||
|
||||
for (int i = 0; i < inferredTypes.length; i++) {
|
||||
inferredTypes[i] = demoteTypeInLibrary(inferredTypes[i], clientLibrary);
|
||||
}
|
||||
return inferredTypes;
|
||||
}
|
||||
|
||||
DartType _inferTypeParameterFromAll(
|
||||
DartType typeFromContextInference,
|
||||
DartType typeFromPreviousInference,
|
||||
TypeConstraint constraint,
|
||||
DartType? extendsConstraint,
|
||||
Library clientLibrary,
|
||||
{bool isContravariant: false,
|
||||
bool preferUpwardsInference: false}) {
|
||||
// See if we already fixed this type from downwards inference.
|
||||
// If so, then we aren't allowed to change it based on argument types unless
|
||||
// [preferUpwardsInference] is true.
|
||||
if (!preferUpwardsInference && isKnown(typeFromContextInference)) {
|
||||
return typeFromContextInference;
|
||||
bool isLegacyCovariant: true}) {
|
||||
// See if we already fixed this type in a previous inference step.
|
||||
// If so, then we aren't allowed to change it unless [isLegacyCovariant] is
|
||||
// false.
|
||||
if (isLegacyCovariant && isKnown(typeFromPreviousInference)) {
|
||||
return typeFromPreviousInference;
|
||||
}
|
||||
|
||||
if (extendsConstraint != null) {
|
||||
|
@ -559,8 +567,21 @@ class TypeSchemaEnvironment extends HierarchyBasedTypeEnvironment
|
|||
isContravariant: isContravariant);
|
||||
}
|
||||
|
||||
DartType _inferTypeParameterFromContext(TypeConstraint constraint,
|
||||
DartType? extendsConstraint, Library clientLibrary) {
|
||||
DartType _inferTypeParameterFromContext(
|
||||
DartType? typeFromPreviousInference,
|
||||
TypeConstraint constraint,
|
||||
DartType? extendsConstraint,
|
||||
Library clientLibrary,
|
||||
{bool isLegacyCovariant: true}) {
|
||||
// See if we already fixed this type in a previous inference step.
|
||||
// If so, then we aren't allowed to change it unless [isLegacyCovariant] is
|
||||
// false.
|
||||
if (isLegacyCovariant &&
|
||||
typeFromPreviousInference != null &&
|
||||
isKnown(typeFromPreviousInference)) {
|
||||
return typeFromPreviousInference;
|
||||
}
|
||||
|
||||
DartType t = solveTypeConstraint(
|
||||
constraint,
|
||||
clientLibrary.isNonNullableByDefault
|
||||
|
|
|
@ -156,10 +156,9 @@ abstract class TypeSchemaEnvironmentTestBase {
|
|||
? null
|
||||
: functionTypeNode.positionalParameters;
|
||||
|
||||
List<DartType> inferredTypeNodes;
|
||||
List<DartType>? inferredTypeNodes;
|
||||
if (inferredTypesFromDownwardPhase == null) {
|
||||
inferredTypeNodes = new List<DartType>.generate(
|
||||
typeParameterNodesToInfer.length, (_) => new UnknownType());
|
||||
inferredTypeNodes = null;
|
||||
} else {
|
||||
inferredTypeNodes = parseTypes(inferredTypesFromDownwardPhase);
|
||||
}
|
||||
|
@ -171,12 +170,12 @@ abstract class TypeSchemaEnvironmentTestBase {
|
|||
returnContextTypeNode,
|
||||
testLibrary);
|
||||
if (formalTypeNodes == null) {
|
||||
typeSchemaEnvironment.partialInfer(gatherer, typeParameterNodesToInfer,
|
||||
inferredTypeNodes, testLibrary);
|
||||
inferredTypeNodes = typeSchemaEnvironment.partialInfer(gatherer,
|
||||
typeParameterNodesToInfer, inferredTypeNodes, testLibrary);
|
||||
} else {
|
||||
gatherer.constrainArguments(formalTypeNodes, actualTypeNodes!);
|
||||
typeSchemaEnvironment.upwardsInfer(gatherer, typeParameterNodesToInfer,
|
||||
inferredTypeNodes, testLibrary);
|
||||
inferredTypeNodes = typeSchemaEnvironment.upwardsInfer(gatherer,
|
||||
typeParameterNodesToInfer, inferredTypeNodes!, testLibrary);
|
||||
}
|
||||
|
||||
assert(
|
||||
|
@ -204,18 +203,15 @@ abstract class TypeSchemaEnvironmentTestBase {
|
|||
TypeConstraint typeConstraint = parseConstraint(constraints);
|
||||
DartType expectedTypeNode = parseType(expected);
|
||||
TypeParameter typeParameterNode = typeParameterNodes.single;
|
||||
List<DartType> inferredTypeNodes = <DartType>[
|
||||
inferredTypeFromDownwardPhase == null
|
||||
? new UnknownType()
|
||||
: parseType(inferredTypeFromDownwardPhase)
|
||||
];
|
||||
List<DartType>? inferredTypeNodes = inferredTypeFromDownwardPhase == null
|
||||
? null
|
||||
: <DartType>[parseType(inferredTypeFromDownwardPhase)];
|
||||
|
||||
typeSchemaEnvironment.inferTypeFromConstraints(
|
||||
{typeParameterNode: typeConstraint},
|
||||
[typeParameterNode],
|
||||
inferredTypeNodes,
|
||||
testLibrary,
|
||||
downwardsInferPhase: downwardsInferPhase);
|
||||
inferredTypeNodes = typeSchemaEnvironment.inferTypeFromConstraints({
|
||||
typeParameterNode: typeConstraint
|
||||
}, [
|
||||
typeParameterNode
|
||||
], inferredTypeNodes, testLibrary, partial: downwardsInferPhase);
|
||||
|
||||
expect(inferredTypeNodes.single, expectedTypeNode);
|
||||
});
|
||||
|
|
|
@ -444,6 +444,7 @@ noted
|
|||
nottest
|
||||
null'ed
|
||||
numerator
|
||||
nums
|
||||
ob
|
||||
obool
|
||||
observable
|
||||
|
|
|
@ -0,0 +1,15 @@
|
|||
// Copyright (c) 2022, the Dart project authors. Please see the AUTHORS file
|
||||
// for details. All rights reserved. Use of this source code is governed by a
|
||||
// BSD-style license that can be found in the LICENSE file.
|
||||
|
||||
// Tests that when the feature is enabled, types supplied by downward inference
|
||||
// are preferred over those available via horizontal inference.
|
||||
//
|
||||
// The way this happens is that the type parameter is "fixed" after the downward
|
||||
// inference phase and is not changed in further inference phases.
|
||||
|
||||
testProductOfNums(List<num> values) {
|
||||
num a = values.fold(1, (p, v) => p * v);
|
||||
}
|
||||
|
||||
main() {}
|
|
@ -0,0 +1,2 @@
|
|||
testProductOfNums(List<num> values) {}
|
||||
main() {}
|
|
@ -0,0 +1,2 @@
|
|||
main() {}
|
||||
testProductOfNums(List<num> values) {}
|
|
@ -0,0 +1,8 @@
|
|||
library /*isNonNullableByDefault*/;
|
||||
import self as self;
|
||||
import "dart:core" as core;
|
||||
|
||||
static method testProductOfNums(core::List<core::num> values) → dynamic {
|
||||
core::num a = values.{core::Iterable::fold}<core::num>(1, (core::num p, core::num v) → core::num => p.{core::num::*}(v){(core::num) → core::num}){(core::num, (core::num, core::num) → core::num) → core::num};
|
||||
}
|
||||
static method main() → dynamic {}
|
|
@ -0,0 +1,8 @@
|
|||
library /*isNonNullableByDefault*/;
|
||||
import self as self;
|
||||
import "dart:core" as core;
|
||||
|
||||
static method testProductOfNums(core::List<core::num> values) → dynamic {
|
||||
core::num a = values.{core::Iterable::fold}<core::num>(1, (core::num p, core::num v) → core::num => p.{core::num::*}(v){(core::num) → core::num}){(core::num, (core::num, core::num) → core::num) → core::num};
|
||||
}
|
||||
static method main() → dynamic {}
|
|
@ -0,0 +1,8 @@
|
|||
library /*isNonNullableByDefault*/;
|
||||
import self as self;
|
||||
import "dart:core" as core;
|
||||
|
||||
static method testProductOfNums(core::List<core::num> values) → dynamic
|
||||
;
|
||||
static method main() → dynamic
|
||||
;
|
|
@ -0,0 +1,8 @@
|
|||
library /*isNonNullableByDefault*/;
|
||||
import self as self;
|
||||
import "dart:core" as core;
|
||||
|
||||
static method testProductOfNums(core::List<core::num> values) → dynamic {
|
||||
core::num a = values.{core::Iterable::fold}<core::num>(1, (core::num p, core::num v) → core::num => p.{core::num::*}(v){(core::num) → core::num}){(core::num, (core::num, core::num) → core::num) → core::num};
|
||||
}
|
||||
static method main() → dynamic {}
|
|
@ -0,0 +1,24 @@
|
|||
// Copyright (c) 2022, the Dart project authors. Please see the AUTHORS file
|
||||
// for details. All rights reserved. Use of this source code is governed by a
|
||||
// BSD-style license that can be found in the LICENSE file.
|
||||
|
||||
// Tests that when the feature is enabled, types supplied by downward inference
|
||||
// are preferred over those available via horizontal inference.
|
||||
//
|
||||
// The way this happens is that the type parameter is "fixed" after the downward
|
||||
// inference phase and is not changed in further inference phases.
|
||||
|
||||
// SharedOptions=--enable-experiment=inference-update-1
|
||||
|
||||
import '../static_type_helper.dart';
|
||||
|
||||
testProductOfNums(List<num> values) {
|
||||
num a = values.fold(
|
||||
1,
|
||||
(p, v) =>
|
||||
(p..expectStaticType<Exactly<num>>()) *
|
||||
(v..expectStaticType<Exactly<num>>()))
|
||||
..expectStaticType<Exactly<num>>();
|
||||
}
|
||||
|
||||
main() {}
|
Loading…
Reference in a new issue