[vm/nnbd] Preparation for type testing stubs in NNBD strong mode

This change includes the following:

* Decision to use subtype range checks is revised to work regardless of
  the NNBD mode.

* Handling of dynamic/Object/void is removed from SlowTypeTest stub
  as we use TopTypeTypeTest stub for the top types. That handling was
  unnecessary but harmless. However, in NNBD strong mode it becomes
  incorrect for non-nullable Object.

* TypeTestingStubGenerator::OptimizedCodeForType now falls back to
  TypeTestingStubGenerator::DefaultCodeForType instead of directly
  using StubCode::DefaultTypeTest(). This refactoring is needed to
  avoid duplicating more complicated logic in
  TypeTestingStubGenerator::DefaultCodeForType in the future.


Change-Id: I09a3f3d9844c02263cd2dac7922c1bdd5264ee55
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/135628
Commit-Queue: Alexander Markov <alexmarkov@google.com>
Reviewed-by: Régis Crelier <regis@google.com>
Reviewed-by: Martin Kustermann <kustermann@google.com>
This commit is contained in:
Alexander Markov 2020-02-14 18:27:07 +00:00 committed by commit-bot@chromium.org
parent c84b9230cc
commit 0c5b98bb15
6 changed files with 7 additions and 44 deletions

View file

@@ -1676,8 +1676,8 @@ void Precompiler::AttachOptimizedTypeTestingStub() {
}
}
ASSERT(Object::dynamic_type().type_test_stub_entry_point() !=
StubCode::DefaultTypeTest().EntryPoint());
ASSERT(Object::dynamic_type().type_test_stub_entry_point() ==
StubCode::TopTypeTypeTest().EntryPoint());
}
void Precompiler::DropTypes() {

View file

@@ -387,8 +387,7 @@ bool HierarchyInfo::CanUseSubtypeRangeCheckFor(const AbstractType& type) {
// arguments are not "dynamic" but instantiated-to-bounds.
const Type& rare_type =
Type::Handle(zone, Type::RawCast(type_class.RareType()));
// TODO(dartbug.com/38845): revise when supporting NNBD strong mode.
if (!rare_type.IsEquivalent(type, TypeEquality::kIgnoreNullability)) {
if (!rare_type.IsSubtypeOf(NNBDMode::kLegacyLib, type, Heap::kNew)) {
ASSERT(type.arguments() != TypeArguments::null());
return false;
}

View file

@@ -3008,18 +3008,6 @@ void StubCodeCompiler::GenerateSlowTypeTestStub(Assembler* assembler) {
__ Bind(&call_runtime);
// We cannot really ensure here that dynamic/Object/void never occur here
// (though it is guaranteed at dart_precompiled_runtime time). This is
// because we do constant evaluation with default stubs and only install
// optimized versions before writing out the AOT snapshot.
// So dynamic/Object/void will run with default stub in constant evaluation.
__ CompareObject(kDstTypeReg, CastHandle<Object>(DynamicType()));
__ BranchIf(EQUAL, &done);
__ CompareObject(kDstTypeReg, CastHandle<Object>(ObjectType()));
__ BranchIf(EQUAL, &done);
__ CompareObject(kDstTypeReg, CastHandle<Object>(VoidType()));
__ BranchIf(EQUAL, &done);
InvokeTypeCheckFromTypeTestStub(assembler, kTypeCheckFromSlowStub);
__ Bind(&done);

View file

@@ -3132,18 +3132,6 @@ void StubCodeCompiler::GenerateSlowTypeTestStub(Assembler* assembler) {
__ Bind(&call_runtime);
// We cannot really ensure here that dynamic/Object/void never occur here
// (though it is guaranteed at dart_precompiled_runtime time). This is
// because we do constant evaluation with default stubs and only install
// optimized versions before writing out the AOT snapshot.
// So dynamic/Object/void will run with default stub in constant evaluation.
__ CompareObject(kDstTypeReg, CastHandle<Object>(DynamicType()));
__ BranchIf(EQUAL, &done);
__ CompareObject(kDstTypeReg, CastHandle<Object>(ObjectType()));
__ BranchIf(EQUAL, &done);
__ CompareObject(kDstTypeReg, CastHandle<Object>(VoidType()));
__ BranchIf(EQUAL, &done);
InvokeTypeCheckFromTypeTestStub(assembler, kTypeCheckFromSlowStub);
__ Bind(&done);

View file

@@ -3086,18 +3086,6 @@ void StubCodeCompiler::GenerateSlowTypeTestStub(Assembler* assembler) {
__ Bind(&call_runtime);
// We cannot really ensure here that dynamic/Object/void never occur here
// (though it is guaranteed at dart_precompiled_runtime time). This is
// because we do constant evaluation with default stubs and only install
// optimized versions before writing out the AOT snapshot.
// So dynamic/Object/void will run with default stub in constant evaluation.
__ CompareObject(kDstTypeReg, CastHandle<Object>(DynamicType()));
__ BranchIf(EQUAL, &done);
__ CompareObject(kDstTypeReg, CastHandle<Object>(ObjectType()));
__ BranchIf(EQUAL, &done);
__ CompareObject(kDstTypeReg, CastHandle<Object>(VoidType()));
__ BranchIf(EQUAL, &done);
InvokeTypeCheckFromTypeTestStub(assembler, kTypeCheckFromSlowStub);
__ Bind(&done);

View file

@@ -143,7 +143,8 @@ RawCode* TypeTestingStubGenerator::OptimizedCodeForType(
ASSERT(StubCode::HasBeenInitialized());
if (type.IsTypeRef()) {
return StubCode::DefaultTypeTest().raw();
return TypeTestingStubGenerator::DefaultCodeForType(
type, /*lazy_specialize=*/false);
}
const intptr_t cid = type.type_class_id();
@@ -161,17 +162,16 @@ RawCode* TypeTestingStubGenerator::OptimizedCodeForType(
}
// Fall back to default.
return StubCode::DefaultTypeTest().raw();
#else
// In the precompiled runtime we cannot lazily create new optimized type
// testing stubs, so if we cannot find one, we'll just return the default
// one.
return StubCode::DefaultTypeTest().raw();
#endif // !defined(DART_PRECOMPILED_RUNTIME)
}
}
#endif // !defined(TARGET_ARCH_IA32)
return TypeTestingStubGenerator::DefaultCodeForType(type, false);
return TypeTestingStubGenerator::DefaultCodeForType(
type, /*lazy_specialize=*/false);
}
#if !defined(TARGET_ARCH_IA32)