Mirror of https://github.com/dart-lang/sdk, synced 2024-11-02 12:24:24 +00:00
[vm/nnbd] Type testing stubs in NNBD strong mode
Issue: https://github.com/dart-lang/sdk/issues/38845
Change-Id: I82e7a1b3c4220abdd2a215529230fbb158adbe9e
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/135627
Commit-Queue: Alexander Markov <alexmarkov@google.com>
Reviewed-by: Martin Kustermann <kustermann@google.com>
Reviewed-by: Régis Crelier <regis@google.com>
This commit is contained in:
parent b3a1299f22
commit 77e1c72d04

27 changed files with 534 additions and 185 deletions
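The change introduces separate DefaultNullableTypeTest and LazySpecializeNullableTypeTest stubs and selects between the nullable and non-nullable variants with the new Instance::NullIsAssignableTo predicate. As a Dart-level illustration (not part of this CL; class and function names are made up), this is the strong-mode behaviour the stub selection has to respect:

class C {}

T castTo<T>(dynamic x) => x as T;

void main() {
  castTo<C?>(null); // OK: null is assignable to a nullable destination type.
  castTo<C>(C());   // OK: a C instance is assignable to C.
  try {
    castTo<C>(null); // Throws TypeError under strong null safety: null is not
  } on TypeError {   // assignable to non-nullable C (weak mode would allow it).
    print('null rejected by non-nullable C');
  }
}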
@@ -403,6 +403,11 @@ class Assembler : public AssemblerBase {
    cmp(value, Operand(TMP));
  }

  void CompareTypeNullabilityWith(Register type, int8_t value) {
    ldrb(TMP, FieldAddress(type, compiler::target::Type::nullability_offset()));
    cmp(TMP, Operand(value));
  }

  // Misc. functionality
  bool use_far_branches() const {
    return FLAG_use_far_branches || use_far_branches_;

@@ -482,6 +482,12 @@ class Assembler : public AssemblerBase {
    cmp(value, Operand(TMP));
  }

  void CompareTypeNullabilityWith(Register type, int8_t value) {
    ldr(TMP, FieldAddress(type, compiler::target::Type::nullability_offset()),
        kUnsignedByte);
    cmp(TMP, Operand(value));
  }

  bool use_far_branches() const {
    return FLAG_use_far_branches || use_far_branches_;
  }

@@ -857,6 +857,11 @@ class Assembler : public AssemblerBase {
    cmpq(value, address);
  }

  void CompareTypeNullabilityWith(Register type, int8_t value) {
    cmpb(FieldAddress(type, compiler::target::Type::nullability_offset()),
         Immediate(value));
  }

  void RestoreCodePointer();
  void LoadPoolPointer(Register pp = PP);
@ -2292,8 +2292,6 @@ void FlowGraphCompiler::GenerateAssertAssignableViaTypeTestingStub(
|
|||
compiler::Label* done) {
|
||||
TypeUsageInfo* type_usage_info = thread()->type_usage_info();
|
||||
|
||||
// TODO(regis): Take nnbd_mode() into account and pass it in a register.
|
||||
|
||||
// If the int type is assignable to [dst_type] we special case it on the
|
||||
// caller side!
|
||||
const Type& int_type = Type::Handle(zone(), Type::IntType());
|
||||
|
@ -2311,6 +2309,15 @@ void FlowGraphCompiler::GenerateAssertAssignableViaTypeTestingStub(
|
|||
// We can handle certain types very efficiently on the call site (with a
|
||||
// bailout to the normal stub, which will do a runtime call).
|
||||
if (dst_type.IsTypeParameter()) {
|
||||
// In NNBD strong mode we need to handle null instance before calling TTS
|
||||
// if type parameter is nullable or legacy because type parameter can be
|
||||
// instantiated with a non-nullable type which rejects null.
|
||||
// In NNBD weak mode or if type parameter is non-nullable or has
|
||||
// undetermined nullability null instance is correctly handled by TTS.
|
||||
if (FLAG_null_safety && (dst_type.IsNullable() || dst_type.IsLegacy())) {
|
||||
__ CompareObject(instance_reg, Object::null_object());
|
||||
__ BranchIf(EQUAL, done);
|
||||
}
|
||||
const TypeParameter& type_param = TypeParameter::Cast(dst_type);
|
||||
const Register kTypeArgumentsReg = type_param.IsClassTypeParameter()
|
||||
? instantiator_type_args_reg
|
||||
|
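The comment block above describes the call-site null check for destination types that are nullable or legacy type parameters: the parameter may be instantiated with a non-nullable type whose specialized stub rejects null, so the caller has to accept null itself. A small illustrative Dart sketch of that situation (not from the CL):

class Box<T> {
  T? value; // The destination type of the implicit setter is `T?`.
}

void main() {
  final box = Box<int>(); // T is instantiated with non-nullable `int`.
  // Assigning null must still succeed: the static destination type is `T?`,
  // even though a type test specialized for the instantiation `int` would
  // reject null.
  box.value = null;
  print(box.value); // null
}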
@@ -2338,10 +2345,10 @@
  const bool can_use_simple_cid_range_test =
      hi->CanUseSubtypeRangeCheckFor(dst_type);
  if (can_use_simple_cid_range_test) {
    const CidRangeVector& ranges =
        hi->SubtypeRangesForClass(type_class,
                                  /*include_abstract=*/false,
                                  /*exclude_null=*/false);
    const CidRangeVector& ranges = hi->SubtypeRangesForClass(
        type_class,
        /*include_abstract=*/false,
        /*exclude_null=*/!Instance::NullIsAssignableTo(dst_type));
    if (ranges.length() <= kMaxNumberOfCidRangesToTest) {
      if (is_non_smi) {
        __ LoadClassId(scratch_reg, instance_reg);

@@ -701,8 +701,8 @@ void FlowGraphCompiler::GenerateAssertAssignable(TokenPosition token_pos,
  ASSERT(!dst_type.IsNull());
  ASSERT(dst_type.IsFinalized());
  // Assignable check is skipped in FlowGraphBuilder, not here.
  ASSERT(!dst_type.IsDynamicType() && !dst_type.IsObjectType() &&
         !dst_type.IsVoidType());
  ASSERT(!dst_type.IsTopTypeForAssignability());

  const Register kInstantiatorTypeArgumentsReg = R2;
  const Register kFunctionTypeArgumentsReg = R1;

@@ -712,9 +712,10 @@
  } else {
    compiler::Label is_assignable_fast, is_assignable, runtime_call;

    // A null object is always assignable and is returned as result.
    __ CompareObject(R0, Object::null_object());
    __ b(&is_assignable_fast, EQ);
    if (Instance::NullIsAssignableTo(dst_type)) {
      __ CompareObject(R0, Object::null_object());
      __ b(&is_assignable_fast, EQ);
    }

    __ PushList((1 << kInstantiatorTypeArgumentsReg) |
                (1 << kFunctionTypeArgumentsReg));
@@ -678,8 +678,8 @@ void FlowGraphCompiler::GenerateAssertAssignable(TokenPosition token_pos,
  ASSERT(!dst_type.IsNull());
  ASSERT(dst_type.IsFinalized());
  // Assignable check is skipped in FlowGraphBuilder, not here.
  ASSERT(!dst_type.IsDynamicType() && !dst_type.IsObjectType() &&
         !dst_type.IsVoidType());
  ASSERT(!dst_type.IsTopTypeForAssignability());

  const Register kInstantiatorTypeArgumentsReg = R1;
  const Register kFunctionTypeArgumentsReg = R2;

@@ -689,9 +689,10 @@
  } else {
    compiler::Label is_assignable_fast, is_assignable, runtime_call;

    // A null object is always assignable and is returned as result.
    __ CompareObject(R0, Object::null_object());
    __ b(&is_assignable_fast, EQ);
    if (Instance::NullIsAssignableTo(dst_type)) {
      __ CompareObject(R0, Object::null_object());
      __ b(&is_assignable_fast, EQ);
    }

    __ PushPair(kFunctionTypeArgumentsReg, kInstantiatorTypeArgumentsReg);
@@ -682,16 +682,18 @@ void FlowGraphCompiler::GenerateAssertAssignable(TokenPosition token_pos,
  ASSERT(!dst_type.IsNull());
  ASSERT(dst_type.IsFinalized());
  // Assignable check is skipped in FlowGraphBuilder, not here.
  ASSERT(!dst_type.IsDynamicType() && !dst_type.IsObjectType() &&
         !dst_type.IsVoidType());
  ASSERT(!dst_type.IsTopTypeForAssignability());

  __ pushl(EDX);  // Store instantiator type arguments.
  __ pushl(ECX);  // Store function type arguments.
  // A null object is always assignable and is returned as result.
  const compiler::Immediate& raw_null =
      compiler::Immediate(reinterpret_cast<intptr_t>(Object::null()));

  compiler::Label is_assignable, runtime_call;
  __ cmpl(EAX, raw_null);
  __ j(EQUAL, &is_assignable);
  if (Instance::NullIsAssignableTo(dst_type)) {
    const compiler::Immediate& raw_null =
        compiler::Immediate(reinterpret_cast<intptr_t>(Object::null()));
    __ cmpl(EAX, raw_null);
    __ j(EQUAL, &is_assignable);
  }

  // Generate inline type check, linking to runtime call if not assignable.
  SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone());
@@ -695,8 +695,7 @@ void FlowGraphCompiler::GenerateAssertAssignable(TokenPosition token_pos,
  ASSERT(!dst_type.IsNull());
  ASSERT(dst_type.IsFinalized());
  // Assignable check is skipped in FlowGraphBuilder, not here.
  ASSERT(!dst_type.IsDynamicType() && !dst_type.IsObjectType() &&
         !dst_type.IsVoidType());
  ASSERT(!dst_type.IsTopTypeForAssignability());

  const Register kInstantiatorTypeArgumentsReg = RDX;
  const Register kFunctionTypeArgumentsReg = RCX;

@@ -707,9 +706,10 @@
  } else {
    compiler::Label is_assignable, runtime_call;

    // A null object is always assignable and is returned as result.
    __ CompareObject(RAX, Object::null_object());
    __ j(EQUAL, &is_assignable);
    if (Instance::NullIsAssignableTo(dst_type)) {
      __ CompareObject(RAX, Object::null_object());
      __ j(EQUAL, &is_assignable);
    }

    // Generate inline type check, linking to runtime call if not assignable.
    SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone());
@@ -115,20 +115,16 @@ const CidRangeVector& HierarchyInfo::SubtypeRangesForClass(
    bool exclude_null) {
  ClassTable* table = thread()->isolate()->class_table();
  const intptr_t cid_count = table->NumCids();
  CidRangeVector** cid_ranges = nullptr;
  std::unique_ptr<CidRangeVector[]>* cid_ranges = nullptr;
  if (include_abstract) {
    ASSERT(!exclude_null);
    cid_ranges = &cid_subtype_ranges_abstract_nullable_;
  } else if (exclude_null) {
    ASSERT(!include_abstract);
    cid_ranges = &cid_subtype_ranges_nonnullable_;
    cid_ranges = exclude_null ? &cid_subtype_ranges_abstract_nonnullable_
                              : &cid_subtype_ranges_abstract_nullable_;
  } else {
    ASSERT(!include_abstract);
    ASSERT(!exclude_null);
    cid_ranges = &cid_subtype_ranges_nullable_;
    cid_ranges = exclude_null ? &cid_subtype_ranges_nonnullable_
                              : &cid_subtype_ranges_nullable_;
  }
  if (*cid_ranges == nullptr) {
    *cid_ranges = new CidRangeVector[cid_count];
    cid_ranges->reset(new CidRangeVector[cid_count]);
  }
  CidRangeVector& ranges = (*cid_ranges)[klass.id()];
  if (ranges.length() == 0) {

@@ -147,8 +143,8 @@ const CidRangeVector& HierarchyInfo::SubclassRangesForClass(
    const Class& klass) {
  ClassTable* table = thread()->isolate()->class_table();
  const intptr_t cid_count = table->NumCids();
  if (cid_subclass_ranges_ == NULL) {
    cid_subclass_ranges_ = new CidRangeVector[cid_count];
  if (cid_subclass_ranges_ == nullptr) {
    cid_subclass_ranges_.reset(new CidRangeVector[cid_count]);
  }

  CidRangeVector& ranges = cid_subclass_ranges_[klass.id()];

@@ -270,7 +266,6 @@ void HierarchyInfo::BuildRangesForJIT(ClassTable* table,
                      exclude_null);
    return;
  }
  ASSERT(!exclude_null);

  Zone* zone = thread()->zone();
  GrowableArray<intptr_t> cids;

@@ -2957,7 +2952,7 @@ Definition* AssertAssignableInstr::Canonicalize(FlowGraph* flow_graph) {
    instantiator_type_arguments()->BindTo(flow_graph->constant_null());
    function_type_arguments()->BindTo(flow_graph->constant_null());

    if (new_dst_type.IsDynamicType() || new_dst_type.IsObjectType() ||
    if (new_dst_type.IsTopTypeForAssignability() ||
        (FLAG_eliminate_type_checks &&
         value()->Type()->IsAssignableTo(nnbd_mode(), new_dst_type))) {
      return value()->definition();
@@ -5,6 +5,7 @@
#ifndef RUNTIME_VM_COMPILER_BACKEND_IL_H_
#define RUNTIME_VM_COMPILER_BACKEND_IL_H_

#include <memory>
#include <utility>

#include "vm/allocation.h"

@@ -230,27 +231,16 @@ class HierarchyInfo : public ThreadStackResource {
 public:
  explicit HierarchyInfo(Thread* thread)
      : ThreadStackResource(thread),
        cid_subtype_ranges_nullable_(NULL),
        cid_subtype_ranges_abstract_nullable_(NULL),
        cid_subtype_ranges_nonnullable_(NULL),
        cid_subclass_ranges_(NULL) {
        cid_subtype_ranges_nullable_(),
        cid_subtype_ranges_abstract_nullable_(),
        cid_subtype_ranges_nonnullable_(),
        cid_subtype_ranges_abstract_nonnullable_(),
        cid_subclass_ranges_() {
    thread->set_hierarchy_info(this);
  }

  ~HierarchyInfo() {
    thread()->set_hierarchy_info(NULL);

    delete[] cid_subtype_ranges_nullable_;
    cid_subtype_ranges_nullable_ = NULL;

    delete[] cid_subtype_ranges_abstract_nullable_;
    cid_subtype_ranges_abstract_nullable_ = NULL;

    delete[] cid_subtype_ranges_nonnullable_;
    cid_subtype_ranges_nonnullable_ = NULL;

    delete[] cid_subclass_ranges_;
    cid_subclass_ranges_ = NULL;
  }

  const CidRangeVector& SubtypeRangesForClass(const Class& klass,

@@ -302,10 +292,11 @@ class HierarchyInfo : public ThreadStackResource {
                           bool include_abstract,
                           bool exclude_null);

  CidRangeVector* cid_subtype_ranges_nullable_;
  CidRangeVector* cid_subtype_ranges_abstract_nullable_;
  CidRangeVector* cid_subtype_ranges_nonnullable_;
  CidRangeVector* cid_subclass_ranges_;
  std::unique_ptr<CidRangeVector[]> cid_subtype_ranges_nullable_;
  std::unique_ptr<CidRangeVector[]> cid_subtype_ranges_abstract_nullable_;
  std::unique_ptr<CidRangeVector[]> cid_subtype_ranges_nonnullable_;
  std::unique_ptr<CidRangeVector[]> cid_subtype_ranges_abstract_nonnullable_;
  std::unique_ptr<CidRangeVector[]> cid_subclass_ranges_;
};

// An embedded container with N elements of type T. Used (with partial
@@ -816,7 +816,7 @@ bool CompileType::IsSubtypeOf(NNBDMode mode, const AbstractType& other) {
}

bool CompileType::IsAssignableTo(NNBDMode mode, const AbstractType& other) {
  if (other.IsTopType()) {
  if (other.IsTopTypeForAssignability()) {
    return true;
  }

@@ -826,19 +826,8 @@ bool CompileType::IsAssignableTo(NNBDMode mode, const AbstractType& other) {
  // Consider the compile type of the value.
  const AbstractType& compile_type = *ToAbstractType();

  if (compile_type.IsNullType()) {
    if (!FLAG_null_safety) {
      // In weak mode, 'null' is assignable to any type.
      return true;
    }
    // In strong mode, 'null' is assignable to any nullable or legacy type.
    // It is also assignable to FutureOr<T> if it is assignable to T.
    const AbstractType& unwrapped_other =
        AbstractType::Handle(other.UnwrapFutureOr());
    // A nullable or legacy type parameter will still be either nullable or
    // legacy after instantiation.
    return unwrapped_other.IsNullable() || unwrapped_other.IsLegacy();
    return Instance::NullIsAssignableTo(other);
  }
  return compile_type.IsSubtypeOf(mode, other, Heap::kOld);
}

@@ -858,7 +858,7 @@ bool CallSpecializer::TryInlineInstanceSetter(InstanceCallInstr* instr) {
  // Build an AssertAssignable if necessary.
  const AbstractType& dst_type = AbstractType::ZoneHandle(zone(), field.type());
  if (I->argument_type_checks() && !dst_type.IsTopType()) {
  if (I->argument_type_checks() && !dst_type.IsTopTypeForAssignability()) {
    // Compute if we need to type check the value. Always type check if
    // at a dynamic invocation.
    bool needs_check = true;
@@ -418,8 +418,8 @@ RawArray* BytecodeReaderHelper::CreateForwarderChecks(
  const bool has_optional_parameters = function.HasOptionalParameters();
  for (intptr_t i = function.NumImplicitParameters(); i < num_params; ++i) {
    type = function.ParameterTypeAt(i);
    if (!type.IsTopType() && !is_generic_covariant_impl.Contains(i) &&
        !is_covariant.Contains(i)) {
    if (!type.IsTopTypeForAssignability() &&
        !is_generic_covariant_impl.Contains(i) && !is_covariant.Contains(i)) {
      name = function.ParameterNameAt(i);
      intptr_t index;
      if (i >= num_pos_params) {

@@ -3685,7 +3685,7 @@ Fragment StreamingFlowGraphBuilder::BuildAsExpression(TokenPosition* p) {
  Fragment instructions = BuildExpression();  // read operand.

  const AbstractType& type = T.BuildType();  // read type.
  if (type.IsInstantiated() && type.IsTopType()) {
  if (type.IsInstantiated() && type.IsTopTypeForAssignability()) {
    // We already evaluated the operand on the left and just leave it there as
    // the result of the `obj as dynamic` expression.
  } else {
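The BuildAsExpression change above routes `as` casts whose target is a top type for assignability through the no-check path: the operand is simply left as the result. Illustrative Dart (not from the CL):

Object? passThrough(Object? obj) {
  // Casts to top types such as `dynamic` or `Object?` can never fail,
  // so no type check needs to be generated for them.
  final a = obj as dynamic;
  final b = obj as Object?;
  return b ?? a;
}

void main() {
  print(passThrough(42));   // 42
  print(passThrough(null)); // null
}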
@@ -1589,8 +1589,7 @@ Fragment FlowGraphBuilder::CheckAssignable(const AbstractType& dst_type,
  if (!I->should_emit_strong_mode_checks()) {
    return Fragment();
  }
  if (!dst_type.IsDynamicType() && !dst_type.IsObjectType() &&
      !dst_type.IsVoidType()) {
  if (!dst_type.IsTopTypeForAssignability()) {
    LocalVariable* top_of_stack = MakeTemporary();
    instructions += LoadLocal(top_of_stack);
    instructions += AssertAssignableLoadTypeArguments(TokenPosition::kNoSource,

@@ -1738,7 +1737,7 @@ void FlowGraphBuilder::BuildArgumentTypeChecks(
          &AbstractType::ZoneHandle(Z, forwarding_target->ParameterTypeAt(i));
    }

    if (target_type->IsTopType()) continue;
    if (target_type->IsTopTypeForAssignability()) continue;

    const bool is_covariant = param->is_explicit_covariant_parameter();
    Fragment* checks = is_covariant ? explicit_checks : implicit_checks;

@@ -2294,8 +2293,7 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfNoSuchMethodForwarder(
  body += Drop();  // argument count

  AbstractType& return_type = AbstractType::Handle(function.result_type());
  if (!return_type.IsDynamicType() && !return_type.IsVoidType() &&
      !return_type.IsObjectType()) {
  if (!return_type.IsTopTypeForAssignability()) {
    body += AssertAssignableLoadTypeArguments(TokenPosition::kNoSource,
                                              return_type, Symbols::Empty());
  }
@@ -2868,6 +2868,13 @@ void StubCodeCompiler::GenerateSubtype6TestCacheStub(Assembler* assembler) {
// Note of warning: The caller will not populate CODE_REG and we have therefore
// no access to the pool.
void StubCodeCompiler::GenerateDefaultTypeTestStub(Assembler* assembler) {
  __ ldr(CODE_REG, Address(THR, target::Thread::slow_type_test_stub_offset()));
  __ Branch(FieldAddress(CODE_REG, target::Code::entry_point_offset()));
}

// Used instead of DefaultTypeTestStub when null is assignable.
void StubCodeCompiler::GenerateDefaultNullableTypeTestStub(
    Assembler* assembler) {
  Label done;

  const Register kInstanceReg = R0;

@@ -2922,6 +2929,17 @@ static void InvokeTypeCheckFromTypeTestStub(Assembler* assembler,
void StubCodeCompiler::GenerateLazySpecializeTypeTestStub(
    Assembler* assembler) {
  __ ldr(CODE_REG,
         Address(THR, target::Thread::lazy_specialize_type_test_stub_offset()));
  __ EnterStubFrame();
  InvokeTypeCheckFromTypeTestStub(assembler, kTypeCheckFromLazySpecializeStub);
  __ LeaveStubFrame();
  __ Ret();
}

// Used instead of LazySpecializeTypeTestStub when null is assignable.
void StubCodeCompiler::GenerateLazySpecializeNullableTypeTestStub(
    Assembler* assembler) {
  const Register kInstanceReg = R0;
  Label done;

@@ -2948,15 +2966,6 @@ void StubCodeCompiler::GenerateSlowTypeTestStub(Assembler* assembler) {
  __ EnterStubFrame();

#ifdef DEBUG
  // Guaranteed by caller.
  Label no_error;
  __ CompareObject(kInstanceReg, NullObject());
  __ BranchIf(NOT_EQUAL, &no_error);
  __ Breakpoint();
  __ Bind(&no_error);
#endif

  // If the subtype-cache is null, it needs to be lazily-created by the runtime.
  __ CompareObject(kSubtypeTestCacheReg, NullObject());
  __ BranchIf(EQUAL, &call_runtime);
@@ -2980,22 +2980,23 @@ void StubCodeCompiler::GenerateSubtype6TestCacheStub(Assembler* assembler) {
// Note of warning: The caller will not populate CODE_REG and we have therefore
// no access to the pool.
void StubCodeCompiler::GenerateDefaultTypeTestStub(Assembler* assembler) {
  // Tail call the [SubtypeTestCache]-based implementation.
  __ ldr(CODE_REG, Address(THR, target::Thread::slow_type_test_stub_offset()));
  __ ldr(R9, FieldAddress(CODE_REG, target::Code::entry_point_offset()));
  __ br(R9);
}

// Used instead of DefaultTypeTestStub when null is assignable.
void StubCodeCompiler::GenerateDefaultNullableTypeTestStub(
    Assembler* assembler) {
  Label done;

  const Register kInstanceReg = R0;
  const Register kDstTypeReg = R8;

  // Fast case for 'null'.
  __ CompareObject(kInstanceReg, NullObject());
  __ BranchIf(EQUAL, &done);

  // Fast case for 'int'.
  Label not_smi;
  __ BranchIfNotSmi(kInstanceReg, &not_smi);
  __ CompareObject(kDstTypeReg, CastHandle<Object>(IntType()));
  __ BranchIf(EQUAL, &done);
  __ Bind(&not_smi);

  // Tail call the [SubtypeTestCache]-based implementation.
  __ ldr(CODE_REG, Address(THR, target::Thread::slow_type_test_stub_offset()));
  __ ldr(R9, FieldAddress(CODE_REG, target::Code::entry_point_offset()));

@@ -3046,6 +3047,17 @@ static void InvokeTypeCheckFromTypeTestStub(Assembler* assembler,
void StubCodeCompiler::GenerateLazySpecializeTypeTestStub(
    Assembler* assembler) {
  __ ldr(CODE_REG,
         Address(THR, target::Thread::lazy_specialize_type_test_stub_offset()));
  __ EnterStubFrame();
  InvokeTypeCheckFromTypeTestStub(assembler, kTypeCheckFromLazySpecializeStub);
  __ LeaveStubFrame();
  __ Ret();
}

// Used instead of LazySpecializeTypeTestStub when null is assignable.
void StubCodeCompiler::GenerateLazySpecializeNullableTypeTestStub(
    Assembler* assembler) {
  const Register kInstanceReg = R0;
  Label done;

@@ -3072,15 +3084,6 @@ void StubCodeCompiler::GenerateSlowTypeTestStub(Assembler* assembler) {
  __ EnterStubFrame();

#ifdef DEBUG
  // Guaranteed by caller.
  Label no_error;
  __ CompareObject(kInstanceReg, NullObject());
  __ BranchIf(NOT_EQUAL, &no_error);
  __ Breakpoint();
  __ Bind(&no_error);
#endif

  // If the subtype-cache is null, it needs to be lazily-created by the runtime.
  __ CompareObject(kSubtypeTestCacheReg, NullObject());
  __ BranchIf(EQUAL, &call_runtime);
@@ -2480,6 +2480,12 @@ void StubCodeCompiler::GenerateDefaultTypeTestStub(Assembler* assembler) {
  __ Breakpoint();
}

void StubCodeCompiler::GenerateDefaultNullableTypeTestStub(
    Assembler* assembler) {
  // Not implemented on ia32.
  __ Breakpoint();
}

void StubCodeCompiler::GenerateTopTypeTypeTestStub(Assembler* assembler) {
  // Not implemented on ia32.
  __ Breakpoint();

@@ -2496,6 +2502,12 @@ void StubCodeCompiler::GenerateLazySpecializeTypeTestStub(
  __ Breakpoint();
}

void StubCodeCompiler::GenerateLazySpecializeNullableTypeTestStub(
    Assembler* assembler) {
  // Not implemented on ia32.
  __ Breakpoint();
}

void StubCodeCompiler::GenerateSlowTypeTestStub(Assembler* assembler) {
  // Not implemented on ia32.
  __ Breakpoint();
@@ -2945,6 +2945,13 @@ void StubCodeCompiler::GenerateSubtype6TestCacheStub(Assembler* assembler) {
// Note of warning: The caller will not populate CODE_REG and we have therefore
// no access to the pool.
void StubCodeCompiler::GenerateDefaultTypeTestStub(Assembler* assembler) {
  __ movq(CODE_REG, Address(THR, target::Thread::slow_type_test_stub_offset()));
  __ jmp(FieldAddress(CODE_REG, target::Code::entry_point_offset()));
}

// Used instead of DefaultTypeTestStub when null is assignable.
void StubCodeCompiler::GenerateDefaultNullableTypeTestStub(
    Assembler* assembler) {
  Label done;

  const Register kInstanceReg = RAX;

@@ -3000,6 +3007,18 @@ static void InvokeTypeCheckFromTypeTestStub(Assembler* assembler,
void StubCodeCompiler::GenerateLazySpecializeTypeTestStub(
    Assembler* assembler) {
  __ movq(
      CODE_REG,
      Address(THR, target::Thread::lazy_specialize_type_test_stub_offset()));
  __ EnterStubFrame();
  InvokeTypeCheckFromTypeTestStub(assembler, kTypeCheckFromLazySpecializeStub);
  __ LeaveStubFrame();
  __ Ret();
}

// Used instead of LazySpecializeTypeTestStub when null is assignable.
void StubCodeCompiler::GenerateLazySpecializeNullableTypeTestStub(
    Assembler* assembler) {
  const Register kInstanceReg = RAX;

  Label done;

@@ -3028,19 +3047,6 @@ void StubCodeCompiler::GenerateSlowTypeTestStub(Assembler* assembler) {
  __ EnterStubFrame();

#ifdef DEBUG
  // Guaranteed by caller.
  // TODO(regis): This will change when supporting NNBD, because the caller may
  // not always determine the test result for a null instance, as for example
  // in the case of a still uninstantiated test type, which may become nullable
  // or non-nullable after instantiation in the runtime.
  Label no_error;
  __ CompareObject(kInstanceReg, NullObject());
  __ BranchIf(NOT_EQUAL, &no_error);
  __ Breakpoint();
  __ Bind(&no_error);
#endif

  // If the subtype-cache is null, it needs to be lazily-created by the runtime.
  __ CompareObject(kSubtypeTestCacheReg, NullObject());
  __ BranchIf(EQUAL, &call_runtime);
@@ -770,8 +770,8 @@ bool NeedsDynamicInvocationForwarder(const Function& function) {
  auto& type = AbstractType::Handle(zone);
  for (intptr_t i = function.NumImplicitParameters(); i < num_params; ++i) {
    type = function.ParameterTypeAt(i);
    if (!type.IsTopType() && !is_generic_covariant_impl.Contains(i) &&
        !is_covariant.Contains(i)) {
    if (!type.IsTopTypeForAssignability() &&
        !is_generic_covariant_impl.Contains(i) && !is_covariant.Contains(i)) {
      return true;
    }
  }
@@ -17609,6 +17609,23 @@ bool Instance::NullIsInstanceOf(
  return nullability == Nullability::kNullable;
}

bool Instance::NullIsAssignableTo(const AbstractType& other) {
  // In weak mode, Null is a bottom type (according to LEGACY_SUBTYPE).
  if (!FLAG_null_safety) {
    return true;
  }
  // "Left Null" rule: null is assignable when destination type is either
  // legacy or nullable. Otherwise it is not assignable or we cannot tell
  // without instantiating type parameter.
  if (other.IsLegacy() || other.IsNullable()) {
    return true;
  }
  if (other.IsFutureOrType()) {
    return NullIsAssignableTo(AbstractType::Handle(other.UnwrapFutureOr()));
  }
  return false;
}

bool Instance::RuntimeTypeIsSubtypeOf(
    NNBDMode mode,
    const AbstractType& other,
|
|||
|
||||
// Caution: IsTopType() does not return true for non-nullable Object.
|
||||
bool AbstractType::IsTopType() const {
|
||||
// FutureOr<T> where T is a top type behaves as a top type.
|
||||
const classid_t cid = type_class_id();
|
||||
if (cid == kDynamicCid || cid == kVoidCid) {
|
||||
return true;
|
||||
|
@ -18372,11 +18388,29 @@ bool AbstractType::IsTopType() const {
|
|||
return !IsNonNullable(); // kLegacy or kNullable.
|
||||
}
|
||||
if (cid == kFutureOrCid) {
|
||||
// FutureOr<T> where T is a top type behaves as a top type.
|
||||
return AbstractType::Handle(UnwrapFutureOr()).IsTopType();
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
bool AbstractType::IsTopTypeForAssignability() const {
|
||||
const classid_t cid = type_class_id();
|
||||
if (cid == kDynamicCid || cid == kVoidCid) {
|
||||
return true;
|
||||
}
|
||||
if (cid == kInstanceCid) { // Object type.
|
||||
// NNBD weak mode uses LEGACY_SUBTYPE for assignability / 'as' tests,
|
||||
// and non-nullable Object is a top type according to LEGACY_SUBTYPE.
|
||||
return !FLAG_null_safety || !IsNonNullable();
|
||||
}
|
||||
if (cid == kFutureOrCid) {
|
||||
// FutureOr<T> where T is a top type behaves as a top type.
|
||||
return AbstractType::Handle(UnwrapFutureOr()).IsTopTypeForAssignability();
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
bool AbstractType::IsIntType() const {
|
||||
return HasTypeClass() &&
|
||||
(type_class() == Type::Handle(Type::IntType()).type_class());
|
||||
|
@ -18477,13 +18511,7 @@ bool AbstractType::IsSubtypeOf(NNBDMode mode,
|
|||
}
|
||||
// Left Null type.
|
||||
if (IsNullType()) {
|
||||
// In weak mode, Null is a bottom type.
|
||||
if (!FLAG_null_safety) {
|
||||
return true;
|
||||
}
|
||||
const AbstractType& unwrapped_other =
|
||||
AbstractType::Handle(other.UnwrapFutureOr());
|
||||
return unwrapped_other.IsNullable() || unwrapped_other.IsLegacy();
|
||||
return Instance::NullIsAssignableTo(other);
|
||||
}
|
||||
Thread* thread = Thread::Current();
|
||||
Zone* zone = thread->zone();
|
||||
|
|
|
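IsTopTypeForAssignability treats non-nullable Object as a top type only when null safety is weak; under strong null safety a cast to Object still rejects null, while dynamic, void and Object? always succeed. Illustrative Dart (not from the CL):

void main() {
  Object? x = null;
  print(x as Object?); // OK: Object? is a top type for assignability.
  print(x as dynamic); // OK: dynamic is always a top type.
  try {
    x as Object; // Under strong null safety, non-nullable Object rejects null.
  } on TypeError {
    print('null is not assignable to Object in strong mode');
  }
}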
@@ -6856,6 +6856,11 @@ class Instance : public Object {
      const TypeArguments& other_instantiator_type_arguments,
      const TypeArguments& other_function_type_arguments) const;

  // Return true if the null instance can be assigned to a variable of [other]
  // type. Return false if null cannot be assigned or we cannot tell (if
  // [other] is a type parameter in NNBD strong mode).
  static bool NullIsAssignableTo(const AbstractType& other);

  // Returns true if the type of this instance is a subtype of FutureOr<T>
  // specified by instantiated type 'other'.
  // Returns false if other type is not a FutureOr.

@@ -7432,6 +7437,13 @@ class AbstractType : public Instance {
  // Check if this type represents a top type.
  bool IsTopType() const;

  // Check if this type represents a top type with respect to
  // assignability and 'as' type tests, e.g. returns true if any value can be
  // assigned to a variable of this type and 'as' type test always succeeds.
  // Guaranteed to return true for top types according to IsTopType(), but
  // may also return true for other types (non-nullable Object in weak mode).
  bool IsTopTypeForAssignability() const;

  // Check if this type represents the 'bool' type.
  bool IsBoolType() const { return type_class_id() == kBoolCid; }
@@ -64,10 +64,12 @@ namespace dart {
  V(Subtype4TestCache) \
  V(Subtype6TestCache) \
  V(DefaultTypeTest) \
  V(DefaultNullableTypeTest) \
  V(TopTypeTypeTest) \
  V(UnreachableTypeTest) \
  V(SlowTypeTest) \
  V(LazySpecializeTypeTest) \
  V(LazySpecializeNullableTypeTest) \
  V(CallClosureNoSuchMethod) \
  V(FrameAwaitingMaterialization) \
  V(AsynchronousGapMarker) \
@@ -92,30 +92,33 @@ RawCode* TypeTestingStubGenerator::DefaultCodeForType(
    const AbstractType& type,
    bool lazy_specialize /* = true */) {
  if (type.IsTypeRef()) {
    return StubCode::DefaultTypeTest().raw();
    return FLAG_null_safety ? StubCode::DefaultTypeTest().raw()
                            : StubCode::DefaultNullableTypeTest().raw();
  }

  const intptr_t cid = type.type_class_id();
  // During bootstrapping we have no access to stubs yet, so we'll just return
  // `null` and patch these later in `Object::FinishInit()`.
  if (!StubCode::HasBeenInitialized()) {
    ASSERT(type.IsType());
    const classid_t cid = type.type_class_id();
    ASSERT(cid == kDynamicCid || cid == kVoidCid || cid == kNeverCid);
    return Code::null();
  }

  if (cid == kDynamicCid || cid == kVoidCid ||
      (cid == kInstanceCid && (!FLAG_null_safety || !type.IsNonNullable()))) {
  if (type.IsTopTypeForAssignability()) {
    return StubCode::TopTypeTypeTest().raw();
  }

  if (type.IsType() || type.IsTypeParameter()) {
    // TODO(dartbug.com/38845): Add support for specialized TTS for
    // nullable and non-nullable types in NNBD strong mode.
    const bool should_specialize = !FLAG_precompiled_mode && lazy_specialize &&
                                   (type.IsLegacy() || !FLAG_null_safety);
    return should_specialize ? StubCode::LazySpecializeTypeTest().raw()
                             : StubCode::DefaultTypeTest().raw();
    const bool should_specialize = !FLAG_precompiled_mode && lazy_specialize;
    const bool nullable = Instance::NullIsAssignableTo(type);
    if (should_specialize) {
      return nullable ? StubCode::LazySpecializeNullableTypeTest().raw()
                      : StubCode::LazySpecializeTypeTest().raw();
    } else {
      return nullable ? StubCode::DefaultNullableTypeTest().raw()
                      : StubCode::DefaultTypeTest().raw();
    }
  }

  return StubCode::UnreachableTypeTest().raw();

@@ -145,8 +148,7 @@ RawCode* TypeTestingStubGenerator::OptimizedCodeForType(
        type, /*lazy_specialize=*/false);
  }

  const intptr_t cid = type.type_class_id();
  if (cid == kDynamicCid || cid == kVoidCid || cid == kInstanceCid) {
  if (type.IsTopTypeForAssignability()) {
    return StubCode::TopTypeTypeTest().raw();
  }
@@ -239,7 +241,7 @@ void TypeTestingStubGenerator::BuildOptimizedTypeTestStubFastCases(
    Register instance_reg,
    Register class_id_reg) {
  // These are handled via the TopTypeTypeTestStub!
  ASSERT(!(type.IsDynamicType() || type.IsVoidType() || type.IsObjectType()));
  ASSERT(!type.IsTopTypeForAssignability());

  // Fast case for 'int'.
  if (type.IsIntType()) {

@@ -261,10 +263,10 @@ void TypeTestingStubGenerator::BuildOptimizedTypeTestStubFastCases(
  // Check the cid ranges which are a subtype of [type].
  if (hi->CanUseSubtypeRangeCheckFor(type)) {
    const CidRangeVector& ranges =
        hi->SubtypeRangesForClass(type_class,
                                  /*include_abstract=*/false,
                                  /*exclude_null=*/false);
    const CidRangeVector& ranges = hi->SubtypeRangesForClass(
        type_class,
        /*include_abstract=*/false,
        /*exclude_null=*/!Instance::NullIsAssignableTo(type));

    const Type& int_type = Type::Handle(Type::IntType());
    const bool smi_is_ok =

@@ -289,12 +291,14 @@ void TypeTestingStubGenerator::BuildOptimizedTypeTestStubFastCases(
                                            type_class, tp, ta);
  }

  // Fast case for 'null'.
  compiler::Label non_null;
  __ CompareObject(instance_reg, Object::null_object());
  __ BranchIf(NOT_EQUAL, &non_null);
  __ Ret();
  __ Bind(&non_null);
  if (Instance::NullIsAssignableTo(type)) {
    // Fast case for 'null'.
    compiler::Label non_null;
    __ CompareObject(instance_reg, Object::null_object());
    __ BranchIf(NOT_EQUAL, &non_null);
    __ Ret();
    __ Bind(&non_null);
  }
}

void TypeTestingStubGenerator::BuildOptimizedSubtypeRangeCheck(
@@ -399,6 +403,8 @@ void TypeTestingStubGenerator::BuildOptimizedSubclassRangeCheck(
  __ Bind(&is_subtype);
}

// Generate code to verify that instance's type argument is a subtype of
// 'type_arg'.
void TypeTestingStubGenerator::BuildOptimizedTypeArgumentValueCheck(
    compiler::Assembler* assembler,
    HierarchyInfo* hi,

@@ -410,20 +416,64 @@ void TypeTestingStubGenerator::BuildOptimizedTypeArgumentValueCheck(
    const Register function_type_args_reg,
    const Register own_type_arg_reg,
    compiler::Label* check_failed) {
  const intptr_t cid = type_arg.type_class_id();
  if (!(cid == kDynamicCid || cid == kVoidCid || cid == kInstanceCid)) {
    // TODO(kustermann): Even though it should be safe to use TMP here, we
    // should avoid using TMP outside the assembler. Try to find a free
    // register to use here!
    __ LoadField(TMP, compiler::FieldAddress(
                          instance_type_args_reg,
                          compiler::target::TypeArguments::type_at_offset(
                              type_param_value_offset_i)));
    __ LoadField(class_id_reg,
                 compiler::FieldAddress(
                     TMP, compiler::target::Type::type_class_id_offset()));
  if (type_arg.IsTopType()) {
    return;
  }
  // TODO(dartbug.com/40736): Even though it should be safe to use TMP here,
  // we should avoid using TMP outside the assembler. Try to find a free
  // register to use here!
  __ LoadField(TMP, compiler::FieldAddress(
                        instance_type_args_reg,
                        compiler::target::TypeArguments::type_at_offset(
                            type_param_value_offset_i)));
  __ LoadField(class_id_reg,
               compiler::FieldAddress(
                   TMP, compiler::target::Type::type_class_id_offset()));

  if (type_arg.IsTypeParameter()) {
  if (type_arg.IsTypeParameter()) {
    const TypeParameter& type_param = TypeParameter::Cast(type_arg);
    const Register kTypeArgumentsReg = type_param.IsClassTypeParameter()
                                           ? instantiator_type_args_reg
                                           : function_type_args_reg;
    __ LoadField(
        own_type_arg_reg,
        compiler::FieldAddress(kTypeArgumentsReg,
                               compiler::target::TypeArguments::type_at_offset(
                                   type_param.index())));
    __ CompareWithFieldValue(
        class_id_reg,
        compiler::FieldAddress(own_type_arg_reg,
                               compiler::target::Type::type_class_id_offset()));
    __ BranchIf(NOT_EQUAL, check_failed);
  } else {
    const Class& type_class = Class::Handle(type_arg.type_class());
    const CidRangeVector& ranges = hi->SubtypeRangesForClass(
        type_class,
        /*include_abstract=*/true,
        /*exclude_null=*/!Instance::NullIsAssignableTo(type_arg));

    compiler::Label is_subtype;
    __ SmiUntag(class_id_reg);
    FlowGraphCompiler::GenerateCidRangesCheck(assembler, class_id_reg, ranges,
                                              &is_subtype, check_failed, true);
    __ Bind(&is_subtype);
  }

  // Weak NNBD mode uses LEGACY_SUBTYPE which ignores nullability.
  // We don't need to check nullability of LHS for nullable and legacy RHS
  // ("Right Legacy", "Right Nullable" rules).
  if (FLAG_null_safety && !type_arg.IsNullable() && !type_arg.IsLegacy()) {
    ASSERT((type_arg.IsTypeParameter() && type_arg.IsUndetermined()) ||
           type_arg.IsNonNullable());

    compiler::Label skip_nullable_check;
    if (type_arg.IsUndetermined()) {
      ASSERT(type_arg.IsTypeParameter());
      // Skip the nullability check if actual RHS is nullable or legacy.
      // TODO(dartbug.com/40736): Allocate register for own_type_arg_reg
      // which is not clobbered and avoid reloading own_type_arg_reg.
      // Currently own_type_arg_reg == TMP on certain
      // architectures and it is clobbered by CompareWithFieldValue.
      const TypeParameter& type_param = TypeParameter::Cast(type_arg);
      const Register kTypeArgumentsReg = type_param.IsClassTypeParameter()
                                             ? instantiator_type_args_reg

@@ -433,23 +483,26 @@ void TypeTestingStubGenerator::BuildOptimizedTypeArgumentValueCheck(
              kTypeArgumentsReg,
              compiler::target::TypeArguments::type_at_offset(
                  type_param.index())));
      __ CompareWithFieldValue(
          class_id_reg, compiler::FieldAddress(
                            own_type_arg_reg,
                            compiler::target::Type::type_class_id_offset()));
      __ BranchIf(NOT_EQUAL, check_failed);
    } else {
      const Class& type_class = Class::Handle(type_arg.type_class());
      const CidRangeVector& ranges =
          hi->SubtypeRangesForClass(type_class,
                                    /*include_abstract=*/true,
                                    /*exclude_null=*/false);
      __ CompareTypeNullabilityWith(
          own_type_arg_reg, compiler::target::Nullability::kNonNullable);
      __ BranchIf(NOT_EQUAL, &skip_nullable_check);
    }

    compiler::Label is_subtype;
    __ SmiUntag(class_id_reg);
    FlowGraphCompiler::GenerateCidRangesCheck(
        assembler, class_id_reg, ranges, &is_subtype, check_failed, true);
    __ Bind(&is_subtype);
    // Nullable type is not a subtype of non-nullable type.
    // TODO(dartbug.com/40736): allocate a register for instance type argument
    // and avoid reloading it. Note that class_id_reg == TMP on certain
    // architectures.
    __ LoadField(
        class_id_reg,
        compiler::FieldAddress(instance_type_args_reg,
                               compiler::target::TypeArguments::type_at_offset(
                                   type_param_value_offset_i)));
    __ CompareTypeNullabilityWith(class_id_reg,
                                  compiler::target::Nullability::kNullable);
    __ BranchIf(EQUAL, check_failed);

    if (type_arg.IsUndetermined()) {
      __ Bind(&skip_nullable_check);
    }
  }
}
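The nullability comparison added above rejects, in strong mode, an instance whose type argument is nullable when the required type argument is non-nullable. A Dart sketch in the spirit of the new tests (class names are illustrative):

class X<T> {}

void main() {
  final a = X<int>() as X<int?>; // OK: X<int> is a subtype of X<int?>.
  print(a);
  try {
    X<int?>() as X<int>; // Throws under strong null safety: X<int?> is not
  } on TypeError {       // a subtype of X<int>.
    print('nullable type argument rejected');
  }
}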
tests/language/nnbd/subtyping/type_casts_legacy_library.dart (new file, 28 lines)
@@ -0,0 +1,28 @@
// Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

// Opt out of Null Safety:
// @dart = 2.6

import 'type_casts_null_safe_library.dart';

class A<T> {
  @pragma('vm:never-inline')
  asT(arg) => arg as T;

  @pragma('vm:never-inline')
  asBT(arg) => arg as B<T>;
}

class B<T> {}

class C {}

class D extends C {}

newAOfLegacyC() => new A<C>();
newAOfLegacyBOfLegacyC() => new A<B<C>>();
newWOfLegacyC() => new W<C>();
newWOfLegacyBOfLegacyC() => new W<B<C>>();
newXOfLegacyY() => new X<Y>();
tests/language/nnbd/subtyping/type_casts_null_safe_library.dart (new file, 26 lines)

@@ -0,0 +1,26 @@
// Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

class W<T> {
  @pragma('vm:never-inline')
  asT(arg) => arg as T;

  @pragma('vm:never-inline')
  asNullableT(arg) => arg as T?;

  @pragma('vm:never-inline')
  asXT(arg) => arg as X<T>;

  @pragma('vm:never-inline')
  asNullableXT(arg) => arg as X<T>?;

  @pragma('vm:never-inline')
  asXNullableT(arg) => arg as X<T?>;
}

class X<T> {}

class Y {}

class Z extends Y {}
tests/language/nnbd/subtyping/type_casts_strong_test.dart (new file, 170 lines)
@@ -0,0 +1,170 @@
// Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

// VMOptions=--optimization_counter_threshold=10 --deterministic

// Requirements=nnbd-strong

import 'package:expect/expect.dart';
import 'type_casts_legacy_library.dart'; // A, B, C, D
import 'type_casts_null_safe_library.dart'; // W, X, Y, Z

doTests() {
  // Testing 'arg as T*', T = C*
  final ac = newAOfLegacyC();
  ac.asT(new C());
  ac.asT(new D());
  ac.asT(null);
  Expect.throwsCastError(() {
    ac.asT(new Y());
  });

  // Testing 'arg as T*', T = B<C*>*
  final abc = newAOfLegacyBOfLegacyC();
  abc.asT(new B<C>());
  abc.asT(new B<D>());
  abc.asT(null);
  Expect.throwsCastError(() {
    abc.asT(new B<dynamic>());
  });
  Expect.throwsCastError(() {
    abc.asT(new B<Y>());
  });

  // Testing 'arg as T*', T = Y
  final ay = new A<Y>();
  ay.asT(new Y());
  ay.asT(new Z());
  ay.asT(null);
  Expect.throwsCastError(() {
    ay.asT(new C());
  });

  // Testing 'arg as T', T = C*
  final wc = newWOfLegacyC();
  wc.asT(new C());
  wc.asT(new D());
  wc.asT(null);
  Expect.throwsCastError(() {
    wc.asT(new Y());
  });

  // Testing 'arg as T?', T = C*
  wc.asNullableT(new C());
  wc.asNullableT(new D());
  wc.asNullableT(null);
  Expect.throwsCastError(() {
    wc.asNullableT(new Y());
  });

  // Testing 'arg as T', T = B<C*>*
  final wby = newWOfLegacyBOfLegacyC();
  wby.asT(new B<C>());
  wby.asT(new B<D>());
  wby.asT(null);
  Expect.throwsCastError(() {
    wby.asT(new B<dynamic>());
  });
  Expect.throwsCastError(() {
    wby.asT(new B<Y>());
  });

  // Testing 'arg as T?', T = B<C*>*
  wby.asNullableT(new B<C>());
  wby.asNullableT(new B<D>());
  wby.asNullableT(null);
  Expect.throwsCastError(() {
    wby.asNullableT(new B<dynamic>());
  });
  Expect.throwsCastError(() {
    wby.asNullableT(new B<Y>());
  });

  // Testing 'arg as T', T = Y
  final wy = new W<Y>();
  wy.asT(new Y());
  wy.asT(new Z());
  Expect.throwsCastError(() {
    wy.asT(null);
  });
  Expect.throwsCastError(() {
    wy.asT(new C());
  });

  // Testing 'arg as T?', T = Y
  wy.asNullableT(new Y());
  wy.asNullableT(new Z());
  wy.asNullableT(null);
  Expect.throwsCastError(() {
    wy.asNullableT(new C());
  });

  // Testing 'arg as B<T*>*', T = Y
  ay.asBT(new B<Y>());
  ay.asBT(new B<Z>());
  ay.asBT(null);
  Expect.throwsCastError(() {
    ay.asBT(new B<dynamic>());
  });
  Expect.throwsCastError(() {
    ay.asBT(new B<C>());
  });

  // Testing 'arg as X<T>', T = Y
  wy.asXT(new X<Y>());
  wy.asXT(new X<Z>());
  wy.asXT(newXOfLegacyY());
  Expect.throwsCastError(() {
    wy.asXT(null);
  });
  Expect.throwsCastError(() {
    wy.asXT(new X<dynamic>());
  });
  Expect.throwsCastError(() {
    wy.asXT(new X<Y?>());
  });

  // Testing 'arg as X<T>?', T = Y
  wy.asNullableXT(new X<Y>());
  wy.asNullableXT(new X<Z>());
  wy.asNullableXT(newXOfLegacyY());
  wy.asNullableXT(null);
  Expect.throwsCastError(() {
    wy.asNullableXT(new X<dynamic>());
  });
  Expect.throwsCastError(() {
    wy.asNullableXT(new X<Y?>());
  });

  // Testing 'arg as X<T?>', T = Y
  wy.asXNullableT(new X<Y>());
  wy.asXNullableT(new X<Z>());
  wy.asXNullableT(new X<Y?>());
  wy.asXNullableT(newXOfLegacyY());
  Expect.throwsCastError(() {
    wy.asXNullableT(null);
  });
  Expect.throwsCastError(() {
    wy.asXNullableT(new X<dynamic>());
  });

  // Testing 'arg as X<T>', T = Y?
  final wny = new W<Y?>();
  wny.asXT(new X<Y>());
  wny.asXT(new X<Z>());
  wny.asXT(new X<Y?>());
  wny.asXT(newXOfLegacyY());
  Expect.throwsCastError(() {
    wny.asXT(null);
  });
  Expect.throwsCastError(() {
    wny.asXT(new X<dynamic>());
  });
}

main() {
  for (int i = 0; i < 20; ++i) {
    doTests();
  }
}