dart-sdk/runtime/vm/stub_code.h
Tess Strickland 7c55965b76 [vm/compiler] Add initial partial TTSes for implemented types.
Previously, no optimized TTSes were generated for implemented types, and
so they always fell back to the default TTS, which mostly depends on
calling the runtime and cached checks in SubtypeTestCaches. Now,
optimized TTSes are generated that check for certain compatible
implementing classes before falling back on the runtime/STC.

More specifically, the optimized TTSes for implemented types check for
the following cases:
1) The implemented type is instantiated and the checked class implements
   an instantiated subtype of the implemented type. The only check
   required is a class id match.
2) The instance type arguments of the checked class are compatible with
   the type arguments of the checked type.  That is, given the following
   declarations, where Base, Impl1, and Impl2 have the same number of
   parent type arguments:

```
class Impl1<K, V> implements Base<K, V>
class Impl2<V> implements Base<String, V>
```

   then the generated optimized TTS for Base<S, T>, where S and T are
   either type parameters or instantiated types, checks for instances of
   Base and Impl1, comparing the type arguments of the instance to S and
   T. The generated TTS does not currently check for Impl2, and thus
   when given an instance of Impl2, it falls back to the old runtime
   checking/SubtypeTestCache behavior.

This compatibility restriction allows us to perform the same checks on
the loaded instance type arguments as is done for non-implemented types,
where the checked classes are subclasses and so naturally compatible in
this manner.

Note that two implementing classes whose instance type arguments are
compatible may store their instance type arguments at different field
offsets. Thus, we also split the classes being checked into groups that
share the same instance type arguments field offset, and load the
instance type arguments differently for each checked group.

This CL also removes now-unused code in the HierarchyInfo class.

TEST=vm/cc/TTS_{Generic,}SubtypeRangeCheck

Cq-Include-Trybots: luci.dart.try:vm-kernel-linux-release-x64-try,vm-kernel-precomp-linux-release-x64-try,vm-kernel-precomp-nnbd-linux-release-x64-try,vm-kernel-nnbd-linux-release-x64-try,vm-kernel-linux-product-x64-try,vm-kernel-precomp-linux-product-x64-try,vm-kernel-linux-release-simarm-try,vm-kernel-linux-release-simarm64-try,vm-kernel-precomp-linux-release-simarm64-try,vm-kernel-precomp-linux-release-simarm-try,vm-kernel-precomp-linux-release-simarm_x64-try
Change-Id: I4c3aa23db2e75adbad9c15727b491669b2f3a189
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/209540
Reviewed-by: Martin Kustermann <kustermann@google.com>
Reviewed-by: Alexander Markov <alexmarkov@google.com>
Commit-Queue: Tess Strickland <sstrickl@google.com>
2021-08-16 13:02:34 +00:00

140 lines
4.8 KiB
C++

// Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
#ifndef RUNTIME_VM_STUB_CODE_H_
#define RUNTIME_VM_STUB_CODE_H_
#include "vm/allocation.h"
#include "vm/compiler/runtime_api.h"
#include "vm/object.h"
#include "vm/stub_code_list.h"
#if !defined(DART_PRECOMPILED_RUNTIME)
#include "vm/compiler/assembler/assembler.h"
#include "vm/compiler/stub_code_compiler.h"
#endif // !defined(DART_PRECOMPILED_RUNTIME)
namespace dart {
// Forward declarations.
class Code;
class Isolate;
class ObjectPointerVisitor;
// It is permitted for the stubs above to refer to Object::null(), which is
// allocated in the VM isolate and shared across all isolates.
// However, in cases where a simple GC-safe placeholder is needed on the stack,
// using Smi 0 instead of Object::null() is slightly more efficient, since a Smi
// does not require relocation.
// class StubCode is used to maintain the lifecycle of stubs.
class StubCode : public AllStatic {
public:
// Generate all stubs which are shared across all isolates, this is done
// only once and the stub code resides in the vm_isolate heap.
static void Init();
// Counterpart to Init(); tears down the stub state set up there.
static void Cleanup();
// Returns true if stub code has been initialized.
static bool HasBeenInitialized() {
return initialized_.load(std::memory_order_acquire);
}
// Marks initialization complete. The release store pairs with the acquire
// load in HasBeenInitialized() so readers observe fully initialized stubs.
static void InitializationDone() {
initialized_.store(true, std::memory_order_release);
}
// Check if specified pc is in the dart invocation stub used for
// transitioning into dart code.
static bool InInvocationStub(uword pc);
// Check if the specified pc is in the jump to frame stub.
static bool InJumpToFrameStub(uword pc);
// Returns NULL if no stub found.
static const char* NameOfStub(uword entry_point);
// Define the shared stub code accessors.
// For each stub <name> in VM_STUB_CODE_LIST this expands to:
//   <name>()     - the read-only Code handle for the stub, and
//   <name>Size() - the size in bytes of the stub's code.
#define STUB_CODE_ACCESSOR(name) \
static const Code& name() { return *entries_[k##name##Index].code; } \
static intptr_t name##Size() { return name().Size(); }
VM_STUB_CODE_LIST(STUB_CODE_ACCESSOR);
#undef STUB_CODE_ACCESSOR
#if !defined(DART_PRECOMPILED_RUNTIME)
// Allocation stubs are looked up (and possibly compiled) per class /
// per typed-data class id; only available when a compiler is present.
static CodePtr GetAllocationStubForClass(const Class& cls);
static CodePtr GetAllocationStubForTypedData(classid_t class_id);
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
#if !defined(TARGET_ARCH_IA32)
// Convenience wrappers around GetBuildMethodExtractorStub() selecting the
// generic / non-generic variant. Not provided on IA32.
static CodePtr GetBuildGenericMethodExtractorStub(
compiler::ObjectPoolBuilder* pool) {
return GetBuildMethodExtractorStub(pool, /*generic=*/true);
}
static CodePtr GetBuildNonGenericMethodExtractorStub(
compiler::ObjectPoolBuilder* pool) {
return GetBuildMethodExtractorStub(pool, /*generic=*/false);
}
#endif
#if !defined(DART_PRECOMPILED_RUNTIME)
// Generate the stub and finalize the generated code into the stub
// code executable area.
static CodePtr Generate(const char* name,
compiler::ObjectPoolBuilder* object_pool_builder,
void (*GenerateStub)(compiler::Assembler* assembler));
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
// Stub used for unoptimized static calls with the given tested-args count.
static const Code& UnoptimizedStaticCallEntry(intptr_t num_args_tested);
// Name / code of the stub table entry at the given index.
static const char* NameAt(intptr_t index) { return entries_[index].name; }
static const Code& EntryAt(intptr_t index) { return *(entries_[index].code); }
// Installs the code for a stub entry. Each slot may be filled exactly once
// (asserted), and only with a read-only handle so it is safe to share.
static void EntryAtPut(intptr_t index, Code* entry) {
ASSERT(entry->IsReadOnlyHandle());
ASSERT(entries_[index].code == nullptr);
entries_[index].code = entry;
}
static intptr_t NumEntries() { return kNumStubEntries; }
#if !defined(DART_PRECOMPILED_RUNTIME)
// For each stub <name> this expands to BuildIsolateSpecific<name>Stub(),
// which generates a fresh copy of the stub ("_iso_stub_<name>") against the
// given object pool via the corresponding StubCodeCompiler generator.
#define GENERATE_STUB(name) \
static CodePtr BuildIsolateSpecific##name##Stub( \
compiler::ObjectPoolBuilder* opw) { \
return StubCode::Generate( \
"_iso_stub_" #name, opw, \
compiler::StubCodeCompiler::Generate##name##Stub); \
}
VM_STUB_CODE_LIST(GENERATE_STUB);
#undef GENERATE_STUB
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
private:
friend class MegamorphicCacheTable;
static CodePtr GetBuildMethodExtractorStub(compiler::ObjectPoolBuilder* pool,
bool generic);
// Generates one k<name>Index enumerator per stub, plus kNumStubEntries as
// the total count (used to size entries_ below).
enum {
#define STUB_CODE_ENTRY(name) k##name##Index,
VM_STUB_CODE_LIST(STUB_CODE_ENTRY)
#undef STUB_CODE_ENTRY
kNumStubEntries
};
// Per-stub bookkeeping: finalized code, printable name, and (when a
// compiler is available) the generator function that builds the stub.
struct StubCodeEntry {
Code* code;
const char* name;
#if !defined(DART_PRECOMPILED_RUNTIME)
void (*generator)(compiler::Assembler* assembler);
#endif
};
// One entry per stub in VM_STUB_CODE_LIST, indexed by the enum above.
static StubCodeEntry entries_[kNumStubEntries];
// Set by InitializationDone(); read with acquire semantics by
// HasBeenInitialized().
static AcqRelAtomic<bool> initialized_;
};
} // namespace dart
#endif // RUNTIME_VM_STUB_CODE_H_