[vm] Remove support for non-bare AOT snapshots

Every AOT user out there has been using bare instructions mode, and
continuing to maintain non-bare instructions mode simply adds costs:
time spent making changes work in a mode that is not used, and CI
resources spent on testing it.

This change removes FLAG_use_bare_instructions and changes the code
to assume that FLAG_precompiled_mode implies bare instructions.
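
The bulk of the C++ diff below follows one mechanical rewrite: every
"FLAG_precompiled_mode && FLAG_use_bare_instructions" guard collapses
to a check of FLAG_precompiled_mode alone. A minimal standalone C++
sketch of that pattern (toy globals for illustration, not the VM's
actual flag declarations):

    #include <cassert>

    // Toy stand-in for the VM's flag global (the real flag is declared
    // through the VM's flag machinery, not like this).
    static bool FLAG_precompiled_mode = true;

    // Before: if (FLAG_precompiled_mode && FLAG_use_bare_instructions) ...
    // After:  precompiled mode alone implies bare instructions.
    static bool UsesBareInstructions() {
      return FLAG_precompiled_mode;
    }

    int main() {
      assert(UsesBareInstructions());  // AOT snapshots are now always bare.
      return 0;
    }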

TEST=ci

Cq-Include-Trybots: luci.dart.try:vm-kernel-precomp-linux-debug-x64-try,vm-kernel-precomp-linux-debug-x64c-try,vm-kernel-precomp-linux-product-x64-try,vm-kernel-precomp-dwarf-linux-product-x64-try,vm-kernel-precomp-obfuscate-linux-release-x64-try,app-kernel-linux-release-x64-try,app-kernel-linux-debug-x64-try
Change-Id: I5032b13bfcb613f79865f2cfa139cca8d1b42556
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/220964
Commit-Queue: Slava Egorov <vegorov@google.com>
Reviewed-by: Martin Kustermann <kustermann@google.com>
Author: Vyacheslav Egorov <vegorov@google.com>
Committed: 2021-11-24 13:03:52 +00:00 by commit-bot@chromium.org
Parent: 450ff2976d
Commit: 71c9c2cd83
70 changed files with 401 additions and 948 deletions


@@ -16,11 +16,11 @@ void main() {
test("Fallback to dartdev.dill from dartdev.dart.snapshot for 'Hello World'",
() async {
p = project(mainSrc: "void main() { print('Hello World'); }");
// The DartDev snapshot includes the --use-bare-instructions flag. If
// --no-use-bare-instructions is passed, the VM will fail to load the
// The DartDev snapshot includes the --use_field_guards flag. If
// --no-use-field-guards is passed, the VM will fail to load the
// snapshot and should fall back to using the DartDev dill file.
ProcessResult result =
await p.run(['--no-use-bare-instructions', 'run', p.relativeFilePath]);
await p.run(['--no-use-field-guards', 'run', p.relativeFilePath]);
expect(result.stdout, contains('Hello World'));
expect(result.stderr, isEmpty);


@@ -2,7 +2,7 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
// VMOptions=--always-generate-trampolines-for-testing --use-bare-instructions
// VMOptions=--always-generate-trampolines-for-testing
// We use a reasonable sized test and run it with the above options.
import 'hello_fuchsia_test.dart' as test;


@@ -2,7 +2,7 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
//
// VMOptions=--dwarf-stack-traces --save-debugging-info=async_lazy_debug.so --lazy-async-stacks --no-use-bare-instructions
// VMOptions=--dwarf-stack-traces --save-debugging-info=async_lazy_debug.so --lazy-async-stacks
import 'dart:async';
import 'dart:io';


@@ -2,9 +2,8 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
// VMOptions=--use_bare_instructions=false
// VMOptions=--use_bare_instructions=true --use_table_dispatch=false
// VMOptions=--use_bare_instructions=true --use_table_dispatch=true
// VMOptions=--use_table_dispatch=false
// VMOptions=--use_table_dispatch=true
import "splay_test.dart" deferred as splay; // Some non-trivial code.


@@ -65,8 +65,6 @@ main(List<String> args) async {
// Compile kernel to ELF.
await run(genSnapshot, <String>[
"--use_bare_instructions=false", //# object: ok
"--use_bare_instructions=true", //# bare: ok
"--snapshot-kind=app-aot-elf",
"--elf=$snapshot",
"--loading-unit-manifest=$manifest",


@@ -1,103 +0,0 @@
// Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
// This test is ensuring that the flag for --use-bare-instructions given at
// AOT compile-time will be used at runtime (irrespective if other values were
// passed to the runtime).
import "dart:async";
import "dart:io";
import 'package:expect/expect.dart';
import 'package:path/path.dart' as path;
import 'use_flag_test_helper.dart';
main(List<String> args) async {
if (!isAOTRuntime) {
return; // Running in JIT: AOT binaries not available.
}
if (Platform.isAndroid) {
return; // SDK tree and dart_bootstrap not available on the test device.
}
await withTempDir('bare-flag-test', (String tempDir) async {
final script = path.join(sdkDir, 'pkg/kernel/bin/dump.dart');
final scriptDill = path.join(tempDir, 'kernel_dump.dill');
// Compile script to Kernel IR.
await run(genKernel, <String>[
'--aot',
'--platform=$platformDill',
'-o',
scriptDill,
script,
]);
// Run the AOT compiler with/without bare instructions.
final scriptBareSnapshot = path.join(tempDir, 'bare.snapshot');
final scriptNonBareSnapshot = path.join(tempDir, 'non_bare.snapshot');
await Future.wait(<Future>[
run(genSnapshot, <String>[
'--use-bare-instructions',
'--snapshot-kind=app-aot-elf',
'--elf=$scriptBareSnapshot',
scriptDill,
]),
run(genSnapshot, <String>[
'--no-use-bare-instructions',
'--snapshot-kind=app-aot-elf',
'--elf=$scriptNonBareSnapshot',
scriptDill,
]),
]);
// Run the resulting bare-AOT compiled script.
final bareOut1 = path.join(tempDir, 'bare-out1.txt');
final bareOut2 = path.join(tempDir, 'bare-out2.txt');
await Future.wait(<Future>[
run(aotRuntime, <String>[
'--use-bare-instructions',
scriptBareSnapshot,
scriptDill,
bareOut1,
]),
run(aotRuntime, <String>[
'--no-use-bare-instructions',
scriptBareSnapshot,
scriptDill,
bareOut2,
]),
]);
// Run the resulting non-bare-AOT compiled script.
final nonBareOut1 = path.join(tempDir, 'non-bare-out1.txt');
final nonBareOut2 = path.join(tempDir, 'non-bare-out2.txt');
await Future.wait(<Future>[
run(aotRuntime, <String>[
'--use-bare-instructions',
scriptNonBareSnapshot,
scriptDill,
nonBareOut1,
]),
run(aotRuntime, <String>[
'--no-use-bare-instructions',
scriptNonBareSnapshot,
scriptDill,
nonBareOut2,
]),
]);
// Ensure we got 4 times the same result.
final output = await readFile(bareOut1);
Expect.equals(output, await readFile(bareOut2));
Expect.equals(output, await readFile(nonBareOut1));
Expect.equals(output, await readFile(nonBareOut2));
});
}
Future<String> readFile(String file) {
return new File(file).readAsString();
}


@@ -107,7 +107,6 @@ Future<void> testJIT(String dillPath, String snapshotKind) async {
Future<void> testAOT(String dillPath,
{bool useAsm = false,
bool useBare = true,
bool forceDrops = false,
bool useDispatch = true,
bool stripUtil = false, // Note: forced true if useAsm.
@@ -127,9 +126,6 @@ Future<void> testAOT(String dillPath,
}
final descriptionBuilder = StringBuffer()..write(useAsm ? 'assembly' : 'elf');
if (!useBare) {
descriptionBuilder.write('-nonbare');
}
if (forceDrops) {
descriptionBuilder.write('-dropped');
}
@@ -156,7 +152,6 @@ Future<void> testAOT(String dillPath,
final snapshotPath = path.join(tempDir, 'test.snap');
final commonSnapshotArgs = [
if (stripFlag) '--strip', // gen_snapshot specific and not a VM flag.
useBare ? '--use-bare-instructions' : '--no-use-bare-instructions',
"--write-v8-snapshot-profile-to=$profilePath",
if (forceDrops) ...[
'--dwarf-stack-traces',
@@ -438,24 +433,11 @@ main() async {
// Test unstripped ELF generation directly.
await testAOT(aotDillPath);
await testAOT(aotDillPath, useBare: false);
await testAOT(aotDillPath, forceDrops: true);
await testAOT(aotDillPath, forceDrops: true, useBare: false);
await testAOT(aotDillPath, forceDrops: true, useDispatch: false);
await testAOT(aotDillPath,
forceDrops: true, useDispatch: false, useBare: false);
// Test flag-stripped ELF generation.
await testAOT(aotDillPath, stripFlag: true);
await testAOT(aotDillPath, useBare: false, stripFlag: true);
// Since we can't force disassembler support after the fact when running
// in PRODUCT mode, skip any --disassemble tests. Do these tests last as
// they have lots of output and so the log will be truncated.
if (!const bool.fromEnvironment('dart.vm.product')) {
// Regression test for dartbug.com/41149.
await testAOT(aotDillPath, useBare: false, disassemble: true);
}
// We neither generate assembly nor have a stripping utility on Windows.
if (Platform.isWindows) {
@@ -469,7 +451,6 @@ main() async {
} else {
// Test unstripped ELF generation that is then externally stripped.
await testAOT(aotDillPath, stripUtil: true);
await testAOT(aotDillPath, stripUtil: true, useBare: false);
}
// TODO(sstrickl): Currently we can't assemble for SIMARM64 on MacOSX.
@@ -482,9 +463,7 @@
}
// Test unstripped assembly generation that is then externally stripped.
await testAOT(aotDillPath, useAsm: true);
await testAOT(aotDillPath, useAsm: true, useBare: false);
// Test stripped assembly generation that is then externally stripped.
await testAOT(aotDillPath, useAsm: true, stripFlag: true);
await testAOT(aotDillPath, useAsm: true, stripFlag: true, useBare: false);
});
}


@@ -4,7 +4,7 @@
// @dart = 2.9
// VMOptions=--always-generate-trampolines-for-testing --use-bare-instructions
// VMOptions=--always-generate-trampolines-for-testing
// We use a reasonable sized test and run it with the above options.
import 'hello_fuchsia_test.dart' as test;


@@ -2,7 +2,7 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
//
// VMOptions=--dwarf-stack-traces --save-debugging-info=async_lazy_debug.so --lazy-async-stacks --no-use-bare-instructions
// VMOptions=--dwarf-stack-traces --save-debugging-info=async_lazy_debug.so --lazy-async-stacks
// @dart = 2.9


@@ -4,9 +4,8 @@
// @dart = 2.9
// VMOptions=--use_bare_instructions=false
// VMOptions=--use_bare_instructions=true --use_table_dispatch=false
// VMOptions=--use_bare_instructions=true --use_table_dispatch=true
// VMOptions=--use_table_dispatch=false
// VMOptions=--use_table_dispatch=true
import "splay_test.dart" deferred as splay; // Some non-trivial code.


@@ -67,8 +67,6 @@ main(List<String> args) async {
// Compile kernel to ELF.
await run(genSnapshot, <String>[
"--use_bare_instructions=false", //# object: ok
"--use_bare_instructions=true", //# bare: ok
"--snapshot-kind=app-aot-elf",
"--elf=$snapshot",
"--loading-unit-manifest=$manifest",


@@ -1,105 +0,0 @@
// Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
// @dart = 2.9
// This test is ensuring that the flag for --use-bare-instructions given at
// AOT compile-time will be used at runtime (irrespective if other values were
// passed to the runtime).
import "dart:async";
import "dart:io";
import 'package:expect/expect.dart';
import 'package:path/path.dart' as path;
import 'use_flag_test_helper.dart';
main(List<String> args) async {
if (!isAOTRuntime) {
return; // Running in JIT: AOT binaries not available.
}
if (Platform.isAndroid) {
return; // SDK tree and dart_bootstrap not available on the test device.
}
await withTempDir('bare-flag-test', (String tempDir) async {
final script = path.join(sdkDir, 'pkg/kernel/bin/dump.dart');
final scriptDill = path.join(tempDir, 'kernel_dump.dill');
// Compile script to Kernel IR.
await run(genKernel, <String>[
'--aot',
'--platform=$platformDill',
'-o',
scriptDill,
script,
]);
// Run the AOT compiler with/without bare instructions.
final scriptBareSnapshot = path.join(tempDir, 'bare.snapshot');
final scriptNonBareSnapshot = path.join(tempDir, 'non_bare.snapshot');
await Future.wait(<Future>[
run(genSnapshot, <String>[
'--use-bare-instructions',
'--snapshot-kind=app-aot-elf',
'--elf=$scriptBareSnapshot',
scriptDill,
]),
run(genSnapshot, <String>[
'--no-use-bare-instructions',
'--snapshot-kind=app-aot-elf',
'--elf=$scriptNonBareSnapshot',
scriptDill,
]),
]);
// Run the resulting bare-AOT compiled script.
final bareOut1 = path.join(tempDir, 'bare-out1.txt');
final bareOut2 = path.join(tempDir, 'bare-out2.txt');
await Future.wait(<Future>[
run(aotRuntime, <String>[
'--use-bare-instructions',
scriptBareSnapshot,
scriptDill,
bareOut1,
]),
run(aotRuntime, <String>[
'--no-use-bare-instructions',
scriptBareSnapshot,
scriptDill,
bareOut2,
]),
]);
// Run the resulting non-bare-AOT compiled script.
final nonBareOut1 = path.join(tempDir, 'non-bare-out1.txt');
final nonBareOut2 = path.join(tempDir, 'non-bare-out2.txt');
await Future.wait(<Future>[
run(aotRuntime, <String>[
'--use-bare-instructions',
scriptNonBareSnapshot,
scriptDill,
nonBareOut1,
]),
run(aotRuntime, <String>[
'--no-use-bare-instructions',
scriptNonBareSnapshot,
scriptDill,
nonBareOut2,
]),
]);
// Ensure we got 4 times the same result.
final output = await readFile(bareOut1);
Expect.equals(output, await readFile(bareOut2));
Expect.equals(output, await readFile(nonBareOut1));
Expect.equals(output, await readFile(nonBareOut2));
});
}
Future<String> readFile(String file) {
return new File(file).readAsString();
}


@@ -109,7 +109,6 @@ Future<void> testJIT(String dillPath, String snapshotKind) async {
Future<void> testAOT(String dillPath,
{bool useAsm = false,
bool useBare = true,
bool forceDrops = false,
bool useDispatch = true,
bool stripUtil = false, // Note: forced true if useAsm.
@@ -129,9 +128,6 @@ Future<void> testAOT(String dillPath,
}
final descriptionBuilder = StringBuffer()..write(useAsm ? 'assembly' : 'elf');
if (!useBare) {
descriptionBuilder.write('-nonbare');
}
if (forceDrops) {
descriptionBuilder.write('-dropped');
}
@@ -158,7 +154,6 @@ Future<void> testAOT(String dillPath,
final snapshotPath = path.join(tempDir, 'test.snap');
final commonSnapshotArgs = [
if (stripFlag) '--strip', // gen_snapshot specific and not a VM flag.
useBare ? '--use-bare-instructions' : '--no-use-bare-instructions',
"--write-v8-snapshot-profile-to=$profilePath",
if (forceDrops) ...[
'--dwarf-stack-traces',
@@ -432,24 +427,11 @@ main() async {
// Test unstripped ELF generation directly.
await testAOT(aotDillPath);
await testAOT(aotDillPath, useBare: false);
await testAOT(aotDillPath, forceDrops: true);
await testAOT(aotDillPath, forceDrops: true, useBare: false);
await testAOT(aotDillPath, forceDrops: true, useDispatch: false);
await testAOT(aotDillPath,
forceDrops: true, useDispatch: false, useBare: false);
// Test flag-stripped ELF generation.
await testAOT(aotDillPath, stripFlag: true);
await testAOT(aotDillPath, useBare: false, stripFlag: true);
// Since we can't force disassembler support after the fact when running
// in PRODUCT mode, skip any --disassemble tests. Do these tests last as
// they have lots of output and so the log will be truncated.
if (!const bool.fromEnvironment('dart.vm.product')) {
// Regression test for dartbug.com/41149.
await testAOT(aotDillPath, useBare: false, disassemble: true);
}
// We neither generate assembly nor have a stripping utility on Windows.
if (Platform.isWindows) {
@@ -463,7 +445,6 @@ main() async {
} else {
// Test unstripped ELF generation that is then externally stripped.
await testAOT(aotDillPath, stripUtil: true);
await testAOT(aotDillPath, stripUtil: true, useBare: false);
}
// TODO(sstrickl): Currently we can't assemble for SIMARM64 on MacOSX.
@@ -476,9 +457,7 @@
}
// Test unstripped assembly generation that is then externally stripped.
await testAOT(aotDillPath, useAsm: true);
await testAOT(aotDillPath, useAsm: true, useBare: false);
// Test stripped assembly generation that is then externally stripped.
await testAOT(aotDillPath, useAsm: true, stripFlag: true);
await testAOT(aotDillPath, useAsm: true, stripFlag: true, useBare: false);
});
}


@@ -23,7 +23,6 @@ dart/null_safety_autodetection_in_kernel_compiler_test: Pass, Slow # Spawns seve
dart/slow_path_shared_stub_test: Pass, Slow # Uses --shared-slow-path-triggers-gc flag.
dart/snapshot_version_test: Skip # This test is a Dart1 test (script snapshot)
dart/stack_overflow_shared_test: Pass, Slow # Uses --shared-slow-path-triggers-gc flag.
dart/use_bare_instructions_flag_test: Pass, Slow # Spawns several subprocesses
dart_2/boxmint_test: Pass, Slow # Uses slow path
dart_2/byte_array_optimized_test: Pass, Slow
dart_2/data_uri_import_test/none: SkipByDesign
@@ -36,7 +35,6 @@ dart_2/null_safety_autodetection_in_kernel_compiler_test: Pass, Slow # Spawns se
dart_2/slow_path_shared_stub_test: Pass, Slow # Uses --shared-slow-path-triggers-gc flag.
dart_2/snapshot_version_test: Skip # This test is a Dart1 test (script snapshot)
dart_2/stack_overflow_shared_test: Pass, Slow # Uses --shared-slow-path-triggers-gc flag.
dart_2/use_bare_instructions_flag_test: Pass, Slow # Spawns several subprocesses
[ $arch == ia32 ]
dart/disassemble_aot_test: SkipByDesign # IA32 does not support AOT.
@@ -403,11 +401,9 @@ dart_2/data_uri*test: Skip # Data uri's not supported by dart2js or the analyzer
dart/emit_aot_size_info_flag_test: SkipByDesign # This test is for VM AOT only and is quite slow (so we don't run it in debug mode).
dart/split_aot_kernel_generation2_test: SkipByDesign # This test is for VM AOT only and is quite slow (so we don't run it in debug mode).
dart/split_aot_kernel_generation_test: SkipByDesign # This test is for VM AOT only and is quite slow (so we don't run it in debug mode).
dart/use_bare_instructions_flag_test: SkipByDesign # This test is for VM AOT only and is quite slow (so we don't run it in debug mode).
dart_2/emit_aot_size_info_flag_test: SkipByDesign # This test is for VM AOT only and is quite slow (so we don't run it in debug mode).
dart_2/split_aot_kernel_generation2_test: SkipByDesign # This test is for VM AOT only and is quite slow (so we don't run it in debug mode).
dart_2/split_aot_kernel_generation_test: SkipByDesign # This test is for VM AOT only and is quite slow (so we don't run it in debug mode).
dart_2/use_bare_instructions_flag_test: SkipByDesign # This test is for VM AOT only and is quite slow (so we don't run it in debug mode).
[ $mode != debug || $runtime != dart_precompiled ]
in_memory_elf_test: Skip # Tests a debug-mode flag to dart_precompiled_runtime.
@@ -427,10 +423,6 @@ dart_2/data_uri_spawn_test: SkipByDesign # Isolate.spawnUri
dart_2/isolates/send_object_to_spawn_uri_isolate_test: SkipByDesign # uses spawnUri
dart_2/issue32950_test: SkipByDesign # uses spawnUri.
[ $runtime != dart_precompiled || $system == android ]
dart/bare_instructions_trampolines_test: SkipByDesign # This test is for VM AOT only (android fails due to listing interfaces).
dart_2/bare_instructions_trampolines_test: SkipByDesign # This test is for VM AOT only (android fails due to listing interfaces).
[ $hot_reload || $hot_reload_rollback ]
dart/appjit*: SkipByDesign # Cannot reload with URI pointing to app snapshot.
dart/disassemble_determinism_test: SkipSlow # Runs expensive fibonacci(32) computation in 2 subprocesses


@@ -1775,7 +1775,7 @@ class CodeSerializationCluster : public SerializationCluster {
// the pool for references to other code objects (which might reside
// in the current loading unit).
ObjectPoolPtr pool = code->untag()->object_pool_;
if (s->kind() == Snapshot::kFullAOT && FLAG_use_bare_instructions) {
if (s->kind() == Snapshot::kFullAOT) {
TracePool(s, pool, /*only_code=*/is_deferred);
} else {
if (s->InCurrentLoadingUnitOrRoot(pool)) {
@@ -1824,8 +1824,8 @@
}
if (Code::IsDiscarded(code)) {
ASSERT(s->kind() == Snapshot::kFullAOT && FLAG_use_bare_instructions &&
FLAG_dwarf_stack_traces_mode && !FLAG_retain_code_objects);
ASSERT(s->kind() == Snapshot::kFullAOT && FLAG_dwarf_stack_traces_mode &&
!FLAG_retain_code_objects);
// Only object pool and static call table entries and the compressed
// stack maps should be pushed.
return;
@@ -2003,7 +2003,7 @@ class CodeSerializationCluster : public SerializationCluster {
if (FLAG_write_v8_snapshot_profile_to != nullptr) {
// If we are writing V8 snapshot profile then attribute references going
// through the object pool and static calls to the code object itself.
if (kind == Snapshot::kFullAOT && FLAG_use_bare_instructions &&
if (kind == Snapshot::kFullAOT &&
code->untag()->object_pool_ != ObjectPool::null()) {
ObjectPoolPtr pool = code->untag()->object_pool_;
// Non-empty per-code object pools should not be reachable in this mode.
@@ -2026,8 +2026,8 @@
if (Code::IsDiscarded(code)) {
// Only write instructions, compressed stackmaps and state bits
// for the discarded Code objects.
ASSERT(kind == Snapshot::kFullAOT && FLAG_use_bare_instructions &&
FLAG_dwarf_stack_traces_mode && !FLAG_retain_code_objects);
ASSERT(kind == Snapshot::kFullAOT && FLAG_dwarf_stack_traces_mode &&
!FLAG_retain_code_objects);
#if defined(DART_PRECOMPILER)
if (FLAG_write_v8_snapshot_profile_to != nullptr) {
// Keep the owner as a (possibly artificial) node for snapshot analysis.
@@ -2042,7 +2042,7 @@
// No need to write object pool out if we are producing full AOT
// snapshot with bare instructions.
if (!(kind == Snapshot::kFullAOT && FLAG_use_bare_instructions)) {
if (kind != Snapshot::kFullAOT) {
if (s->InCurrentLoadingUnitOrRoot(code->untag()->object_pool_)) {
WriteField(code, object_pool_);
} else {
@@ -2167,7 +2167,7 @@ class CodeDeserializationCluster : public DeserializationCluster {
// There would be a single global pool if this is a full AOT snapshot
// with bare instructions.
if (!(d->kind() == Snapshot::kFullAOT && FLAG_use_bare_instructions)) {
if (d->kind() != Snapshot::kFullAOT) {
code->untag()->object_pool_ = static_cast<ObjectPoolPtr>(d->ReadRef());
} else {
code->untag()->object_pool_ = ObjectPool::null();
@@ -2252,9 +2252,7 @@ class ObjectPoolSerializationCluster : public SerializationCluster {
ObjectPoolPtr pool = ObjectPool::RawCast(object);
objects_.Add(pool);
if (s->kind() == Snapshot::kFullAOT && FLAG_use_bare_instructions) {
// Treat pool as weak.
} else {
if (s->kind() != Snapshot::kFullAOT) {
const intptr_t length = pool->untag()->length_;
uint8_t* entry_bits = pool->untag()->entry_bits();
for (intptr_t i = 0; i < length; i++) {
@@ -2280,7 +2278,7 @@ class ObjectPoolSerializationCluster : public SerializationCluster {
}
void WriteFill(Serializer* s) {
bool weak = s->kind() == Snapshot::kFullAOT && FLAG_use_bare_instructions;
bool weak = s->kind() == Snapshot::kFullAOT;
const intptr_t count = objects_.length();
for (intptr_t i = 0; i < count; i++) {
@@ -2387,12 +2385,10 @@ class ObjectPoolDeserializationCluster : public DeserializationCluster {
ObjectPool::Patchability::kPatchable);
uword switchable_call_miss_entry_point = 0;
uword megamorphic_call_entry_point = 0;
if (FLAG_use_bare_instructions) {
switchable_call_miss_entry_point =
StubCode::SwitchableCallMiss().MonomorphicEntryPoint();
megamorphic_call_entry_point =
StubCode::MegamorphicCall().MonomorphicEntryPoint();
}
switchable_call_miss_entry_point =
StubCode::SwitchableCallMiss().MonomorphicEntryPoint();
megamorphic_call_entry_point =
StubCode::MegamorphicCall().MonomorphicEntryPoint();
#endif // defined(DART_PRECOMPILED_RUNTIME)
for (intptr_t id = start_index_; id < stop_index_; id++) {
@@ -2420,13 +2416,11 @@ class ObjectPoolDeserializationCluster : public DeserializationCluster {
}
#if defined(DART_PRECOMPILED_RUNTIME)
case ObjectPool::EntryType::kSwitchableCallMissEntryPoint:
ASSERT(FLAG_use_bare_instructions);
pool->untag()->entry_bits()[j] = immediate_bits;
entry.raw_value_ =
static_cast<intptr_t>(switchable_call_miss_entry_point);
break;
case ObjectPool::EntryType::kMegamorphicCallEntryPoint:
ASSERT(FLAG_use_bare_instructions);
pool->untag()->entry_bits()[j] = immediate_bits;
entry.raw_value_ =
static_cast<intptr_t>(megamorphic_call_entry_point);
@@ -4377,7 +4371,7 @@ class ClosureDeserializationCluster
void PostLoad(Deserializer* d, const Array& refs, bool primary) {
// We only cache the entry point in bare instructions mode (as we need
// to load the function anyway otherwise).
if (d->kind() == Snapshot::kFullAOT && FLAG_use_bare_instructions) {
if (d->kind() == Snapshot::kFullAOT) {
auto& closure = Closure::Handle(d->zone());
auto& func = Function::Handle(d->zone());
for (intptr_t i = start_index_; i < stop_index_; i++) {
@@ -5863,20 +5857,16 @@ class UnitSerializationRoots : public SerializationRoots {
const Object* deferred_object = (*unit_->deferred_objects())[i];
ASSERT(deferred_object->IsCode());
CodePtr code = static_cast<CodePtr>(deferred_object->ptr());
if (FLAG_use_bare_instructions) {
ObjectPoolPtr pool = code->untag()->object_pool_;
if (pool != ObjectPool::null()) {
const intptr_t length = pool->untag()->length_;
uint8_t* entry_bits = pool->untag()->entry_bits();
for (intptr_t i = 0; i < length; i++) {
auto entry_type = ObjectPool::TypeBits::decode(entry_bits[i]);
if (entry_type == ObjectPool::EntryType::kTaggedObject) {
s->Push(pool->untag()->data()[i].raw_obj_);
}
ObjectPoolPtr pool = code->untag()->object_pool_;
if (pool != ObjectPool::null()) {
const intptr_t length = pool->untag()->length_;
uint8_t* entry_bits = pool->untag()->entry_bits();
for (intptr_t i = 0; i < length; i++) {
auto entry_type = ObjectPool::TypeBits::decode(entry_bits[i]);
if (entry_type == ObjectPool::EntryType::kTaggedObject) {
s->Push(pool->untag()->data()[i].raw_obj_);
}
}
} else {
s->Push(code->untag()->object_pool_);
}
s->Push(code->untag()->compressed_stackmaps_);
s->Push(code->untag()->code_source_map_);
@@ -5901,33 +5891,28 @@ class UnitSerializationRoots : public SerializationRoots {
ASSERT(!Code::IsDiscarded(code));
s->WriteInstructions(code->untag()->instructions_,
code->untag()->unchecked_offset_, code, false);
if (!FLAG_use_bare_instructions) {
s->WriteRootRef(code->untag()->object_pool_, "deferred-code");
}
s->WriteRootRef(code->untag()->compressed_stackmaps_, "deferred-code");
s->WriteRootRef(code->untag()->code_source_map_, "deferred-code");
}
if (FLAG_use_bare_instructions) {
ObjectPoolPtr pool =
s->isolate_group()->object_store()->global_object_pool();
const intptr_t length = pool->untag()->length_;
uint8_t* entry_bits = pool->untag()->entry_bits();
intptr_t last_write = 0;
for (intptr_t i = 0; i < length; i++) {
auto entry_type = ObjectPool::TypeBits::decode(entry_bits[i]);
if (entry_type == ObjectPool::EntryType::kTaggedObject) {
if (s->IsWritten(pool->untag()->data()[i].raw_obj_)) {
intptr_t skip = i - last_write;
s->WriteUnsigned(skip);
s->WriteRootRef(pool->untag()->data()[i].raw_obj_,
"deferred-literal");
last_write = i;
}
ObjectPoolPtr pool =
s->isolate_group()->object_store()->global_object_pool();
const intptr_t length = pool->untag()->length_;
uint8_t* entry_bits = pool->untag()->entry_bits();
intptr_t last_write = 0;
for (intptr_t i = 0; i < length; i++) {
auto entry_type = ObjectPool::TypeBits::decode(entry_bits[i]);
if (entry_type == ObjectPool::EntryType::kTaggedObject) {
if (s->IsWritten(pool->untag()->data()[i].raw_obj_)) {
intptr_t skip = i - last_write;
s->WriteUnsigned(skip);
s->WriteRootRef(pool->untag()->data()[i].raw_obj_,
"deferred-literal");
last_write = i;
}
}
s->WriteUnsigned(length - last_write);
}
s->WriteUnsigned(length - last_write);
#endif
}
@@ -5966,8 +5951,7 @@ class UnitDeserializationRoots : public DeserializationRoots {
ASSERT(unchecked_entry_point != 0);
func->untag()->unchecked_entry_point_ = unchecked_entry_point;
#if defined(DART_PRECOMPILED_RUNTIME)
if (FLAG_use_bare_instructions &&
func->untag()->data()->IsHeapObject() &&
if (func->untag()->data()->IsHeapObject() &&
func->untag()->data()->IsClosureData()) {
// For closure functions in bare instructions mode, also update the
// cache inside the static implicit closure object, if any.
@@ -5980,27 +5964,22 @@ class UnitDeserializationRoots : public DeserializationRoots {
}
#endif
}
if (!FLAG_use_bare_instructions) {
code->untag()->object_pool_ = static_cast<ObjectPoolPtr>(d->ReadRef());
}
code->untag()->compressed_stackmaps_ =
static_cast<CompressedStackMapsPtr>(d->ReadRef());
code->untag()->code_source_map_ =
static_cast<CodeSourceMapPtr>(d->ReadRef());
}
if (FLAG_use_bare_instructions) {
ObjectPoolPtr pool =
d->isolate_group()->object_store()->global_object_pool();
const intptr_t length = pool->untag()->length_;
uint8_t* entry_bits = pool->untag()->entry_bits();
for (intptr_t i = d->ReadUnsigned(); i < length; i += d->ReadUnsigned()) {
auto entry_type = ObjectPool::TypeBits::decode(entry_bits[i]);
ASSERT(entry_type == ObjectPool::EntryType::kTaggedObject);
// The existing entry will usually be null, but it might also be an
// equivalent object that was duplicated in another loading unit.
pool->untag()->data()[i].raw_obj_ = d->ReadRef();
}
ObjectPoolPtr pool =
d->isolate_group()->object_store()->global_object_pool();
const intptr_t length = pool->untag()->length_;
uint8_t* entry_bits = pool->untag()->entry_bits();
for (intptr_t i = d->ReadUnsigned(); i < length; i += d->ReadUnsigned()) {
auto entry_type = ObjectPool::TypeBits::decode(entry_bits[i]);
ASSERT(entry_type == ObjectPool::EntryType::kTaggedObject);
// The existing entry will usually be null, but it might also be an
// equivalent object that was duplicated in another loading unit.
pool->untag()->data()[i].raw_obj_ = d->ReadRef();
}
// Reinitialize the dispatch table by rereading the table's serialization
@@ -6636,7 +6615,7 @@ intptr_t Serializer::PrepareInstructions() {
}
#if defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
if ((kind() == Snapshot::kFullAOT) && FLAG_use_bare_instructions) {
if (kind() == Snapshot::kFullAOT) {
// Group the code objects whose instructions are not being deferred in this
// snapshot unit in the order they will be written: first the code objects
// encountered for the first time in this unit being written by the
@@ -6690,7 +6669,7 @@ void Serializer::WriteInstructions(InstructionsPtr instr,
{offset_space, offset});
}
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
ASSERT(offset != 0);
RELEASE_ASSERT(offset >= previous_text_offset_);
const uint32_t delta = offset - previous_text_offset_;
@@ -7006,8 +6985,7 @@ ZoneGrowableArray<Object*>* Serializer::Serialize(SerializationRoots* roots) {
} else {
WriteUnsigned(0);
}
ASSERT((instructions_table_len_ == 0) ||
(FLAG_precompiled_mode && FLAG_use_bare_instructions));
ASSERT((instructions_table_len_ == 0) || FLAG_precompiled_mode);
WriteUnsigned(instructions_table_len_);
for (SerializationCluster* cluster : clusters) {
@@ -7710,75 +7688,56 @@ void Deserializer::ReadInstructions(CodePtr code,
if (deferred) {
ASSERT(!discarded);
#if defined(DART_PRECOMPILED_RUNTIME)
if (FLAG_use_bare_instructions) {
uword entry_point = StubCode::NotLoaded().EntryPoint();
code->untag()->entry_point_ = entry_point;
code->untag()->unchecked_entry_point_ = entry_point;
code->untag()->monomorphic_entry_point_ = entry_point;
code->untag()->monomorphic_unchecked_entry_point_ = entry_point;
code->untag()->instructions_length_ = 0;
return;
}
#endif
InstructionsPtr instr = StubCode::NotLoaded().instructions();
uint32_t unchecked_offset = 0;
code->untag()->instructions_ = instr;
#if defined(DART_PRECOMPILED_RUNTIME)
code->untag()->instructions_length_ = Instructions::Size(instr);
uword entry_point = StubCode::NotLoaded().EntryPoint();
code->untag()->entry_point_ = entry_point;
code->untag()->unchecked_entry_point_ = entry_point;
code->untag()->monomorphic_entry_point_ = entry_point;
code->untag()->monomorphic_unchecked_entry_point_ = entry_point;
code->untag()->instructions_length_ = 0;
return;
#else
code->untag()->unchecked_offset_ = unchecked_offset;
UNREACHABLE();
#endif
Code::InitializeCachedEntryPointsFrom(code, instr, unchecked_offset);
return;
}
#if defined(DART_PRECOMPILED_RUNTIME)
if (FLAG_use_bare_instructions) {
previous_text_offset_ += ReadUnsigned();
const uword payload_start =
image_reader_->GetBareInstructionsAt(previous_text_offset_);
const uint32_t payload_info = ReadUnsigned();
const uint32_t unchecked_offset = payload_info >> 1;
const bool has_monomorphic_entrypoint = (payload_info & 0x1) == 0x1;
previous_text_offset_ += ReadUnsigned();
const uword payload_start =
image_reader_->GetBareInstructionsAt(previous_text_offset_);
const uint32_t payload_info = ReadUnsigned();
const uint32_t unchecked_offset = payload_info >> 1;
const bool has_monomorphic_entrypoint = (payload_info & 0x1) == 0x1;
const uword entry_offset = has_monomorphic_entrypoint
? Instructions::kPolymorphicEntryOffsetAOT
: 0;
const uword monomorphic_entry_offset =
has_monomorphic_entrypoint ? Instructions::kMonomorphicEntryOffsetAOT
: 0;
const uword entry_offset =
has_monomorphic_entrypoint ? Instructions::kPolymorphicEntryOffsetAOT : 0;
const uword monomorphic_entry_offset =
has_monomorphic_entrypoint ? Instructions::kMonomorphicEntryOffsetAOT : 0;
const uword entry_point = payload_start + entry_offset;
const uword monomorphic_entry_point =
payload_start + monomorphic_entry_offset;
const uword entry_point = payload_start + entry_offset;
const uword monomorphic_entry_point =
payload_start + monomorphic_entry_offset;
ObjectPtr code_descriptor = code;
if (discarded) {
code_descriptor = static_cast<CompressedStackMapsPtr>(ReadRef());
}
instructions_table_.SetEntryAt(instructions_index_++, payload_start,
has_monomorphic_entrypoint, code_descriptor);
if (!discarded) {
// There are no serialized RawInstructions objects in this mode.
code->untag()->instructions_ = Instructions::null();
code->untag()->entry_point_ = entry_point;
code->untag()->unchecked_entry_point_ = entry_point + unchecked_offset;
code->untag()->monomorphic_entry_point_ = monomorphic_entry_point;
code->untag()->monomorphic_unchecked_entry_point_ =
monomorphic_entry_point + unchecked_offset;
}
return;
ObjectPtr code_descriptor = code;
if (discarded) {
code_descriptor = static_cast<CompressedStackMapsPtr>(ReadRef());
}
#endif
instructions_table_.SetEntryAt(instructions_index_++, payload_start,
has_monomorphic_entrypoint, code_descriptor);
if (!discarded) {
// There are no serialized RawInstructions objects in this mode.
code->untag()->instructions_ = Instructions::null();
code->untag()->entry_point_ = entry_point;
code->untag()->unchecked_entry_point_ = entry_point + unchecked_offset;
code->untag()->monomorphic_entry_point_ = monomorphic_entry_point;
code->untag()->monomorphic_unchecked_entry_point_ =
monomorphic_entry_point + unchecked_offset;
}
#else
InstructionsPtr instr = image_reader_->GetInstructionsAt(Read<uint32_t>());
uint32_t unchecked_offset = ReadUnsigned();
code->untag()->instructions_ = instr;
#if defined(DART_PRECOMPILED_RUNTIME)
code->untag()->instructions_length_ = Instructions::Size(instr);
#else
code->untag()->unchecked_offset_ = unchecked_offset;
if (kind() == Snapshot::kFullJIT) {
const uint32_t active_offset = Read<uint32_t>();
@@ -7786,36 +7745,34 @@
unchecked_offset = ReadUnsigned();
}
code->untag()->active_instructions_ = instr;
#endif
Code::InitializeCachedEntryPointsFrom(code, instr, unchecked_offset);
#endif // defined(DART_PRECOMPILED_RUNTIME)
}
void Deserializer::EndInstructions() {
#if defined(DART_PRECOMPILED_RUNTIME)
if (FLAG_use_bare_instructions) {
uword previous_end = image_reader_->GetBareInstructionsEnd();
for (intptr_t i = instructions_index_ - 1; i >= 0; --i) {
ObjectPtr descriptor = instructions_table_.DescriptorAt(i);
uword start = instructions_table_.PayloadStartAt(i);
ASSERT(start <= previous_end);
if (descriptor->IsCode()) {
CodePtr code = static_cast<CodePtr>(descriptor);
code->untag()->instructions_length_ = previous_end - start;
}
previous_end = start;
uword previous_end = image_reader_->GetBareInstructionsEnd();
for (intptr_t i = instructions_index_ - 1; i >= 0; --i) {
ObjectPtr descriptor = instructions_table_.DescriptorAt(i);
uword start = instructions_table_.PayloadStartAt(i);
ASSERT(start <= previous_end);
if (descriptor->IsCode()) {
CodePtr code = static_cast<CodePtr>(descriptor);
code->untag()->instructions_length_ = previous_end - start;
}
previous_end = start;
}
ObjectStore* object_store = IsolateGroup::Current()->object_store();
GrowableObjectArray& tables =
GrowableObjectArray::Handle(zone_, object_store->instructions_tables());
if (tables.IsNull()) {
tables = GrowableObjectArray::New(Heap::kOld);
object_store->set_instructions_tables(tables);
}
if ((tables.Length() == 0) ||
(tables.At(tables.Length() - 1) != instructions_table_.ptr())) {
tables.Add(instructions_table_, Heap::kOld);
}
ObjectStore* object_store = IsolateGroup::Current()->object_store();
GrowableObjectArray& tables =
GrowableObjectArray::Handle(zone_, object_store->instructions_tables());
if (tables.IsNull()) {
tables = GrowableObjectArray::New(Heap::kOld);
object_store->set_instructions_tables(tables);
}
if ((tables.Length() == 0) ||
(tables.At(tables.Length() - 1) != instructions_table_.ptr())) {
tables.Add(instructions_table_, Heap::kOld);
}
#endif
}
@@ -7858,7 +7815,7 @@ void Deserializer::Deserialize(DeserializationRoots* roots) {
#if defined(DART_PRECOMPILED_RUNTIME)
if (instructions_table_len > 0) {
ASSERT(FLAG_precompiled_mode && FLAG_use_bare_instructions);
ASSERT(FLAG_precompiled_mode);
const uword start_pc = image_reader_->GetBareInstructionsAt(0);
const uword end_pc = image_reader_->GetBareInstructionsEnd();
instructions_table_ =


@@ -96,7 +96,7 @@ void CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
const Code& caller_code,
const Object& data,
const Code& target) {
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
BareSwitchableCallPattern call(return_address);
call.SetData(data);
call.SetTarget(target);
@@ -109,7 +109,7 @@ void CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
uword CodePatcher::GetSwitchableCallTargetEntryAt(uword return_address,
const Code& caller_code) {
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
BareSwitchableCallPattern call(return_address);
return call.target_entry();
} else {
@@ -120,7 +120,7 @@ uword CodePatcher::GetSwitchableCallTargetEntryAt(uword return_address,
ObjectPtr CodePatcher::GetSwitchableCallDataAt(uword return_address,
const Code& caller_code) {
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
BareSwitchableCallPattern call(return_address);
return call.data();
} else {


@@ -132,7 +132,7 @@ void CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
const Code& caller_code,
const Object& data,
const Code& target) {
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
BareSwitchableCallPattern call(return_address);
call.SetData(data);
call.SetTarget(target);
@@ -145,7 +145,7 @@ void CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
uword CodePatcher::GetSwitchableCallTargetEntryAt(uword return_address,
const Code& caller_code) {
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
BareSwitchableCallPattern call(return_address);
return call.target_entry();
} else {
@@ -156,7 +156,7 @@ uword CodePatcher::GetSwitchableCallTargetEntryAt(uword return_address,
ObjectPtr CodePatcher::GetSwitchableCallDataAt(uword return_address,
const Code& caller_code) {
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
BareSwitchableCallPattern call(return_address);
return call.data();
} else {


@@ -486,7 +486,7 @@ void CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
const Code& caller_code,
const Object& data,
const Code& target) {
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
BareSwitchableCall call(return_address);
call.SetData(data);
call.SetTarget(target);
@@ -499,7 +499,7 @@ void CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
uword CodePatcher::GetSwitchableCallTargetEntryAt(uword return_address,
const Code& caller_code) {
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
BareSwitchableCall call(return_address);
return call.target_entry();
} else {
@@ -510,7 +510,7 @@ uword CodePatcher::GetSwitchableCallTargetEntryAt(uword return_address,
ObjectPtr CodePatcher::GetSwitchableCallDataAt(uword return_address,
const Code& caller_code) {
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
BareSwitchableCall call(return_address);
return call.data();
} else {


@@ -455,12 +455,10 @@ void Precompiler::DoCompileAll() {
retained_reasons_writer_ = &reasons_writer;
}
if (FLAG_use_bare_instructions) {
// Since we keep the object pool until the end of AOT compilation, it
// will hang on to its entries until the very end. Therefore we have
// to use handles which survive that long, so we use [zone_] here.
global_object_pool_builder_.InitializeWithZone(zone_);
}
// Since we keep the object pool until the end of AOT compilation, it
// will hang on to its entries until the very end. Therefore we have
// to use handles which survive that long, so we use [zone_] here.
global_object_pool_builder_.InitializeWithZone(zone_);
{
HANDLESCOPE(T);
@@ -481,7 +479,7 @@
// as well as other type checks.
HierarchyInfo hierarchy_info(T);
if (FLAG_use_bare_instructions && FLAG_use_table_dispatch) {
if (FLAG_use_table_dispatch) {
dispatch_table_generator_ = new compiler::DispatchTableGenerator(Z);
dispatch_table_generator_->Initialize(IG->class_table());
}
@@ -489,7 +487,7 @@
// Precompile constructors to compute information such as
// optimized instruction count (used in inlining heuristics).
ClassFinalizer::ClearAllCode(
/*including_nonchanging_cids=*/FLAG_use_bare_instructions);
/*including_nonchanging_cids=*/true);
{
CompilerState state(thread_, /*is_aot=*/true, /*is_optimizing=*/true);
@@ -497,14 +495,14 @@
}
ClassFinalizer::ClearAllCode(
/*including_nonchanging_cids=*/FLAG_use_bare_instructions);
/*including_nonchanging_cids=*/true);
tracer_ = PrecompilerTracer::StartTracingIfRequested(this);
// All stubs have already been generated, all of them share the same pool.
// We use that pool to initialize our global object pool, to guarantee
// stubs as well as code compiled from here on will have the same pool.
if (FLAG_use_bare_instructions) {
{
// We use any stub here to get its object pool (all stubs share the
// same object pool in bare instructions mode).
const Code& code = StubCode::LazyCompile();
@@ -571,7 +569,7 @@
// [Type]-specialized stubs.
AttachOptimizedTypeTestingStub();
if (FLAG_use_bare_instructions) {
{
// Now we generate the actual object pool instance and attach it to the
// object store. The AOT runtime will use it from there in the enter
// dart code stub.
@@ -853,9 +851,7 @@ void Precompiler::CollectCallbackFields() {
void Precompiler::ProcessFunction(const Function& function) {
HANDLESCOPE(T);
const intptr_t gop_offset =
FLAG_use_bare_instructions ? global_object_pool_builder()->CurrentLength()
: 0;
const intptr_t gop_offset = global_object_pool_builder()->CurrentLength();
RELEASE_ASSERT(!function.HasCode());
// Ffi trampoline functions have no signature.
ASSERT(function.kind() == UntaggedFunction::kFfiTrampoline ||
@@ -949,7 +945,7 @@ void Precompiler::AddCalleesOf(const Function& function, intptr_t gop_offset) {
// rather than scanning global object pool - because we want to include
// *all* outgoing references into the trace. Scanning GOP would exclude
// references that have been deduplicated.
if (FLAG_use_bare_instructions && !is_tracing()) {
if (!is_tracing()) {
for (intptr_t i = gop_offset;
i < global_object_pool_builder()->CurrentLength(); i++) {
const auto& wrapper_entry = global_object_pool_builder()->EntryAt(i);
@@ -1442,7 +1438,7 @@ void Precompiler::AddSelector(const String& selector) {
}
void Precompiler::AddTableSelector(const compiler::TableSelector* selector) {
ASSERT(FLAG_use_bare_instructions && FLAG_use_table_dispatch);
ASSERT(FLAG_use_table_dispatch);
if (is_tracing()) {
tracer_->WriteTableSelectorRef(selector->id);
@@ -1455,7 +1451,7 @@ void Precompiler::AddTableSelector(const compiler::TableSelector* selector) {
}
bool Precompiler::IsHitByTableSelector(const Function& function) {
if (!(FLAG_use_bare_instructions && FLAG_use_table_dispatch)) {
if (!FLAG_use_table_dispatch) {
return false;
}
@@ -2007,7 +2003,7 @@ void Precompiler::TraceForRetainedFunctions() {
void Precompiler::FinalizeDispatchTable() {
PRECOMPILER_TIMER_SCOPE(this, FinalizeDispatchTable);
if (!FLAG_use_bare_instructions || !FLAG_use_table_dispatch) return;
if (!FLAG_use_table_dispatch) return;
HANDLESCOPE(T);
// Build the entries used to serialize the dispatch table before
// dropping functions, as we may clear references to Code objects.
@@ -2066,11 +2062,8 @@ void Precompiler::ReplaceFunctionStaticCallEntries() {
// the old references to the CallStaticFunction stub, but it is sufficient
// for the local pool to include the actual call target.
compiler::ObjectPoolBuilder builder;
bool append_to_pool = FLAG_use_bare_instructions;
if (append_to_pool) {
pool_ = code.object_pool();
pool_.CopyInto(&builder);
}
pool_ = code.object_pool();
pool_.CopyInto(&builder);
for (auto& view : static_calls) {
kind_and_offset_ = view.Get<Code::kSCallTableKindAndOffset>();
@@ -2093,9 +2086,7 @@
Code::OffsetField::decode(kind_and_offset_.Value());
const uword pc = pc_offset + code.PayloadStart();
CodePatcher::PatchStaticCallAt(pc, code, target_code_);
if (append_to_pool) {
builder.AddObject(Object::ZoneHandle(target_code_.ptr()));
}
builder.AddObject(Object::ZoneHandle(target_code_.ptr()));
}
if (FLAG_trace_precompiler) {
THR_Print("Updated static call entry to %s in \"%s\"\n",
@@ -2104,9 +2095,7 @@
}
}
if (append_to_pool) {
code.set_object_pool(ObjectPool::NewFromBuilder(builder));
}
code.set_object_pool(ObjectPool::NewFromBuilder(builder));
}
private:
@@ -2902,11 +2891,9 @@ void Precompiler::DiscardCodeObjects() {
intptr_t discarded_codes_ = 0;
};
// Code objects are stored in stack frames if not use_bare_instructions.
// Code objects are used by stack traces if not dwarf_stack_traces.
// Code objects are used by profiler in non-PRODUCT mode.
if (!FLAG_use_bare_instructions || !FLAG_dwarf_stack_traces_mode ||
FLAG_retain_code_objects) {
if (!FLAG_dwarf_stack_traces_mode || FLAG_retain_code_objects) {
return;
}
@@ -3065,9 +3052,7 @@ void PrecompileParsedFunctionHelper::FinalizeCompilation(
Array::Handle(zone, graph_compiler->CreateDeoptInfo(assembler));
// Allocates instruction object. Since this occurs only at safepoint,
// there can be no concurrent access to the instruction page.
const auto pool_attachment = FLAG_use_bare_instructions
? Code::PoolAttachment::kNotAttachPool
: Code::PoolAttachment::kAttachPool;
const auto pool_attachment = Code::PoolAttachment::kNotAttachPool;
SafepointWriteRwLocker ml(T, T->isolate_group()->program_lock());
const Code& code = Code::Handle(
@@ -3193,19 +3178,17 @@ bool PrecompileParsedFunctionHelper::Compile(CompilationPipeline* pipeline) {
ASSERT(pass_state.inline_id_to_function.length() ==
pass_state.caller_inline_id.length());
ASSERT(!FLAG_use_bare_instructions || precompiler_ != nullptr);
ASSERT(precompiler_ != nullptr);
if (FLAG_use_bare_instructions) {
// When generating code in bare instruction mode all code objects
// share the same global object pool. To reduce interleaving of
// unrelated object pool entries from different code objects
// we attempt to pregenerate stubs referenced by the code
// we are going to generate.
//
// Reducing interleaving means reducing recompilations triggered by
// failure to commit object pool into the global object pool.
GenerateNecessaryAllocationStubs(flow_graph);
}
// When generating code in bare instruction mode all code objects
// share the same global object pool. To reduce interleaving of
// unrelated object pool entries from different code objects
// we attempt to pregenerate stubs referenced by the code
// we are going to generate.
//
// Reducing interleaving means reducing recompilations triggered by
// failure to commit object pool into the global object pool.
GenerateNecessaryAllocationStubs(flow_graph);
// Even in bare instructions mode we don't directly add objects into
// the global object pool because code generation can bail out
@@ -3220,9 +3203,7 @@ bool PrecompileParsedFunctionHelper::Compile(CompilationPipeline* pipeline) {
// some stubs). If this indeed happens we retry the compilation.
// (See TryCommitToParent invocation below).
compiler::ObjectPoolBuilder object_pool_builder(
FLAG_use_bare_instructions
? precompiler_->global_object_pool_builder()
: nullptr);
precompiler_->global_object_pool_builder());
compiler::Assembler assembler(&object_pool_builder, use_far_branches);
CodeStatistics* function_stats = NULL;
@@ -3283,8 +3264,7 @@ bool PrecompileParsedFunctionHelper::Compile(CompilationPipeline* pipeline) {
// method will lead to the same IR due to instability of inlining
// heuristics (under some conditions we might end up inlining
// more aggressively on the second attempt).
if (FLAG_use_bare_instructions &&
!object_pool_builder.TryCommitToParent()) {
if (!object_pool_builder.TryCommitToParent()) {
done = false;
continue;
}


@@ -243,12 +243,11 @@ class Precompiler : public ValueObject {
}
compiler::ObjectPoolBuilder* global_object_pool_builder() {
ASSERT(FLAG_use_bare_instructions);
return &global_object_pool_builder_;
}
compiler::SelectorMap* selector_map() {
ASSERT(FLAG_use_bare_instructions && FLAG_use_table_dispatch);
ASSERT(FLAG_use_table_dispatch);
return dispatch_table_generator_->selector_map();
}


@@ -90,7 +90,7 @@ void PrecompilerTracer::WriteEntityTable() {
const auto& fun = Function::Cast(obj);
cls_ = fun.Owner();
const intptr_t selector_id =
FLAG_use_bare_instructions && FLAG_use_table_dispatch
FLAG_use_table_dispatch
? precompiler_->selector_map()->SelectorId(fun)
: -1;
Write("\"%c\",%" Pd ",%" Pd ",%" Pd "",


@@ -1620,7 +1620,7 @@ void Assembler::LoadPoolPointer(Register reg) {
}
void Assembler::SetupGlobalPoolAndDispatchTable() {
ASSERT(FLAG_precompiled_mode && FLAG_use_bare_instructions);
ASSERT(FLAG_precompiled_mode);
ldr(PP, Address(THR, target::Thread::global_object_pool_offset()));
if (FLAG_use_table_dispatch) {
ldr(DISPATCH_TABLE_REG,
@@ -3361,7 +3361,7 @@ void Assembler::EnterDartFrame(intptr_t frame_size, bool load_pool_pointer) {
COMPILE_ASSERT(CODE_REG < FP);
COMPILE_ASSERT(FP < LINK_REGISTER.code);
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions)) {
if (!FLAG_precompiled_mode) {
SPILLS_LR_TO_FRAME(
EnterFrame((1 << PP) | (1 << CODE_REG) | (1 << FP) | (1 << LR), 0));
@@ -3391,7 +3391,7 @@ void Assembler::EnterOsrFrame(intptr_t extra_size) {
}
void Assembler::LeaveDartFrame() {
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions)) {
if (!FLAG_precompiled_mode) {
ldr(PP, Address(FP, target::frame_layout.saved_caller_pp_from_fp *
target::kWordSize));
}
@@ -3403,7 +3403,7 @@ void Assembler::LeaveDartFrameAndReturn() {
}
void Assembler::LeaveDartFrameAndReturn() {
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions)) {
if (!FLAG_precompiled_mode) {
ldr(PP, Address(FP, target::frame_layout.saved_caller_pp_from_fp *
target::kWordSize));
}


@@ -1565,7 +1565,7 @@ void Assembler::RestorePinnedRegisters() {
}
void Assembler::SetupGlobalPoolAndDispatchTable() {
ASSERT(FLAG_precompiled_mode && FLAG_use_bare_instructions);
ASSERT(FLAG_precompiled_mode);
ldr(PP, Address(THR, target::Thread::global_object_pool_offset()));
sub(PP, PP, Operand(kHeapObjectTag)); // Pool in PP is untagged!
if (FLAG_use_table_dispatch) {
@@ -1648,7 +1648,7 @@ void Assembler::EnterDartFrame(intptr_t frame_size, Register new_pp) {
// Setup the frame.
EnterFrame(0);
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions)) {
if (!FLAG_precompiled_mode) {
TagAndPushPPAndPcMarker(); // Save PP and PC marker.
// Load the pool pointer.
@@ -1683,7 +1683,7 @@ void Assembler::EnterOsrFrame(intptr_t extra_size, Register new_pp) {
}
void Assembler::LeaveDartFrame(RestorePP restore_pp) {
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions)) {
if (!FLAG_precompiled_mode) {
if (restore_pp == kRestoreCallerPP) {
// Restore and untag PP.
LoadFromOffset(
@@ -1820,7 +1820,7 @@ void Assembler::TransitionNativeToGenerated(Register state,
void Assembler::EnterCallRuntimeFrame(intptr_t frame_size, bool is_leaf) {
Comment("EnterCallRuntimeFrame");
EnterFrame(0);
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions)) {
if (!FLAG_precompiled_mode) {
TagAndPushPPAndPcMarker(); // Save PP and PC marker.
}


@@ -1845,7 +1845,7 @@ static const RegisterSet kVolatileRegisterSet(
void Assembler::EnterCallRuntimeFrame(intptr_t frame_space) {
Comment("EnterCallRuntimeFrame");
EnterFrame(0);
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions)) {
if (!FLAG_precompiled_mode) {
pushq(CODE_REG);
pushq(PP);
}
@@ -1939,7 +1939,7 @@ void Assembler::LoadPoolPointer(Register pp) {
void Assembler::EnterDartFrame(intptr_t frame_size, Register new_pp) {
ASSERT(!constant_pool_allowed());
EnterFrame(0);
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions)) {
if (!FLAG_precompiled_mode) {
pushq(CODE_REG);
pushq(PP);
if (new_pp == kNoRegister) {
@@ -1956,7 +1956,7 @@ void Assembler::EnterDartFrame(intptr_t frame_size, Register new_pp) {
void Assembler::LeaveDartFrame(RestorePP restore_pp) {
// Restore caller's PP register that was pushed in EnterDartFrame.
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions)) {
if (!FLAG_precompiled_mode) {
if (restore_pp == kRestoreCallerPP) {
movq(PP, Address(RBP, (target::frame_layout.saved_caller_pp_from_fp *
target::kWordSize)));


@@ -245,7 +245,7 @@ void Disassembler::DisassembleCodeHelper(const char* function_fullname,
ASSERT(code.pointer_offsets_length() == 0);
#endif
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
THR_Print("(No object pool for bare instructions.)\n");
} else {
const ObjectPool& object_pool =
@@ -480,7 +480,7 @@ void Disassembler::DisassembleStub(const char* name, const Code& code) {
code.Disassemble(&formatter);
THR_Print("}\n");
const ObjectPool& object_pool = ObjectPool::Handle(code.object_pool());
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
THR_Print("(No object pool for bare instructions.)\n");
} else if (!object_pool.IsNull()) {
object_pool.DebugPrint();


@@ -3687,19 +3687,17 @@ static intptr_t LoadingUnitOf(Zone* zone, const Code& code) {
}
bool FlowGraphCompiler::CanPcRelativeCall(const Function& target) const {
return FLAG_precompiled_mode && FLAG_use_bare_instructions &&
return FLAG_precompiled_mode &&
(LoadingUnitOf(zone_, function()) == LoadingUnitOf(zone_, target));
}
bool FlowGraphCompiler::CanPcRelativeCall(const Code& target) const {
return FLAG_precompiled_mode && FLAG_use_bare_instructions &&
!target.InVMIsolateHeap() &&
return FLAG_precompiled_mode && !target.InVMIsolateHeap() &&
(LoadingUnitOf(zone_, function()) == LoadingUnitOf(zone_, target));
}
bool FlowGraphCompiler::CanPcRelativeCall(const AbstractType& target) const {
return FLAG_precompiled_mode && FLAG_use_bare_instructions &&
!target.InVMIsolateHeap() &&
return FLAG_precompiled_mode && !target.InVMIsolateHeap() &&
(LoadingUnitOf(zone_, function()) == LoadingUnit::kRootId);
}


@@ -29,7 +29,7 @@ DEFINE_FLAG(bool, unbox_doubles, true, "Optimize double arithmetic.");
DECLARE_FLAG(bool, enable_simd_inline);
void FlowGraphCompiler::ArchSpecificInitialization() {
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
auto object_store = isolate_group()->object_store();
const auto& stub =
@@ -286,7 +286,7 @@ void FlowGraphCompiler::GenerateMethodExtractorIntrinsic(
// R1 = extracted function
// R4 = offset of type argument vector (or 0 if class is not generic)
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
kPoolReg = PP;
} else {
__ LoadFieldFromOffset(kPoolReg, CODE_REG,
@@ -340,7 +340,7 @@ void FlowGraphCompiler::EmitFrameEntry() {
ASSERT(StackSize() >= 0);
__ EnterDartFrame(StackSize() * compiler::target::kWordSize);
}
} else if (FLAG_use_bare_instructions) {
} else if (FLAG_precompiled_mode) {
assembler()->set_constant_pool_allowed(true);
}
}
@@ -548,20 +548,11 @@ void FlowGraphCompiler::EmitMegamorphicInstanceCall(
(args_desc.Count() - 1) * compiler::target::kWordSize);
// Use same code pattern as instance call so it can be parsed by code patcher.
if (FLAG_precompiled_mode) {
if (FLAG_use_bare_instructions) {
// The AOT runtime will replace the slot in the object pool with the
// entrypoint address - see app_snapshot.cc.
CLOBBERS_LR(__ LoadUniqueObject(LR, StubCode::MegamorphicCall()));
} else {
__ LoadUniqueObject(CODE_REG, StubCode::MegamorphicCall());
CLOBBERS_LR(
__ ldr(LR, compiler::FieldAddress(
CODE_REG, compiler::target::Code::entry_point_offset(
Code::EntryKind::kMonomorphic))));
}
// The AOT runtime will replace the slot in the object pool with the
// entrypoint address - see app_snapshot.cc.
CLOBBERS_LR(__ LoadUniqueObject(LR, StubCode::MegamorphicCall()));
__ LoadUniqueObject(R9, cache);
CLOBBERS_LR(__ blx(LR));
} else {
__ LoadUniqueObject(R9, cache);
__ LoadUniqueObject(CODE_REG, StubCode::MegamorphicCall());
@ -609,7 +600,7 @@ void FlowGraphCompiler::EmitInstanceCallAOT(const ICData& ic_data,
__ LoadFromOffset(
R0, SP,
(ic_data.SizeWithoutTypeArgs() - 1) * compiler::target::kWordSize);
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
// The AOT runtime will replace the slot in the object pool with the
// entrypoint address - see app_snapshot.cc.
CLOBBERS_LR(__ LoadUniqueObject(LR, initial_stub));
@ -661,7 +652,7 @@ void FlowGraphCompiler::EmitOptimizedStaticCall(
if (function.HasOptionalParameters() || function.IsGeneric()) {
__ LoadObject(R4, arguments_descriptor);
} else {
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions)) {
if (!FLAG_precompiled_mode) {
__ LoadImmediate(R4, 0); // GC safe smi zero because of stub.
}
}

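The comment about app_snapshot.cc, repeated in several hunks here, describes how switchable calls stay retargetable without code patching: the call site always loads its target from an object-pool slot, and the AOT runtime rewrites that slot. A toy model of the mechanism (the pool-of-function-pointers representation is an illustrative assumption):

#include <cstdint>
#include <vector>

using Entry = void (*)();

// Toy object pool: code indexes it, the runtime may overwrite entries.
static std::vector<Entry> object_pool;

// Call-site shape: one pool load plus one indirect call, never patched.
static void SwitchableCall(intptr_t pool_index) {
  object_pool[pool_index]();
}

// Runtime side (app_snapshot.cc in the real VM): retargeting a call means
// storing a new entry point into the slot the call site already reads.
static void PatchPoolSlot(intptr_t pool_index, Entry new_target) {
  object_pool[pool_index] = new_target;
}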

@ -28,7 +28,7 @@ DEFINE_FLAG(bool, trap_on_deoptimization, false, "Trap on deoptimization.");
DECLARE_FLAG(bool, enable_simd_inline);
void FlowGraphCompiler::ArchSpecificInitialization() {
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
auto object_store = isolate_group()->object_store();
const auto& stub =
@ -278,7 +278,7 @@ void FlowGraphCompiler::GenerateMethodExtractorIntrinsic(
// R1 = extracted function
// R4 = offset of type argument vector (or 0 if class is not generic)
intptr_t pp_offset = 0;
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
// PP is not tagged on arm64.
kPoolReg = PP;
pp_offset = kHeapObjectTag;
@ -332,7 +332,7 @@ void FlowGraphCompiler::EmitFrameEntry() {
ASSERT(StackSize() >= 0);
__ EnterDartFrame(StackSize() * kWordSize);
}
} else if (FLAG_use_bare_instructions) {
} else if (FLAG_precompiled_mode) {
assembler()->set_constant_pool_allowed(true);
}
}
@ -542,7 +542,7 @@ void FlowGraphCompiler::EmitMegamorphicInstanceCall(
const intptr_t stub_index = op.AddObject(
StubCode::MegamorphicCall(), ObjectPool::Patchability::kPatchable);
ASSERT((data_index + 1) == stub_index);
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
// The AOT runtime will replace the slot in the object pool with the
// entrypoint address - see app_snapshot.cc.
CLOBBERS_LR(__ LoadDoubleWordFromPoolIndex(R5, LR, data_index));
@ -602,7 +602,7 @@ void FlowGraphCompiler::EmitInstanceCallAOT(const ICData& ic_data,
op.AddObject(initial_stub, ObjectPool::Patchability::kPatchable);
ASSERT((data_index + 1) == initial_stub_index);
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
// The AOT runtime will replace the slot in the object pool with the
// entrypoint address - see app_snapshot.cc.
CLOBBERS_LR(__ LoadDoubleWordFromPoolIndex(R5, LR, data_index));
@ -653,7 +653,7 @@ void FlowGraphCompiler::EmitOptimizedStaticCall(
if (function.HasOptionalParameters() || function.IsGeneric()) {
__ LoadObject(R4, arguments_descriptor);
} else {
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions)) {
if (!FLAG_precompiled_mode) {
__ LoadImmediate(R4, 0); // GC safe smi zero because of stub.
}
}


@ -28,7 +28,7 @@ DEFINE_FLAG(bool, trap_on_deoptimization, false, "Trap on deoptimization.");
DECLARE_FLAG(bool, enable_simd_inline);
void FlowGraphCompiler::ArchSpecificInitialization() {
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
auto object_store = isolate_group()->object_store();
const auto& stub =
@ -282,7 +282,7 @@ void FlowGraphCompiler::GenerateMethodExtractorIntrinsic(
// RBX = extracted function
// RDX = offset of type argument vector (or 0 if class is not generic)
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
kPoolReg = PP;
} else {
__ movq(kPoolReg,
@ -301,7 +301,7 @@ void FlowGraphCompiler::GenerateMethodExtractorIntrinsic(
// needs to be updated to match.
void FlowGraphCompiler::EmitFrameEntry() {
if (!flow_graph().graph_entry()->NeedsFrame()) {
if (FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
assembler()->set_constant_pool_allowed(true);
}
return;
@ -552,16 +552,9 @@ void FlowGraphCompiler::EmitMegamorphicInstanceCall(
// Use same code pattern as instance call so it can be parsed by code patcher.
if (FLAG_precompiled_mode) {
if (FLAG_use_bare_instructions) {
// The AOT runtime will replace the slot in the object pool with the
// entrypoint address - see app_snapshot.cc.
__ LoadUniqueObject(RCX, StubCode::MegamorphicCall());
} else {
__ LoadUniqueObject(CODE_REG, StubCode::MegamorphicCall());
__ movq(RCX, compiler::FieldAddress(CODE_REG,
Code::entry_point_offset(
Code::EntryKind::kMonomorphic)));
}
// The AOT runtime will replace the slot in the object pool with the
// entrypoint address - see app_snapshot.cc.
__ LoadUniqueObject(RCX, StubCode::MegamorphicCall());
__ LoadUniqueObject(RBX, cache);
__ call(RCX);
} else {
@ -610,7 +603,7 @@ void FlowGraphCompiler::EmitInstanceCallAOT(const ICData& ic_data,
__ Comment("InstanceCallAOT (%s)", switchable_call_mode);
__ movq(RDX, compiler::Address(
RSP, (ic_data.SizeWithoutTypeArgs() - 1) * kWordSize));
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
// The AOT runtime will replace the slot in the object pool with the
// entrypoint address - see app_snapshot.cc.
__ LoadUniqueObject(RCX, initial_stub);
@ -643,7 +636,7 @@ void FlowGraphCompiler::EmitOptimizedStaticCall(
if (function.HasOptionalParameters() || function.IsGeneric()) {
__ LoadObject(R10, arguments_descriptor);
} else {
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions)) {
if (!FLAG_precompiled_mode) {
__ xorl(R10, R10); // GC safe smi zero because of stub.
}
}


@ -599,7 +599,7 @@ void ClosureCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ LoadObject(R4, arguments_descriptor);
ASSERT(locs()->in(0).reg() == R0);
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
// R0: Closure with a cached entry point.
__ ldr(R2, compiler::FieldAddress(
R0, compiler::target::Closure::entry_point_offset()));
@ -1493,7 +1493,7 @@ void FfiCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
// Restore the global object pool after returning from runtime (old space is
// moving, so the GOP could have been relocated).
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
__ SetupGlobalPoolAndDispatchTable();
}
}
@ -1660,7 +1660,7 @@ void NativeEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
// Put the code object in the reserved slot.
__ StoreToOffset(CODE_REG, FPREG,
kPcMarkerSlotFromFp * compiler::target::kWordSize);
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
__ SetupGlobalPoolAndDispatchTable();
} else {
__ LoadImmediate(PP, 0); // GC safe value into PP.
@ -4768,7 +4768,7 @@ LocationSummary* BoxInt64Instr::MakeLocationSummary(Zone* zone,
const intptr_t kNumInputs = 1;
const intptr_t kNumTemps = ValueFitsSmi() ? 0 : 1;
// Shared slow path is used in BoxInt64Instr::EmitNativeCode in
// FLAG_use_bare_instructions mode and only after VM isolate stubs were
// precompiled mode and only after VM isolate stubs were
// replaced with isolate-specific stubs.
auto object_store = IsolateGroup::Current()->object_store();
const bool stubs_in_vm_isolate =
@ -4779,7 +4779,6 @@ LocationSummary* BoxInt64Instr::MakeLocationSummary(Zone* zone,
->untag()
->InVMIsolateHeap();
const bool shared_slow_path_call = SlowPathSharingSupported(opt) &&
FLAG_use_bare_instructions &&
!stubs_in_vm_isolate;
LocationSummary* summary = new (zone) LocationSummary(
zone, kNumInputs, kNumTemps,

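An idiom that recurs in the FfiCall and NativeEntry hunks above: after any excursion through the runtime, bare-mode code reloads the global object pool, because a GC during the runtime call may have moved old space and with it the pool. The reload reads from the Thread, which is always valid. A schematic of what SetupGlobalPoolAndDispatchTable amounts to (the struct below is an invented slice of the real Thread):

#include <cstdint>

struct ObjectPool;  // Opaque in this sketch.

// Invented slice of the VM Thread; the two fields model the real
// global object pool and dispatch table slots reachable from THR.
struct Thread {
  ObjectPool* global_object_pool;
  const uintptr_t* dispatch_table_array;
};

// Refresh the PP and dispatch-table registers from the Thread after a
// runtime call, since old space may have relocated the pool.
static void SetupGlobalPoolAndDispatchTable(const Thread* thread,
                                            ObjectPool** pp_reg,
                                            const uintptr_t** table_reg) {
  *pp_reg = thread->global_object_pool;
  *table_reg = thread->dispatch_table_array;
}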

@ -523,7 +523,7 @@ void ClosureCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ LoadObject(R4, arguments_descriptor);
ASSERT(locs()->in(0).reg() == R0);
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
// R0: Closure with a cached entry point.
__ LoadFieldFromOffset(R2, R0,
compiler::target::Closure::entry_point_offset());
@ -1343,7 +1343,7 @@ void FfiCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
// Restore the global object pool after returning from runtime (old space is
// moving, so the GOP could have been relocated).
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
__ SetupGlobalPoolAndDispatchTable();
}
@ -1506,7 +1506,7 @@ void NativeEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
// Put the code object in the reserved slot.
__ StoreToOffset(CODE_REG, FPREG,
kPcMarkerSlotFromFp * compiler::target::kWordSize);
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
__ SetupGlobalPoolAndDispatchTable();
} else {
// We now load the pool pointer (PP) with a GC safe value as we are about to
@ -3994,7 +3994,7 @@ LocationSummary* BoxInt64Instr::MakeLocationSummary(Zone* zone,
const intptr_t kNumInputs = 1;
const intptr_t kNumTemps = ValueFitsSmi() ? 0 : 1;
// Shared slow path is used in BoxInt64Instr::EmitNativeCode in
// FLAG_use_bare_instructions mode and only after VM isolate stubs were
// precompiled mode and only after VM isolate stubs were
// replaced with isolate-specific stubs.
auto object_store = IsolateGroup::Current()->object_store();
const bool stubs_in_vm_isolate =
@ -4005,7 +4005,6 @@ LocationSummary* BoxInt64Instr::MakeLocationSummary(Zone* zone,
->untag()
->InVMIsolateHeap();
const bool shared_slow_path_call = SlowPathSharingSupported(opt) &&
FLAG_use_bare_instructions &&
!stubs_in_vm_isolate;
LocationSummary* summary = new (zone) LocationSummary(
zone, kNumInputs, kNumTemps,


@ -656,7 +656,7 @@ void AssertBooleanInstr::PrintOperandsTo(BaseTextBuffer* f) const {
}
void ClosureCallInstr::PrintOperandsTo(BaseTextBuffer* f) const {
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
f->AddString(" closure=");
} else {
f->AddString(" function=");


@ -1292,7 +1292,7 @@ void FfiCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ LeaveDartFrame(compiler::kRestoreCallerPP);
// Restore the global object pool after returning from runtime (old space is
// moving, so the GOP could have been relocated).
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
__ movq(PP, compiler::Address(THR, Thread::global_object_pool_offset()));
}
__ set_constant_pool_allowed(true);
@ -1401,7 +1401,7 @@ void NativeEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
kPcMarkerSlotFromFp * compiler::target::kWordSize),
CODE_REG);
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
__ movq(PP,
compiler::Address(
THR, compiler::target::Thread::global_object_pool_offset()));
@ -4396,7 +4396,7 @@ LocationSummary* BoxInt64Instr::MakeLocationSummary(Zone* zone,
const intptr_t kNumInputs = 1;
const intptr_t kNumTemps = ValueFitsSmi() ? 0 : 1;
// Shared slow path is used in BoxInt64Instr::EmitNativeCode in
// FLAG_use_bare_instructions mode and only after VM isolate stubs were
// precompiled mode and only after VM isolate stubs were
// replaced with isolate-specific stubs.
auto object_store = IsolateGroup::Current()->object_store();
const bool stubs_in_vm_isolate =
@ -4407,7 +4407,6 @@ LocationSummary* BoxInt64Instr::MakeLocationSummary(Zone* zone,
->untag()
->InVMIsolateHeap();
const bool shared_slow_path_call = SlowPathSharingSupported(opt) &&
FLAG_use_bare_instructions &&
!stubs_in_vm_isolate;
LocationSummary* summary = new (zone) LocationSummary(
zone, kNumInputs, kNumTemps,
@ -6918,7 +6917,7 @@ void ClosureCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ LoadObject(R10, arguments_descriptor);
ASSERT(locs()->in(0).reg() == RAX);
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
// RAX: Closure with cached entry point.
__ movq(RCX, compiler::FieldAddress(
RAX, compiler::target::Closure::entry_point_offset()));


@ -3053,7 +3053,7 @@ void FlowGraphAllocator::RemoveFrameIfNotNeeded() {
// frameless functions. Outside of bare instructions mode we need to preserve
// caller PP - so all functions need a frame if they have their own pool which
// is hard to determine at this stage.
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions)) {
if (!FLAG_precompiled_mode) {
return;
}


@ -470,7 +470,7 @@ COMPILER_PASS(SelectRepresentations, {
});
COMPILER_PASS(UseTableDispatch, {
if (FLAG_use_bare_instructions && FLAG_use_table_dispatch) {
if (FLAG_use_table_dispatch) {
state->call_specializer->ReplaceInstanceCallsWithDispatchTableCalls();
}
});

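With bare instructions implied by AOT, the UseTableDispatch pass above is gated on FLAG_use_table_dispatch alone. Table dispatch replaces an instance call with a single indexed load from one global table; the toy model below (invented names throughout) shows the shape of the mechanism that the bare-mode global pool makes cheap:

#include <cstdint>
#include <vector>

using Entry = void (*)();

// Toy global dispatch table: rows by class id, flattened with per-selector
// offsets, loosely mirroring the VM's single array reachable from the Thread.
struct DispatchTable {
  std::vector<Entry> entries;

  void Call(intptr_t receiver_cid, intptr_t selector_offset) const {
    // One load and one indirect call; no IC stub, no megamorphic cache.
    entries[receiver_cid + selector_offset]();
  }
};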

@ -1133,7 +1133,7 @@ Fragment BaseFlowGraphBuilder::BuildEntryPointsIntrospection() {
call_hook += Constant(closure);
call_hook += Constant(function_name);
call_hook += LoadLocal(entry_point_num);
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
call_hook += Constant(closure);
} else {
call_hook += Constant(Function::ZoneHandle(Z, closure.function()));


@ -3110,7 +3110,7 @@ Fragment StreamingFlowGraphBuilder::BuildLocalFunctionInvocation(
// Lookup the function in the closure.
instructions += LoadLocal(variable);
if (!FLAG_precompiled_mode || !FLAG_use_bare_instructions) {
if (!FLAG_precompiled_mode) {
instructions += LoadNativeField(Slot::Closure_function());
}
if (parsed_function()->function().is_debuggable()) {
@ -3173,7 +3173,7 @@ Fragment StreamingFlowGraphBuilder::BuildFunctionInvocation(TokenPosition* p) {
/*clear_temp=*/false);
// Lookup the function in the closure.
instructions += LoadLocal(receiver_temp);
if (!FLAG_precompiled_mode || !FLAG_use_bare_instructions) {
if (!FLAG_precompiled_mode) {
instructions += LoadNativeField(Slot::Closure_function());
}
if (parsed_function()->function().is_debuggable()) {


@ -3087,7 +3087,7 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfInvokeFieldDispatcher(
if (is_closure_call) {
body += LoadLocal(closure);
if (!FLAG_precompiled_mode || !FLAG_use_bare_instructions) {
if (!FLAG_precompiled_mode) {
// Lookup the function in the closure.
body += LoadNativeField(Slot::Closure_function());
}


@ -3639,7 +3639,7 @@ void TypeTranslator::SetupUnboxingInfoMetadata(const Function& function,
// TODO(dartbug.com/32292): accept unboxed parameters and return value
// when FLAG_use_table_dispatch == false.
if (FLAG_precompiled_mode && unboxing_info != nullptr &&
FLAG_use_table_dispatch && FLAG_use_bare_instructions) {
FLAG_use_table_dispatch) {
for (intptr_t i = 0; i < unboxing_info->unboxed_args_info.length(); i++) {
SetupUnboxingInfoOfParameter(function, i, unboxing_info);
}
@ -3658,7 +3658,7 @@ void TypeTranslator::SetupUnboxingInfoMetadataForFieldAccessors(
// TODO(dartbug.com/32292): accept unboxed parameters and return value
// when FLAG_use_table_dispatch == false.
if (FLAG_precompiled_mode && unboxing_info != nullptr &&
FLAG_use_table_dispatch && FLAG_use_bare_instructions) {
FLAG_use_table_dispatch) {
if (field_accessor.IsImplicitSetterFunction()) {
for (intptr_t i = 0; i < unboxing_info->unboxed_args_info.length(); i++) {
SetupUnboxingInfoOfParameter(field_accessor, i, unboxing_info);


@ -549,7 +549,7 @@ intptr_t CodeRelocator::FindDestinationInText(const InstructionsPtr destination,
}
intptr_t CodeRelocator::AdjustPayloadOffset(intptr_t payload_offset) {
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
return payload_offset;
}
return compiler::target::Instructions::HeaderSize() + payload_offset;


@ -43,13 +43,11 @@ struct RelocatorTestHelper {
safepoint_and_growth_scope(thread, SafepointLevel::kGC) {
// So the relocator uses the correct instruction size layout.
FLAG_precompiled_mode = true;
FLAG_use_bare_instructions = true;
FLAG_lower_pc_relative_call_distance = -128;
FLAG_upper_pc_relative_call_distance = 128;
}
~RelocatorTestHelper() {
FLAG_use_bare_instructions = false;
FLAG_precompiled_mode = false;
}


@ -793,7 +793,7 @@ const word MarkingStackBlock::kSize = dart::MarkingStackBlock::kSize;
// Used for InstructionsSection and Instructions methods, since we don't
// serialize Instructions objects in bare instructions mode, just payloads.
DART_FORCE_INLINE static bool BareInstructionsPayloads() {
return FLAG_precompiled_mode && FLAG_use_bare_instructions;
return FLAG_precompiled_mode;
}
word InstructionsSection::HeaderSize() {

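AdjustPayloadOffset and BareInstructionsPayloads above encode the same layout fact: bare snapshots serialize raw instruction payloads, so offsets are payload-relative, while JIT code keeps an Instructions object whose header must be skipped. A small sketch of the arithmetic (the header size constant is a stand-in for compiler::target::Instructions::HeaderSize()):

#include <cstdint>

// Stand-in value; the real one comes from the target ABI.
constexpr intptr_t kInstructionsHeaderSize = 16;

static intptr_t AdjustPayloadOffset(bool precompiled_mode,
                                    intptr_t payload_offset) {
  // Bare payloads are addressed directly; object-backed instructions are
  // addressed past their object header.
  return precompiled_mode ? payload_offset
                          : kInstructionsHeaderSize + payload_offset;
}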

@ -467,11 +467,9 @@ static constexpr dart::compiler::target::word
UnhandledException_stacktrace_offset = 8;
static constexpr dart::compiler::target::word UserTag_tag_offset = 8;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_expected_cid_offset = 8;
MonomorphicSmiableCall_expected_cid_offset = 4;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_entrypoint_offset = 12;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_target_offset = 4;
MonomorphicSmiableCall_entrypoint_offset = 8;
static constexpr dart::compiler::target::word WeakProperty_key_offset = 4;
static constexpr dart::compiler::target::word WeakProperty_value_offset = 8;
static constexpr dart::compiler::target::word Code_entry_point_offset[] = {
@ -528,7 +526,7 @@ static constexpr dart::compiler::target::word MegamorphicCache_InstanceSize =
static constexpr dart::compiler::target::word Mint_InstanceSize = 16;
static constexpr dart::compiler::target::word MirrorReference_InstanceSize = 8;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_InstanceSize = 16;
MonomorphicSmiableCall_InstanceSize = 12;
static constexpr dart::compiler::target::word Namespace_InstanceSize = 20;
static constexpr dart::compiler::target::word NativeArguments_StructSize = 16;
static constexpr dart::compiler::target::word Number_InstanceSize = 4;
@ -1025,11 +1023,9 @@ static constexpr dart::compiler::target::word
UnhandledException_stacktrace_offset = 16;
static constexpr dart::compiler::target::word UserTag_tag_offset = 16;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_expected_cid_offset = 16;
MonomorphicSmiableCall_expected_cid_offset = 8;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_entrypoint_offset = 24;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_target_offset = 8;
MonomorphicSmiableCall_entrypoint_offset = 16;
static constexpr dart::compiler::target::word WeakProperty_key_offset = 8;
static constexpr dart::compiler::target::word WeakProperty_value_offset = 16;
static constexpr dart::compiler::target::word Code_entry_point_offset[] = {
@ -1087,7 +1083,7 @@ static constexpr dart::compiler::target::word MegamorphicCache_InstanceSize =
static constexpr dart::compiler::target::word Mint_InstanceSize = 16;
static constexpr dart::compiler::target::word MirrorReference_InstanceSize = 16;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_InstanceSize = 32;
MonomorphicSmiableCall_InstanceSize = 24;
static constexpr dart::compiler::target::word Namespace_InstanceSize = 40;
static constexpr dart::compiler::target::word NativeArguments_StructSize = 32;
static constexpr dart::compiler::target::word Number_InstanceSize = 8;
@ -1576,11 +1572,9 @@ static constexpr dart::compiler::target::word
UnhandledException_stacktrace_offset = 8;
static constexpr dart::compiler::target::word UserTag_tag_offset = 8;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_expected_cid_offset = 8;
MonomorphicSmiableCall_expected_cid_offset = 4;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_entrypoint_offset = 12;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_target_offset = 4;
MonomorphicSmiableCall_entrypoint_offset = 8;
static constexpr dart::compiler::target::word WeakProperty_key_offset = 4;
static constexpr dart::compiler::target::word WeakProperty_value_offset = 8;
static constexpr dart::compiler::target::word Code_entry_point_offset[] = {
@ -1634,7 +1628,7 @@ static constexpr dart::compiler::target::word MegamorphicCache_InstanceSize =
static constexpr dart::compiler::target::word Mint_InstanceSize = 16;
static constexpr dart::compiler::target::word MirrorReference_InstanceSize = 8;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_InstanceSize = 16;
MonomorphicSmiableCall_InstanceSize = 12;
static constexpr dart::compiler::target::word Namespace_InstanceSize = 20;
static constexpr dart::compiler::target::word NativeArguments_StructSize = 16;
static constexpr dart::compiler::target::word Number_InstanceSize = 4;
@ -2131,11 +2125,9 @@ static constexpr dart::compiler::target::word
UnhandledException_stacktrace_offset = 16;
static constexpr dart::compiler::target::word UserTag_tag_offset = 16;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_expected_cid_offset = 16;
MonomorphicSmiableCall_expected_cid_offset = 8;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_entrypoint_offset = 24;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_target_offset = 8;
MonomorphicSmiableCall_entrypoint_offset = 16;
static constexpr dart::compiler::target::word WeakProperty_key_offset = 8;
static constexpr dart::compiler::target::word WeakProperty_value_offset = 16;
static constexpr dart::compiler::target::word Code_entry_point_offset[] = {
@ -2194,7 +2186,7 @@ static constexpr dart::compiler::target::word MegamorphicCache_InstanceSize =
static constexpr dart::compiler::target::word Mint_InstanceSize = 16;
static constexpr dart::compiler::target::word MirrorReference_InstanceSize = 16;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_InstanceSize = 32;
MonomorphicSmiableCall_InstanceSize = 24;
static constexpr dart::compiler::target::word Namespace_InstanceSize = 40;
static constexpr dart::compiler::target::word NativeArguments_StructSize = 32;
static constexpr dart::compiler::target::word Number_InstanceSize = 8;
@ -2689,11 +2681,9 @@ static constexpr dart::compiler::target::word
UnhandledException_stacktrace_offset = 12;
static constexpr dart::compiler::target::word UserTag_tag_offset = 16;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_expected_cid_offset = 16;
MonomorphicSmiableCall_expected_cid_offset = 8;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_entrypoint_offset = 24;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_target_offset = 8;
MonomorphicSmiableCall_entrypoint_offset = 16;
static constexpr dart::compiler::target::word WeakProperty_key_offset = 8;
static constexpr dart::compiler::target::word WeakProperty_value_offset = 12;
static constexpr dart::compiler::target::word Code_entry_point_offset[] = {
@ -2751,7 +2741,7 @@ static constexpr dart::compiler::target::word MegamorphicCache_InstanceSize =
static constexpr dart::compiler::target::word Mint_InstanceSize = 16;
static constexpr dart::compiler::target::word MirrorReference_InstanceSize = 16;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_InstanceSize = 32;
MonomorphicSmiableCall_InstanceSize = 24;
static constexpr dart::compiler::target::word Namespace_InstanceSize = 24;
static constexpr dart::compiler::target::word NativeArguments_StructSize = 32;
static constexpr dart::compiler::target::word Number_InstanceSize = 8;
@ -3246,11 +3236,9 @@ static constexpr dart::compiler::target::word
UnhandledException_stacktrace_offset = 12;
static constexpr dart::compiler::target::word UserTag_tag_offset = 16;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_expected_cid_offset = 16;
MonomorphicSmiableCall_expected_cid_offset = 8;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_entrypoint_offset = 24;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_target_offset = 8;
MonomorphicSmiableCall_entrypoint_offset = 16;
static constexpr dart::compiler::target::word WeakProperty_key_offset = 8;
static constexpr dart::compiler::target::word WeakProperty_value_offset = 12;
static constexpr dart::compiler::target::word Code_entry_point_offset[] = {
@ -3309,7 +3297,7 @@ static constexpr dart::compiler::target::word MegamorphicCache_InstanceSize =
static constexpr dart::compiler::target::word Mint_InstanceSize = 16;
static constexpr dart::compiler::target::word MirrorReference_InstanceSize = 16;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_InstanceSize = 32;
MonomorphicSmiableCall_InstanceSize = 24;
static constexpr dart::compiler::target::word Namespace_InstanceSize = 24;
static constexpr dart::compiler::target::word NativeArguments_StructSize = 32;
static constexpr dart::compiler::target::word Number_InstanceSize = 8;
@ -3794,11 +3782,9 @@ static constexpr dart::compiler::target::word
UnhandledException_stacktrace_offset = 8;
static constexpr dart::compiler::target::word UserTag_tag_offset = 8;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_expected_cid_offset = 8;
MonomorphicSmiableCall_expected_cid_offset = 4;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_entrypoint_offset = 12;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_target_offset = 4;
MonomorphicSmiableCall_entrypoint_offset = 8;
static constexpr dart::compiler::target::word WeakProperty_key_offset = 4;
static constexpr dart::compiler::target::word WeakProperty_value_offset = 8;
static constexpr dart::compiler::target::word Code_entry_point_offset[] = {
@ -3855,7 +3841,7 @@ static constexpr dart::compiler::target::word MegamorphicCache_InstanceSize =
static constexpr dart::compiler::target::word Mint_InstanceSize = 16;
static constexpr dart::compiler::target::word MirrorReference_InstanceSize = 8;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_InstanceSize = 16;
MonomorphicSmiableCall_InstanceSize = 12;
static constexpr dart::compiler::target::word Namespace_InstanceSize = 20;
static constexpr dart::compiler::target::word NativeArguments_StructSize = 16;
static constexpr dart::compiler::target::word Number_InstanceSize = 4;
@ -4346,11 +4332,9 @@ static constexpr dart::compiler::target::word
UnhandledException_stacktrace_offset = 16;
static constexpr dart::compiler::target::word UserTag_tag_offset = 16;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_expected_cid_offset = 16;
MonomorphicSmiableCall_expected_cid_offset = 8;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_entrypoint_offset = 24;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_target_offset = 8;
MonomorphicSmiableCall_entrypoint_offset = 16;
static constexpr dart::compiler::target::word WeakProperty_key_offset = 8;
static constexpr dart::compiler::target::word WeakProperty_value_offset = 16;
static constexpr dart::compiler::target::word Code_entry_point_offset[] = {
@ -4408,7 +4392,7 @@ static constexpr dart::compiler::target::word MegamorphicCache_InstanceSize =
static constexpr dart::compiler::target::word Mint_InstanceSize = 16;
static constexpr dart::compiler::target::word MirrorReference_InstanceSize = 16;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_InstanceSize = 32;
MonomorphicSmiableCall_InstanceSize = 24;
static constexpr dart::compiler::target::word Namespace_InstanceSize = 40;
static constexpr dart::compiler::target::word NativeArguments_StructSize = 32;
static constexpr dart::compiler::target::word Number_InstanceSize = 8;
@ -4891,11 +4875,9 @@ static constexpr dart::compiler::target::word
UnhandledException_stacktrace_offset = 8;
static constexpr dart::compiler::target::word UserTag_tag_offset = 8;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_expected_cid_offset = 8;
MonomorphicSmiableCall_expected_cid_offset = 4;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_entrypoint_offset = 12;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_target_offset = 4;
MonomorphicSmiableCall_entrypoint_offset = 8;
static constexpr dart::compiler::target::word WeakProperty_key_offset = 4;
static constexpr dart::compiler::target::word WeakProperty_value_offset = 8;
static constexpr dart::compiler::target::word Code_entry_point_offset[] = {
@ -4949,7 +4931,7 @@ static constexpr dart::compiler::target::word MegamorphicCache_InstanceSize =
static constexpr dart::compiler::target::word Mint_InstanceSize = 16;
static constexpr dart::compiler::target::word MirrorReference_InstanceSize = 8;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_InstanceSize = 16;
MonomorphicSmiableCall_InstanceSize = 12;
static constexpr dart::compiler::target::word Namespace_InstanceSize = 20;
static constexpr dart::compiler::target::word NativeArguments_StructSize = 16;
static constexpr dart::compiler::target::word Number_InstanceSize = 4;
@ -5440,11 +5422,9 @@ static constexpr dart::compiler::target::word
UnhandledException_stacktrace_offset = 16;
static constexpr dart::compiler::target::word UserTag_tag_offset = 16;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_expected_cid_offset = 16;
MonomorphicSmiableCall_expected_cid_offset = 8;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_entrypoint_offset = 24;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_target_offset = 8;
MonomorphicSmiableCall_entrypoint_offset = 16;
static constexpr dart::compiler::target::word WeakProperty_key_offset = 8;
static constexpr dart::compiler::target::word WeakProperty_value_offset = 16;
static constexpr dart::compiler::target::word Code_entry_point_offset[] = {
@ -5503,7 +5483,7 @@ static constexpr dart::compiler::target::word MegamorphicCache_InstanceSize =
static constexpr dart::compiler::target::word Mint_InstanceSize = 16;
static constexpr dart::compiler::target::word MirrorReference_InstanceSize = 16;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_InstanceSize = 32;
MonomorphicSmiableCall_InstanceSize = 24;
static constexpr dart::compiler::target::word Namespace_InstanceSize = 40;
static constexpr dart::compiler::target::word NativeArguments_StructSize = 32;
static constexpr dart::compiler::target::word Number_InstanceSize = 8;
@ -5992,11 +5972,9 @@ static constexpr dart::compiler::target::word
UnhandledException_stacktrace_offset = 12;
static constexpr dart::compiler::target::word UserTag_tag_offset = 16;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_expected_cid_offset = 16;
MonomorphicSmiableCall_expected_cid_offset = 8;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_entrypoint_offset = 24;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_target_offset = 8;
MonomorphicSmiableCall_entrypoint_offset = 16;
static constexpr dart::compiler::target::word WeakProperty_key_offset = 8;
static constexpr dart::compiler::target::word WeakProperty_value_offset = 12;
static constexpr dart::compiler::target::word Code_entry_point_offset[] = {
@ -6054,7 +6032,7 @@ static constexpr dart::compiler::target::word MegamorphicCache_InstanceSize =
static constexpr dart::compiler::target::word Mint_InstanceSize = 16;
static constexpr dart::compiler::target::word MirrorReference_InstanceSize = 16;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_InstanceSize = 32;
MonomorphicSmiableCall_InstanceSize = 24;
static constexpr dart::compiler::target::word Namespace_InstanceSize = 24;
static constexpr dart::compiler::target::word NativeArguments_StructSize = 32;
static constexpr dart::compiler::target::word Number_InstanceSize = 8;
@ -6543,11 +6521,9 @@ static constexpr dart::compiler::target::word
UnhandledException_stacktrace_offset = 12;
static constexpr dart::compiler::target::word UserTag_tag_offset = 16;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_expected_cid_offset = 16;
MonomorphicSmiableCall_expected_cid_offset = 8;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_entrypoint_offset = 24;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_target_offset = 8;
MonomorphicSmiableCall_entrypoint_offset = 16;
static constexpr dart::compiler::target::word WeakProperty_key_offset = 8;
static constexpr dart::compiler::target::word WeakProperty_value_offset = 12;
static constexpr dart::compiler::target::word Code_entry_point_offset[] = {
@ -6606,7 +6582,7 @@ static constexpr dart::compiler::target::word MegamorphicCache_InstanceSize =
static constexpr dart::compiler::target::word Mint_InstanceSize = 16;
static constexpr dart::compiler::target::word MirrorReference_InstanceSize = 16;
static constexpr dart::compiler::target::word
MonomorphicSmiableCall_InstanceSize = 32;
MonomorphicSmiableCall_InstanceSize = 24;
static constexpr dart::compiler::target::word Namespace_InstanceSize = 24;
static constexpr dart::compiler::target::word NativeArguments_StructSize = 32;
static constexpr dart::compiler::target::word Number_InstanceSize = 8;
@ -7151,11 +7127,9 @@ static constexpr dart::compiler::target::word
AOT_UnhandledException_stacktrace_offset = 8;
static constexpr dart::compiler::target::word AOT_UserTag_tag_offset = 8;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_expected_cid_offset = 8;
AOT_MonomorphicSmiableCall_expected_cid_offset = 4;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_entrypoint_offset = 12;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_target_offset = 4;
AOT_MonomorphicSmiableCall_entrypoint_offset = 8;
static constexpr dart::compiler::target::word AOT_WeakProperty_key_offset = 4;
static constexpr dart::compiler::target::word AOT_WeakProperty_value_offset = 8;
static constexpr dart::compiler::target::word AOT_Code_entry_point_offset[] = {
@ -7219,7 +7193,7 @@ static constexpr dart::compiler::target::word AOT_Mint_InstanceSize = 16;
static constexpr dart::compiler::target::word AOT_MirrorReference_InstanceSize =
8;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_InstanceSize = 16;
AOT_MonomorphicSmiableCall_InstanceSize = 12;
static constexpr dart::compiler::target::word AOT_Namespace_InstanceSize = 20;
static constexpr dart::compiler::target::word AOT_NativeArguments_StructSize =
16;
@ -7769,11 +7743,9 @@ static constexpr dart::compiler::target::word
AOT_UnhandledException_stacktrace_offset = 16;
static constexpr dart::compiler::target::word AOT_UserTag_tag_offset = 16;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_expected_cid_offset = 16;
AOT_MonomorphicSmiableCall_expected_cid_offset = 8;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_entrypoint_offset = 24;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_target_offset = 8;
AOT_MonomorphicSmiableCall_entrypoint_offset = 16;
static constexpr dart::compiler::target::word AOT_WeakProperty_key_offset = 8;
static constexpr dart::compiler::target::word AOT_WeakProperty_value_offset =
16;
@ -7839,7 +7811,7 @@ static constexpr dart::compiler::target::word AOT_Mint_InstanceSize = 16;
static constexpr dart::compiler::target::word AOT_MirrorReference_InstanceSize =
16;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_InstanceSize = 32;
AOT_MonomorphicSmiableCall_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_Namespace_InstanceSize = 40;
static constexpr dart::compiler::target::word AOT_NativeArguments_StructSize =
32;
@ -8392,11 +8364,9 @@ static constexpr dart::compiler::target::word
AOT_UnhandledException_stacktrace_offset = 16;
static constexpr dart::compiler::target::word AOT_UserTag_tag_offset = 16;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_expected_cid_offset = 16;
AOT_MonomorphicSmiableCall_expected_cid_offset = 8;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_entrypoint_offset = 24;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_target_offset = 8;
AOT_MonomorphicSmiableCall_entrypoint_offset = 16;
static constexpr dart::compiler::target::word AOT_WeakProperty_key_offset = 8;
static constexpr dart::compiler::target::word AOT_WeakProperty_value_offset =
16;
@ -8463,7 +8433,7 @@ static constexpr dart::compiler::target::word AOT_Mint_InstanceSize = 16;
static constexpr dart::compiler::target::word AOT_MirrorReference_InstanceSize =
16;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_InstanceSize = 32;
AOT_MonomorphicSmiableCall_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_Namespace_InstanceSize = 40;
static constexpr dart::compiler::target::word AOT_NativeArguments_StructSize =
32;
@ -9012,11 +8982,9 @@ static constexpr dart::compiler::target::word
AOT_UnhandledException_stacktrace_offset = 12;
static constexpr dart::compiler::target::word AOT_UserTag_tag_offset = 16;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_expected_cid_offset = 16;
AOT_MonomorphicSmiableCall_expected_cid_offset = 8;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_entrypoint_offset = 24;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_target_offset = 8;
AOT_MonomorphicSmiableCall_entrypoint_offset = 16;
static constexpr dart::compiler::target::word AOT_WeakProperty_key_offset = 8;
static constexpr dart::compiler::target::word AOT_WeakProperty_value_offset =
12;
@ -9082,7 +9050,7 @@ static constexpr dart::compiler::target::word AOT_Mint_InstanceSize = 16;
static constexpr dart::compiler::target::word AOT_MirrorReference_InstanceSize =
16;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_InstanceSize = 32;
AOT_MonomorphicSmiableCall_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_Namespace_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_NativeArguments_StructSize =
32;
@ -9631,11 +9599,9 @@ static constexpr dart::compiler::target::word
AOT_UnhandledException_stacktrace_offset = 12;
static constexpr dart::compiler::target::word AOT_UserTag_tag_offset = 16;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_expected_cid_offset = 16;
AOT_MonomorphicSmiableCall_expected_cid_offset = 8;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_entrypoint_offset = 24;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_target_offset = 8;
AOT_MonomorphicSmiableCall_entrypoint_offset = 16;
static constexpr dart::compiler::target::word AOT_WeakProperty_key_offset = 8;
static constexpr dart::compiler::target::word AOT_WeakProperty_value_offset =
12;
@ -9702,7 +9668,7 @@ static constexpr dart::compiler::target::word AOT_Mint_InstanceSize = 16;
static constexpr dart::compiler::target::word AOT_MirrorReference_InstanceSize =
16;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_InstanceSize = 32;
AOT_MonomorphicSmiableCall_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_Namespace_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_NativeArguments_StructSize =
32;
@ -10245,11 +10211,9 @@ static constexpr dart::compiler::target::word
AOT_UnhandledException_stacktrace_offset = 8;
static constexpr dart::compiler::target::word AOT_UserTag_tag_offset = 8;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_expected_cid_offset = 8;
AOT_MonomorphicSmiableCall_expected_cid_offset = 4;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_entrypoint_offset = 12;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_target_offset = 4;
AOT_MonomorphicSmiableCall_entrypoint_offset = 8;
static constexpr dart::compiler::target::word AOT_WeakProperty_key_offset = 4;
static constexpr dart::compiler::target::word AOT_WeakProperty_value_offset = 8;
static constexpr dart::compiler::target::word AOT_Code_entry_point_offset[] = {
@ -10313,7 +10277,7 @@ static constexpr dart::compiler::target::word AOT_Mint_InstanceSize = 16;
static constexpr dart::compiler::target::word AOT_MirrorReference_InstanceSize =
8;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_InstanceSize = 16;
AOT_MonomorphicSmiableCall_InstanceSize = 12;
static constexpr dart::compiler::target::word AOT_Namespace_InstanceSize = 20;
static constexpr dart::compiler::target::word AOT_NativeArguments_StructSize =
16;
@ -10856,11 +10820,9 @@ static constexpr dart::compiler::target::word
AOT_UnhandledException_stacktrace_offset = 16;
static constexpr dart::compiler::target::word AOT_UserTag_tag_offset = 16;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_expected_cid_offset = 16;
AOT_MonomorphicSmiableCall_expected_cid_offset = 8;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_entrypoint_offset = 24;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_target_offset = 8;
AOT_MonomorphicSmiableCall_entrypoint_offset = 16;
static constexpr dart::compiler::target::word AOT_WeakProperty_key_offset = 8;
static constexpr dart::compiler::target::word AOT_WeakProperty_value_offset =
16;
@ -10926,7 +10888,7 @@ static constexpr dart::compiler::target::word AOT_Mint_InstanceSize = 16;
static constexpr dart::compiler::target::word AOT_MirrorReference_InstanceSize =
16;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_InstanceSize = 32;
AOT_MonomorphicSmiableCall_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_Namespace_InstanceSize = 40;
static constexpr dart::compiler::target::word AOT_NativeArguments_StructSize =
32;
@ -11472,11 +11434,9 @@ static constexpr dart::compiler::target::word
AOT_UnhandledException_stacktrace_offset = 16;
static constexpr dart::compiler::target::word AOT_UserTag_tag_offset = 16;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_expected_cid_offset = 16;
AOT_MonomorphicSmiableCall_expected_cid_offset = 8;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_entrypoint_offset = 24;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_target_offset = 8;
AOT_MonomorphicSmiableCall_entrypoint_offset = 16;
static constexpr dart::compiler::target::word AOT_WeakProperty_key_offset = 8;
static constexpr dart::compiler::target::word AOT_WeakProperty_value_offset =
16;
@ -11543,7 +11503,7 @@ static constexpr dart::compiler::target::word AOT_Mint_InstanceSize = 16;
static constexpr dart::compiler::target::word AOT_MirrorReference_InstanceSize =
16;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_InstanceSize = 32;
AOT_MonomorphicSmiableCall_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_Namespace_InstanceSize = 40;
static constexpr dart::compiler::target::word AOT_NativeArguments_StructSize =
32;
@ -12085,11 +12045,9 @@ static constexpr dart::compiler::target::word
AOT_UnhandledException_stacktrace_offset = 12;
static constexpr dart::compiler::target::word AOT_UserTag_tag_offset = 16;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_expected_cid_offset = 16;
AOT_MonomorphicSmiableCall_expected_cid_offset = 8;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_entrypoint_offset = 24;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_target_offset = 8;
AOT_MonomorphicSmiableCall_entrypoint_offset = 16;
static constexpr dart::compiler::target::word AOT_WeakProperty_key_offset = 8;
static constexpr dart::compiler::target::word AOT_WeakProperty_value_offset =
12;
@ -12155,7 +12113,7 @@ static constexpr dart::compiler::target::word AOT_Mint_InstanceSize = 16;
static constexpr dart::compiler::target::word AOT_MirrorReference_InstanceSize =
16;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_InstanceSize = 32;
AOT_MonomorphicSmiableCall_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_Namespace_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_NativeArguments_StructSize =
32;
@ -12697,11 +12655,9 @@ static constexpr dart::compiler::target::word
AOT_UnhandledException_stacktrace_offset = 12;
static constexpr dart::compiler::target::word AOT_UserTag_tag_offset = 16;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_expected_cid_offset = 16;
AOT_MonomorphicSmiableCall_expected_cid_offset = 8;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_entrypoint_offset = 24;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_target_offset = 8;
AOT_MonomorphicSmiableCall_entrypoint_offset = 16;
static constexpr dart::compiler::target::word AOT_WeakProperty_key_offset = 8;
static constexpr dart::compiler::target::word AOT_WeakProperty_value_offset =
12;
@ -12768,7 +12724,7 @@ static constexpr dart::compiler::target::word AOT_Mint_InstanceSize = 16;
static constexpr dart::compiler::target::word AOT_MirrorReference_InstanceSize =
16;
static constexpr dart::compiler::target::word
AOT_MonomorphicSmiableCall_InstanceSize = 32;
AOT_MonomorphicSmiableCall_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_Namespace_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_NativeArguments_StructSize =
32;

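Every offset change in the file above traces back to one deletion: MonomorphicSmiableCall loses its target_ field, because bare-mode call sites jump through the cached entry point and never need the Code object. expected_cid_ and entrypoint_ each slide down by a word and the instance shrinks accordingly (16 -> 12 bytes on 32-bit, 32 -> 24 on 64-bit). A rough model, assuming a one-word header (the real object has a tagged header and, on 64-bit, compressed pointers):

#include <cstdint>

struct MonomorphicSmiableCallBefore {
  uint32_t header;        // offset 0
  uint32_t target;        // offset 4  -- the field this change deletes
  uint32_t expected_cid;  // offset 8
  uint32_t entrypoint;    // offset 12
};

struct MonomorphicSmiableCallAfter {
  uint32_t header;        // offset 0
  uint32_t expected_cid;  // offset 4
  uint32_t entrypoint;    // offset 8
};

static_assert(sizeof(MonomorphicSmiableCallBefore) == 16, "old 32-bit size");
static_assert(sizeof(MonomorphicSmiableCallAfter) == 12, "new 32-bit size");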

@ -323,7 +323,6 @@
FIELD(UserTag, tag_offset) \
FIELD(MonomorphicSmiableCall, expected_cid_offset) \
FIELD(MonomorphicSmiableCall, entrypoint_offset) \
FIELD(MonomorphicSmiableCall, target_offset) \
FIELD(WeakProperty, key_offset) \
FIELD(WeakProperty, value_offset) \
RANGE(Code, entry_point_offset, CodeEntryKind, CodeEntryKind::kNormal, \


@ -82,7 +82,7 @@ void StubCodeCompiler::GenerateInitLateInstanceFieldStub(Assembler* assembler,
__ LoadCompressedFieldFromOffset(
kFunctionReg, InitInstanceFieldABI::kFieldReg,
target::Field::initializer_function_offset());
if (!FLAG_precompiled_mode || !FLAG_use_bare_instructions) {
if (!FLAG_precompiled_mode) {
__ LoadCompressedFieldFromOffset(CODE_REG, kFunctionReg,
target::Function::code_offset());
// Load a GC-safe value for the arguments descriptor (unused but tagged).
@ -675,7 +675,7 @@ void StubCodeCompiler::GenerateLazySpecializeNullableTypeTestStub(
void StubCodeCompiler::GenerateSlowTypeTestStub(Assembler* assembler) {
Label done, call_runtime;
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions)) {
if (!FLAG_precompiled_mode) {
__ LoadFromOffset(CODE_REG, THR,
target::Thread::slow_type_test_stub_offset());
}
@ -807,13 +807,9 @@ void StubCodeCompiler::GenerateAllocateClosureStub(Assembler* assembler) {
// entry point in bare instructions mode or to 0 otherwise (to catch
// misuse). This overwrites the scratch register, but there are no more
// boxed fields.
if (FLAG_use_bare_instructions) {
__ LoadFromSlot(AllocateClosureABI::kScratchReg,
AllocateClosureABI::kFunctionReg,
Slot::Function_entry_point());
} else {
__ LoadImmediate(AllocateClosureABI::kScratchReg, 0);
}
__ LoadFromSlot(AllocateClosureABI::kScratchReg,
AllocateClosureABI::kFunctionReg,
Slot::Function_entry_point());
__ StoreToSlotNoBarrier(AllocateClosureABI::kScratchReg,
AllocateClosureABI::kResultReg,
Slot::Closure_entry_point());

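The AllocateClosureStub hunk above now always caches the function's entry point in the freshly allocated closure, which is what lets the ClosureCallInstr hunks earlier skip the Function and Code indirections. A toy model of the two call paths (all types invented for illustration):

#include <cstdint>

using Entry = void (*)();

struct Code { Entry entry_point; };
struct Function { Code* code; Entry entry_point; };

struct Closure {
  Function* function;  // The JIT path starts here.
  Entry entry_point;   // Filled at allocation; the AOT path uses only this.
};

// JIT: closure -> function -> code -> entry point (patchable links).
static void CallClosureJit(Closure* c) { c->function->code->entry_point(); }

// Bare AOT: a single load and an indirect call.
static void CallClosureAot(Closure* c) { c->entry_point(); }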

@ -140,7 +140,7 @@ void StubCodeCompiler::GenerateCallToRuntimeStub(Assembler* assembler) {
// Restore the global object pool after returning from runtime (old space is
// moving, so the GOP could have been relocated).
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
__ SetupGlobalPoolAndDispatchTable();
}
@ -604,7 +604,7 @@ static void GenerateCallNativeWithWrapperStub(Assembler* assembler,
// Restore the global object pool after returning from runtime (old space is
// moving, so the GOP could have been relocated).
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
__ SetupGlobalPoolAndDispatchTable();
}
@ -1278,7 +1278,7 @@ void StubCodeCompiler::GenerateInvokeDartCodeStub(Assembler* assembler) {
__ Bind(&done_push_arguments);
// Call the Dart code entrypoint.
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
__ SetupGlobalPoolAndDispatchTable();
__ LoadImmediate(CODE_REG, 0); // GC safe value into CODE_REG.
} else {
@ -1845,7 +1845,7 @@ void StubCodeCompiler::GenerateAllocateObjectSlowStub(Assembler* assembler) {
const Register kClsReg = R1;
const Register kTagsReg = R2;
if (!FLAG_use_bare_instructions) {
if (!FLAG_precompiled_mode) {
__ ldr(CODE_REG,
Address(THR, target::Thread::call_to_runtime_stub_offset()));
}
@ -2659,8 +2659,7 @@ static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) {
// the corresponding slot in the current cache entry.
// NOTFP must be preserved for bare payloads, otherwise CODE_REG.
const bool use_bare_payloads =
FLAG_precompiled_mode && FLAG_use_bare_instructions;
const bool use_bare_payloads = FLAG_precompiled_mode;
// For this, we choose the register of the pair that need not be preserved.
const Register kNullReg = use_bare_payloads ? CODE_REG : NOTFP;
__ LoadObject(kNullReg, NullObject());
@ -2926,7 +2925,7 @@ void StubCodeCompiler::GenerateJumpToFrameStub(Assembler* assembler) {
__ StoreToOffset(R2, THR, target::Thread::top_exit_frame_info_offset());
// Restore the pool pointer.
__ RestoreCodePointer();
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
__ SetupGlobalPoolAndDispatchTable();
__ set_constant_pool_allowed(true);
} else {
@ -3150,7 +3149,7 @@ void StubCodeCompiler::GenerateMegamorphicCallStub(Assembler* assembler) {
// illegal class id was found, the target is a cache miss handler that can
// be invoked as a normal Dart function.
__ ldr(R0, FieldAddress(IP, base + target::kWordSize));
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions)) {
if (!FLAG_precompiled_mode) {
__ ldr(CODE_REG, FieldAddress(R0, target::Function::code_offset()));
}
__ ldr(ARGS_DESC_REG,
@ -3196,7 +3195,7 @@ void StubCodeCompiler::GenerateICCallThroughCodeStub(Assembler* assembler) {
__ b(&loop);
__ Bind(&found);
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
const intptr_t entry_offset =
target::ICData::EntryPointIndexFor(1) * target::kWordSize;
__ LoadCompressed(R0, Address(R8, entry_offset));
@ -3225,34 +3224,20 @@ void StubCodeCompiler::GenerateMonomorphicSmiableCheckStub(
Assembler* assembler) {
__ LoadClassIdMayBeSmi(IP, R0);
// expected_cid_ should come right after target_
ASSERT(target::MonomorphicSmiableCall::expected_cid_offset() ==
target::MonomorphicSmiableCall::target_offset() + target::kWordSize);
// entrypoint_ should come right after expected_cid_
ASSERT(target::MonomorphicSmiableCall::entrypoint_offset() ==
target::MonomorphicSmiableCall::expected_cid_offset() +
target::kWordSize);
if (FLAG_use_bare_instructions) {
// Simultaneously load the expected cid into R2 and the entrypoint into R3.
__ ldrd(
R2, R3, R9,
target::MonomorphicSmiableCall::expected_cid_offset() - kHeapObjectTag);
__ cmp(R2, Operand(IP));
__ Branch(Address(THR, target::Thread::switchable_call_miss_entry_offset()),
NE);
__ bx(R3);
} else {
// Simultaneously load the target into R2 and the expected cid into R3.
__ ldrd(R2, R3, R9,
target::MonomorphicSmiableCall::target_offset() - kHeapObjectTag);
__ mov(CODE_REG, Operand(R2));
__ cmp(R3, Operand(IP));
__ Branch(Address(THR, target::Thread::switchable_call_miss_entry_offset()),
NE);
__ LoadField(IP, FieldAddress(R2, target::Code::entry_point_offset()));
__ bx(IP);
}
// Note: this stub is only used in AOT mode, hence the direct (bare) call.
// Simultaneously load the expected cid into R2 and the entrypoint into R3.
__ ldrd(
R2, R3, R9,
target::MonomorphicSmiableCall::expected_cid_offset() - kHeapObjectTag);
__ cmp(R2, Operand(IP));
__ Branch(Address(THR, target::Thread::switchable_call_miss_entry_offset()),
NE);
__ bx(R3);
}
static void CallSwitchableCallMissRuntimeEntry(Assembler* assembler,

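The straight-line ARM stub above compares the receiver's class id against the cached one, then either jumps to the cached entry point or to the switchable-call miss handler. The same logic in plain C++ (a hedged model of the control flow, not of the stub's actual register discipline):

#include <cstdint>

using Entry = void (*)();

struct MonomorphicSmiableCall {
  intptr_t expected_cid;
  Entry entrypoint;
};

static void MonomorphicSmiableCheck(const MonomorphicSmiableCall& cache,
                                    intptr_t receiver_cid,
                                    Entry switchable_call_miss) {
  if (receiver_cid == cache.expected_cid) {
    cache.entrypoint();  // Direct (bare) jump to the cached target.
  } else {
    switchable_call_miss();  // Read off the Thread in the real stub.
  }
}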

@ -157,7 +157,7 @@ void StubCodeCompiler::GenerateCallToRuntimeStub(Assembler* assembler) {
// Restore the global object pool after returning from runtime (old space is
// moving, so the GOP could have been relocated).
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
__ SetupGlobalPoolAndDispatchTable();
}
@ -728,7 +728,7 @@ static void GenerateCallNativeWithWrapperStub(Assembler* assembler,
// Restore the global object pool after returning from runtime (old space is
// moving, so the GOP could have been relocated).
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
__ SetupGlobalPoolAndDispatchTable();
}
@ -1437,7 +1437,7 @@ void StubCodeCompiler::GenerateInvokeDartCodeStub(Assembler* assembler) {
__ b(&push_arguments, LT);
__ Bind(&done_push_arguments);
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
__ SetupGlobalPoolAndDispatchTable();
__ mov(CODE_REG, ZR); // GC-safe value into CODE_REG.
} else {
@ -2003,7 +2003,7 @@ void StubCodeCompiler::GenerateAllocateObjectParameterizedStub(
void StubCodeCompiler::GenerateAllocateObjectSlowStub(Assembler* assembler) {
const Register kTagsToClsIdReg = R2;
if (!FLAG_use_bare_instructions) {
if (!FLAG_precompiled_mode) {
__ ldr(CODE_REG,
Address(THR, target::Thread::call_to_runtime_stub_offset()));
}
@ -3110,7 +3110,7 @@ void StubCodeCompiler::GenerateJumpToFrameStub(Assembler* assembler) {
__ StoreToOffset(ZR, THR, target::Thread::top_exit_frame_info_offset());
// Restore the pool pointer.
__ RestoreCodePointer();
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
__ SetupGlobalPoolAndDispatchTable();
} else {
__ LoadPoolPointer();
@ -3221,8 +3221,8 @@ static void GenerateIdenticalWithNumberCheckStub(Assembler* assembler,
__ Bind(&reference_compare);
__ CompareObjectRegisters(left, right);
// None of the branches above go directly here to avoid generating a conditional
// branch to a ret instruction.
// None of the branches above go directly here to avoid generating a
// conditional branch to a ret instruction.
// This is an attempt to work-around a possible CPU bug on Exynos 2100 SoC.
// See https://github.com/flutter/flutter/issues/88261
__ ret();
@ -3332,7 +3332,7 @@ void StubCodeCompiler::GenerateMegamorphicCallStub(Assembler* assembler) {
__ ldr(R1, FieldAddress(R0, target::Function::entry_point_offset()));
__ ldr(ARGS_DESC_REG,
FieldAddress(R5, target::CallSiteData::arguments_descriptor_offset()));
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions)) {
if (!FLAG_precompiled_mode) {
__ LoadCompressed(CODE_REG,
FieldAddress(R0, target::Function::code_offset()));
}
@ -3385,7 +3385,7 @@ void StubCodeCompiler::GenerateICCallThroughCodeStub(Assembler* assembler) {
__ b(&loop);
__ Bind(&found);
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
const intptr_t entry_offset =
target::ICData::EntryPointIndexFor(1) * target::kCompressedWordSize;
__ LoadCompressed(R1,
@ -3419,30 +3419,16 @@ void StubCodeCompiler::GenerateMonomorphicSmiableCheckStub(
Label miss;
__ LoadClassIdMayBeSmi(IP0, R0);
if (FLAG_use_bare_instructions) {
__ LoadField(
IP1, FieldAddress(
R5, target::MonomorphicSmiableCall::expected_cid_offset()));
__ LoadField(
R1,
FieldAddress(R5, target::MonomorphicSmiableCall::entrypoint_offset()));
__ cmp(IP0, Operand(IP1));
__ b(&miss, NE);
__ br(R1);
} else {
__ LoadField(
IP1, FieldAddress(
R5, target::MonomorphicSmiableCall::expected_cid_offset()));
__ LoadField(
CODE_REG,
FieldAddress(R5, target::MonomorphicSmiableCall::target_offset()));
__ LoadField(
R1,
FieldAddress(R5, target::MonomorphicSmiableCall::entrypoint_offset()));
__ cmp(IP0, Operand(IP1));
__ b(&miss, NE);
__ br(R1);
}
// Note: this stub is only used in AOT mode, hence the direct (bare) call.
__ LoadField(
IP1,
FieldAddress(R5, target::MonomorphicSmiableCall::expected_cid_offset()));
__ LoadField(
R1,
FieldAddress(R5, target::MonomorphicSmiableCall::entrypoint_offset()));
__ cmp(IP0, Operand(IP1));
__ b(&miss, NE);
__ br(R1);
__ Bind(&miss);
__ ldr(IP0,


@ -141,7 +141,7 @@ void StubCodeCompiler::GenerateCallToRuntimeStub(Assembler* assembler) {
// Restore the global object pool after returning from runtime (old space is
// moving, so the GOP could have been relocated).
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
__ movq(PP, Address(THR, target::Thread::global_object_pool_offset()));
}
@ -645,7 +645,7 @@ static void GenerateCallNativeWithWrapperStub(Assembler* assembler,
// Restore the global object pool after returning from runtime (old space is
// moving, so the GOP could have been relocated).
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
__ movq(PP, Address(THR, target::Thread::global_object_pool_offset()));
}
@ -1366,7 +1366,7 @@ void StubCodeCompiler::GenerateInvokeDartCodeStub(Assembler* assembler) {
__ Bind(&done_push_arguments);
// Call the Dart code entrypoint.
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
__ movq(PP, Address(THR, target::Thread::global_object_pool_offset()));
__ xorq(CODE_REG, CODE_REG); // GC-safe value into CODE_REG.
} else {
@ -1935,7 +1935,7 @@ void StubCodeCompiler::GenerateAllocateObjectParameterizedStub(
void StubCodeCompiler::GenerateAllocateObjectSlowStub(Assembler* assembler) {
const Register kTagsToClsIdReg = R8;
if (!FLAG_use_bare_instructions) {
if (!FLAG_precompiled_mode) {
__ movq(CODE_REG,
Address(THR, target::Thread::call_to_runtime_stub_offset()));
}
@ -3052,7 +3052,7 @@ void StubCodeCompiler::GenerateJumpToFrameStub(Assembler* assembler) {
Immediate(0));
// Restore the pool pointer.
__ RestoreCodePointer();
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
__ movq(PP, Address(THR, target::Thread::global_object_pool_offset()));
} else {
__ LoadPoolPointer(PP);
@ -3274,7 +3274,7 @@ void StubCodeCompiler::GenerateMegamorphicCallStub(Assembler* assembler) {
__ movq(R10, FieldAddress(
RBX, target::CallSiteData::arguments_descriptor_offset()));
__ movq(RCX, FieldAddress(RAX, target::Function::entry_point_offset()));
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions)) {
if (!FLAG_precompiled_mode) {
__ LoadCompressed(CODE_REG,
FieldAddress(RAX, target::Function::code_offset()));
}
@ -3329,7 +3329,7 @@ void StubCodeCompiler::GenerateICCallThroughCodeStub(Assembler* assembler) {
__ jmp(&loop);
__ Bind(&found);
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
const intptr_t entry_offset =
target::ICData::EntryPointIndexFor(1) * target::kCompressedWordSize;
__ LoadCompressed(RCX, Address(R13, entry_offset));
@ -3362,14 +3362,9 @@ void StubCodeCompiler::GenerateMonomorphicSmiableCheckStub(
__ Bind(&have_cid);
__ cmpq(RAX, RCX);
__ j(NOT_EQUAL, &miss, Assembler::kNearJump);
if (FLAG_use_bare_instructions) {
__ jmp(
FieldAddress(RBX, target::MonomorphicSmiableCall::entrypoint_offset()));
} else {
__ movq(CODE_REG,
FieldAddress(RBX, target::MonomorphicSmiableCall::target_offset()));
__ jmp(FieldAddress(CODE_REG, target::Code::entry_point_offset()));
}
// Note: this stub is only used in AOT mode, hence the direct (bare) call.
__ jmp(
FieldAddress(RBX, target::MonomorphicSmiableCall::entrypoint_offset()));
__ Bind(&miss);
__ jmp(Address(THR, target::Thread::switchable_call_miss_entry_offset()));
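
The recurring pattern in this file, reloading PP from THR after every runtime transition, exists because a runtime call may trigger a GC that moves old space, and the global object pool lives there. A minimal sketch of the invariant (field and function names are assumptions for illustration):

    #include <cstdint>

    struct Thread {
      uintptr_t global_object_pool;  // always tracks the pool's current address
    };

    // After any call into the runtime, the cached pool register must be
    // refreshed from the thread, since the pool may have been relocated.
    uintptr_t CallRuntimeThenReloadPP(Thread* thread,
                                      void (*runtime_entry)(Thread*)) {
      runtime_entry(thread);              // may move the global object pool
      return thread->global_object_pool;  // movq PP, [THR + pool offset]
    }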

View file

@ -107,16 +107,14 @@ ObjectPtr DartEntry::InvokeFunction(const Function& function,
#if !defined(DART_PRECOMPILED_RUNTIME)
UNREACHABLE();
#else
if (FLAG_use_bare_instructions) {
Thread* thread = Thread::Current();
thread->set_global_object_pool(
thread->isolate_group()->object_store()->global_object_pool());
const DispatchTable* dispatch_table = thread->isolate()->dispatch_table();
if (dispatch_table != nullptr) {
thread->set_dispatch_table_array(dispatch_table->ArrayOrigin());
}
ASSERT(thread->global_object_pool() != Object::null());
Thread* thread = Thread::Current();
thread->set_global_object_pool(
thread->isolate_group()->object_store()->global_object_pool());
const DispatchTable* dispatch_table = thread->isolate()->dispatch_table();
if (dispatch_table != nullptr) {
thread->set_dispatch_table_array(dispatch_table->ArrayOrigin());
}
ASSERT(thread->global_object_pool() != Object::null());
#endif // !defined(DART_PRECOMPILED_RUNTIME)
}
@ -179,14 +177,13 @@ ObjectPtr DartEntry::InvokeCode(const Code& code,
#if defined(USING_SIMULATOR)
return bit_copy<ObjectPtr, int64_t>(Simulator::Current()->Call(
static_cast<intptr_t>(stub),
((FLAG_precompiled_mode && FLAG_use_bare_instructions)
? static_cast<intptr_t>(entry_point)
: reinterpret_cast<intptr_t>(&code)),
FLAG_precompiled_mode ? static_cast<intptr_t>(entry_point)
: reinterpret_cast<intptr_t>(&code),
reinterpret_cast<intptr_t>(&arguments_descriptor),
reinterpret_cast<intptr_t>(&arguments),
reinterpret_cast<intptr_t>(thread)));
#else
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
return static_cast<ObjectPtr>(
(reinterpret_cast<invokestub_bare_instructions>(stub))(
entry_point, arguments_descriptor, arguments, thread));
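
The two invocation signatures being selected between here, reconstructed in simplified form (the real typedefs take Code/Array/Thread handles; these stand-ins only show the shape of the difference):

    #include <cstdint>

    struct Object;
    using ObjectPtr = Object*;

    // JIT path: the stub receives the Code object and loads its pool from it.
    using invokestub = ObjectPtr (*)(const void* code, const void* args_desc,
                                     const void* args, void* thread);

    // AOT path: only a bare entry PC is passed; the pool comes from the thread.
    using invokestub_bare_instructions =
        ObjectPtr (*)(uintptr_t entry_point, const void* args_desc,
                      const void* args, void* thread);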

View file

@ -66,7 +66,6 @@ constexpr bool FLAG_support_il_printer = false;
"Use --[no-]dwarf-stack-traces instead.") \
P(lazy_async_stacks, bool, true, "Reconstruct async stacks from listeners") \
P(lazy_dispatchers, bool, true, "Generate dispatchers lazily") \
P(use_bare_instructions, bool, true, "Enable bare instructions mode.") \
R(dedup_instructions, true, bool, false, \
"Canonicalize instructions when precompiling.")

View file

@ -56,7 +56,6 @@ const UntaggedInstructionsSection* Image::ExtraInfo(const uword raw_memory,
// (unless splitting into multiple outputs and there are no Code objects
// in this particular output), but is guaranteed empty otherwise (the
// instructions follow the InstructionsSection object instead).
ASSERT(FLAG_use_bare_instructions || layout->payload_length_ == 0);
ASSERT(raw_value <=
size - InstructionsSection::InstanceSize(layout->payload_length_));
return layout;
@ -599,8 +598,7 @@ const char* ImageWriter::SectionSymbol(ProgramSection section, bool vm) const {
}
void ImageWriter::WriteText(bool vm) {
const bool bare_instruction_payloads =
FLAG_precompiled_mode && FLAG_use_bare_instructions;
const bool bare_instruction_payloads = FLAG_precompiled_mode;
// Start snapshot at page boundary.
if (!EnterSection(ProgramSection::Text, vm, ImageWriter::kTextAlignment)) {
@ -1568,20 +1566,18 @@ ApiErrorPtr ImageReader::VerifyAlignment() const {
#if defined(DART_PRECOMPILED_RUNTIME)
uword ImageReader::GetBareInstructionsAt(uint32_t offset) const {
ASSERT(FLAG_use_bare_instructions);
ASSERT(Utils::IsAligned(offset, Instructions::kBarePayloadAlignment));
return reinterpret_cast<uword>(instructions_image_) + offset;
}
uword ImageReader::GetBareInstructionsEnd() const {
ASSERT(FLAG_use_bare_instructions);
Image image(instructions_image_);
return reinterpret_cast<uword>(image.object_start()) + image.object_size();
}
#endif
InstructionsPtr ImageReader::GetInstructionsAt(uint32_t offset) const {
ASSERT(!FLAG_precompiled_mode || !FLAG_use_bare_instructions);
ASSERT(!FLAG_precompiled_mode);
ASSERT(Utils::IsAligned(offset, kObjectAlignment));
ObjectPtr result = UntaggedObject::FromAddr(

View file

@ -251,13 +251,8 @@ class ImageWriter : public ValueObject {
if (FLAG_precompiled_mode) {
// We reserve space for the initial InstructionsSection object. It is
// manually serialized since it includes offsets to other snapshot parts.
// In bare instructions mode, it contains all the payloads and so we
// start after the header, whereas in non-bare mode, it contains no
// payload and Instructions start after it.
next_text_offset_ +=
FLAG_use_bare_instructions
? compiler::target::InstructionsSection::HeaderSize()
: compiler::target::InstructionsSection::InstanceSize(0);
// It contains all the payloads which start directly after the header.
next_text_offset_ += compiler::target::InstructionsSection::HeaderSize();
}
#endif
objects_.Clear();
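
The offset accounting above implies the following text-section layout in AOT (a sketch; the concrete header size is target-specific and the value below is an assumption):

    #include <cstdint>

    // Layout: [InstructionsSection header][payload 0][payload 1]...
    // so the first payload begins exactly HeaderSize() bytes into the section.
    constexpr intptr_t kInstructionsSectionHeaderSize = 40;  // assumed value

    intptr_t InitialTextOffset(bool precompiled_mode) {
      return precompiled_mode ? kInstructionsSectionHeaderSize : 0;
    }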

View file

@ -10148,8 +10148,7 @@ bool Function::NeedsMonomorphicCheckedEntry(Zone* zone) const {
}
// If table dispatch is disabled, all instance calls use switchable calls.
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions &&
FLAG_use_table_dispatch)) {
if (!(FLAG_precompiled_mode && FLAG_use_table_dispatch)) {
return true;
}
@ -15531,7 +15530,6 @@ MonomorphicSmiableCallPtr MonomorphicSmiableCall::New(classid_t expected_cid,
result ^= Object::Allocate(
MonomorphicSmiableCall::kClassId, MonomorphicSmiableCall::InstanceSize(),
Heap::kOld, MonomorphicSmiableCall::ContainsCompressedPointers());
result.untag()->set_target(target.ptr());
result.StoreNonPointer(&result.untag()->expected_cid_, expected_cid);
result.StoreNonPointer(&result.untag()->entrypoint_, target.EntryPoint());
return result.ptr();
@ -16906,7 +16904,7 @@ void Code::set_static_calls_target_table(const Array& value) const {
ObjectPoolPtr Code::GetObjectPool() const {
#if defined(DART_PRECOMPILER) || defined(DART_PRECOMPILED_RUNTIME)
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
return IsolateGroup::Current()->object_store()->global_object_pool();
}
#endif
@ -25514,8 +25512,7 @@ ClosurePtr Closure::New(const TypeArguments& instantiator_type_arguments,
result.untag()->set_function(function.ptr());
result.untag()->set_context(context.ptr());
#if defined(DART_PRECOMPILED_RUNTIME)
result.set_entry_point(FLAG_use_bare_instructions ? function.entry_point()
: 0);
result.set_entry_point(function.entry_point());
#endif
}
return result.ptr();
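
Closure::New in the precompiled runtime now always snapshots the function's entry point into the closure. A simplified model of that caching (types are stand-ins, not the VM's):

    #include <cstdint>

    struct Function {
      uintptr_t entry_point;  // bare PC of the compiled body
    };

    struct Closure {
      const Function* function;
      uintptr_t entry_point;  // cached so closure calls skip a load chain
    };

    Closure MakeClosure(const Function& fn) {
      // Previously 0 unless --use-bare-instructions was set; now unconditional.
      return Closure{&fn, fn.entry_point};
    }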

View file

@ -2014,7 +2014,6 @@ class SingleTargetCache : public Object {
class MonomorphicSmiableCall : public Object {
public:
CodePtr target() const { return untag()->target(); }
classid_t expected_cid() const { return untag()->expected_cid_; }
static intptr_t InstanceSize() {
@ -2028,10 +2027,6 @@ class MonomorphicSmiableCall : public Object {
return OFFSET_OF(UntaggedMonomorphicSmiableCall, expected_cid_);
}
static intptr_t target_offset() {
return OFFSET_OF(UntaggedMonomorphicSmiableCall, target_);
}
static intptr_t entrypoint_offset() {
return OFFSET_OF(UntaggedMonomorphicSmiableCall, entrypoint_);
}
@ -5450,7 +5445,8 @@ class Instructions : public Object {
// _not_ at the start of the payload.
static const intptr_t kBarePayloadAlignment = 4;
// In non-bare mode, we align the payloads on word boundaries.
// When instructions reside in the heap, we align the payloads on word
// boundaries.
static const intptr_t kNonBarePayloadAlignment = kWordSize;
// In the precompiled runtime when running in bare instructions mode,
@ -5459,9 +5455,7 @@ class Instructions : public Object {
static intptr_t HeaderSize() {
#if defined(DART_PRECOMPILED_RUNTIME)
if (FLAG_use_bare_instructions) {
UNREACHABLE();
}
UNREACHABLE();
#endif
return Utils::RoundUp(sizeof(UntaggedInstructions),
kNonBarePayloadAlignment);
@ -5475,18 +5469,14 @@ class Instructions : public Object {
static intptr_t InstanceSize(intptr_t size) {
#if defined(DART_PRECOMPILED_RUNTIME)
if (FLAG_use_bare_instructions) {
UNREACHABLE();
}
UNREACHABLE();
#endif
return RoundedAllocationSize(HeaderSize() + size);
}
static InstructionsPtr FromPayloadStart(uword payload_start) {
#if defined(DART_PRECOMPILED_RUNTIME)
if (FLAG_use_bare_instructions) {
UNREACHABLE();
}
UNREACHABLE();
#endif
return static_cast<InstructionsPtr>(payload_start - HeaderSize() +
kHeapObjectTag);

View file

@ -837,7 +837,6 @@ void MonomorphicSmiableCall::PrintJSONImpl(JSONStream* stream, bool ref) const {
if (ref) {
return;
}
jsobj.AddProperty("_target", Code::Handle(target()));
}
void CallSiteData::PrintJSONImpl(JSONStream* stream, bool ref) const {

View file

@ -897,7 +897,7 @@ void ProgramVisitor::DedupUnlinkedCalls(Thread* thread) {
pool_(ObjectPool::Handle(zone)) {
auto& gop = ObjectPool::Handle(
zone, isolate_group->object_store()->global_object_pool());
ASSERT_EQUAL(!gop.IsNull(), FLAG_use_bare_instructions);
ASSERT(!gop.IsNull());
DedupPool(gop);
}
@ -935,8 +935,7 @@ void ProgramVisitor::DedupUnlinkedCalls(Thread* thread) {
// objects and other objects in the snapshots (these references are otherwise
// implicit and go through the global object pool). This information is needed
// to produce a more informative snapshot profile.
if (!FLAG_use_bare_instructions ||
FLAG_write_v8_snapshot_profile_to != nullptr ||
if (FLAG_write_v8_snapshot_profile_to != nullptr ||
FLAG_trace_precompiler_to != nullptr) {
WalkProgram(thread->zone(), thread->isolate_group(), &visitor);
}
@ -1160,14 +1159,11 @@ class InstructionsKeyValueTrait {
// The instruction deduplication naturally causes us to have a one-to-many
// relationship between Instructions and Code objects.
//
// In AOT bare instructions mode frames only have PCs. However, the runtime
// needs e.g. stack maps from the [Code] to scan such a frame. So we ensure that
// instructions of code objects are only deduplicated if the metadata in the
// code is the same. The runtime can then pick any code object corresponding to
// the PC in the frame and use the metadata.
//
// In AOT non-bare instructions mode frames are expanded, like in JIT, and
// contain the unique code object.
// In AOT, frames only have PCs. However, the runtime needs e.g. stack maps from
// the [Code] to scan such a frame. So we ensure that instructions of code
// objects are only deduplicated if the metadata in the code is the same.
// The runtime can then pick any code object corresponding to the PC in the
// frame and use the metadata.
#if defined(DART_PRECOMPILER)
class CodeKeyValueTrait {
public:
@ -1300,7 +1296,7 @@ void ProgramVisitor::DedupInstructions(Thread* thread) {
Instructions& instructions_;
};
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
StackZone stack_zone(thread);
DedupInstructionsWithSameMetadataVisitor visitor(thread->zone());
WalkProgram(thread->zone(), thread->isolate_group(), &visitor);
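
The rule the visitor above enforces, distilled into a self-contained sketch (field names assumed): instructions may be shared only when every piece of frame metadata agrees, so any Code object reached from a PC yields the same answers.

    #include <cstdint>

    struct CodeMetadata {
      uintptr_t pc_descriptors;
      uintptr_t compressed_stackmaps;
      uintptr_t exception_handlers;
    };

    // Two Code objects may point at one deduplicated Instructions payload
    // only if interpreting a frame PC through either gives identical results.
    bool CanShareInstructions(const CodeMetadata& a, const CodeMetadata& b) {
      return a.pc_descriptors == b.pc_descriptors &&
             a.compressed_stackmaps == b.compressed_stackmaps &&
             a.exception_handlers == b.exception_handlers;
    }
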
@ -1329,25 +1325,6 @@ void ProgramVisitor::Dedup(Thread* thread) {
// Reduces binary size but obfuscates profiler results.
if (FLAG_dedup_instructions) {
// In non-bare mode (unused atm) dedupping instructions would cause us to
// lose the ability to uniquely map a PC to a given UnlinkedCall object,
// since two code objects might point to the same deduped instructions
// object but might have two different UnlinkedCall objects in their pool.
//
// In bare mode this cannot happen because different UnlinkedCall objects
// would get different indices into the (global) object pool, therefore
// making the instructions different.
//
// (When transitioning the switchable call site we lose track of the args
// descriptor. Since we need it for further transitions we currently save it
// via a PC -> UnlinkedCall mapping).
//
// We therefore disable the instruction deduplication in product-non-bare
// mode (which is unused atm).
#if defined(PRODUCT)
if (FLAG_precompiled_mode && !FLAG_use_bare_instructions) return;
#endif
DedupInstructions(thread);
}
}
@ -1389,10 +1366,6 @@ class AssignLoadingUnitsCodeVisitor : public CodeVisitor {
MergeAssignment(obj_, id);
obj_ = code.compressed_stackmaps();
MergeAssignment(obj_, id);
if (!FLAG_use_bare_instructions) {
obj_ = code.object_pool();
MergeAssignment(obj_, id);
}
}
void MergeAssignment(const Object& obj, intptr_t id) {

View file

@ -540,7 +540,7 @@ COMPRESSED_VISITOR(Closure)
COMPRESSED_VISITOR(LibraryPrefix)
REGULAR_VISITOR(SingleTargetCache)
REGULAR_VISITOR(UnlinkedCall)
REGULAR_VISITOR(MonomorphicSmiableCall)
NULL_VISITOR(MonomorphicSmiableCall)
REGULAR_VISITOR(ICData)
REGULAR_VISITOR(MegamorphicCache)
COMPRESSED_VISITOR(ApiError)

View file

@ -2273,13 +2273,10 @@ class UntaggedSingleTargetCache : public UntaggedObject {
class UntaggedMonomorphicSmiableCall : public UntaggedObject {
RAW_HEAP_OBJECT_IMPLEMENTATION(MonomorphicSmiableCall);
POINTER_FIELD(CodePtr,
target); // Entrypoint PC in bare mode, Code in non-bare mode.
VISIT_FROM(target)
VISIT_TO(target)
VISIT_NOTHING();
uword expected_cid_;
uword entrypoint_;
ObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
};
// Abstract base class for RawICData/RawMegamorphicCache

View file

@ -97,7 +97,7 @@ namespace dart {
F(UnlinkedCall, target_name_) \
F(UnlinkedCall, args_descriptor_) \
F(MonomorphicSmiableCall, expected_cid_) \
F(MonomorphicSmiableCall, target_) \
F(MonomorphicSmiableCall, entrypoint_) \
F(CallSiteData, target_name_) \
F(CallSiteData, args_descriptor_) \
F(ICData, target_name_) \

View file

@ -47,7 +47,7 @@ ObjectPtr ReversePc::FindCodeDescriptor(IsolateGroup* group,
uword pc,
bool is_return_address,
uword* code_start) {
ASSERT(FLAG_precompiled_mode && FLAG_use_bare_instructions);
ASSERT(FLAG_precompiled_mode);
NoSafepointScope no_safepoint;
ObjectPtr code_descriptor =
@ -62,7 +62,7 @@ ObjectPtr ReversePc::FindCodeDescriptor(IsolateGroup* group,
CodePtr ReversePc::Lookup(IsolateGroup* group,
uword pc,
bool is_return_address) {
ASSERT(FLAG_precompiled_mode && FLAG_use_bare_instructions);
ASSERT(FLAG_precompiled_mode);
NoSafepointScope no_safepoint;
uword code_start;
@ -85,7 +85,7 @@ CompressedStackMapsPtr ReversePc::FindCompressedStackMaps(
uword pc,
bool is_return_address,
uword* code_start) {
ASSERT(FLAG_precompiled_mode && FLAG_use_bare_instructions);
ASSERT(FLAG_precompiled_mode);
NoSafepointScope no_safepoint;
ObjectPtr code_descriptor =

View file

@ -3696,14 +3696,14 @@ void Simulator::JumpToFrame(uword pc, uword sp, uword fp, Thread* thread) {
// Restore pool pointer.
int32_t code =
*reinterpret_cast<int32_t*>(fp + kPcMarkerSlotFromFp * kWordSize);
int32_t pp = (FLAG_precompiled_mode && FLAG_use_bare_instructions)
int32_t pp = FLAG_precompiled_mode
? static_cast<int32_t>(thread->global_object_pool())
: *reinterpret_cast<int32_t*>(
(code + Code::object_pool_offset() - kHeapObjectTag));
set_register(CODE_REG, code);
set_register(PP, pp);
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
set_register(DISPATCH_TABLE_REG,
reinterpret_cast<int32_t>(thread->dispatch_table_array()));
}

View file

@ -3741,7 +3741,7 @@ void Simulator::JumpToFrame(uword pc, uword sp, uword fp, Thread* thread) {
// Restore pool pointer.
int64_t code =
*reinterpret_cast<int64_t*>(fp + kPcMarkerSlotFromFp * kWordSize);
int64_t pp = (FLAG_precompiled_mode && FLAG_use_bare_instructions)
int64_t pp = FLAG_precompiled_mode
? static_cast<int64_t>(thread->global_object_pool())
: *reinterpret_cast<int64_t*>(
code + Code::object_pool_offset() - kHeapObjectTag);
@ -3752,7 +3752,7 @@ void Simulator::JumpToFrame(uword pc, uword sp, uword fp, Thread* thread) {
NULL, HEAP_BITS,
(thread->write_barrier_mask() << 32) | (thread->heap_base() >> 32));
set_register(NULL, NULL_REG, static_cast<int64_t>(Object::null()));
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
set_register(NULL, DISPATCH_TABLE_REG,
reinterpret_cast<int64_t>(thread->dispatch_table_array()));
}
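
Both simulators now rebuild the same register state when long-jumping into a Dart frame; in AOT the pool and dispatch table are process-wide and reachable from the thread. A sketch of that restore step (names assumed):

    #include <cstdint>

    struct ThreadState {
      uintptr_t global_object_pool;
      uintptr_t* dispatch_table_array;
    };

    struct SimRegisters {
      uintptr_t pp;                   // object pool register
      uintptr_t* dispatch_table_reg;  // table dispatch base register
    };

    void RestorePrecompiledRegisters(SimRegisters* regs, const ThreadState& t) {
      regs->pp = t.global_object_pool;  // no per-Code pool load needed
      regs->dispatch_table_reg = t.dispatch_table_array;
    }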

View file

@ -93,11 +93,11 @@ void UntaggedFrame::Init() {
compiler::target::frame_layout = default_frame_layout;
runtime_frame_layout = default_frame_layout;
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
compiler::target::frame_layout = bare_instructions_frame_layout;
}
#if defined(DART_PRECOMPILED_RUNTIME)
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
compiler::target::frame_layout = invalid_frame_layout;
runtime_frame_layout = bare_instructions_frame_layout;
}
@ -105,7 +105,7 @@ void UntaggedFrame::Init() {
}
bool StackFrame::IsBareInstructionsDartFrame() const {
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions)) {
if (!FLAG_precompiled_mode) {
return false;
}
NoSafepointScope no_safepoint;
@ -123,7 +123,7 @@ bool StackFrame::IsBareInstructionsDartFrame() const {
}
bool StackFrame::IsBareInstructionsStubFrame() const {
if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions)) {
if (!FLAG_precompiled_mode) {
return false;
}
NoSafepointScope no_safepoint;
@ -141,7 +141,7 @@ bool StackFrame::IsBareInstructionsStubFrame() const {
}
bool StackFrame::IsStubFrame() const {
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
return IsBareInstructionsStubFrame();
}
@ -208,7 +208,7 @@ void StackFrame::VisitObjectPointers(ObjectPointerVisitor* visitor) {
CompressedStackMaps maps;
uword code_start;
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
maps = ReversePc::FindCompressedStackMaps(isolate_group(), pc(),
/*is_return_address=*/true,
&code_start);
@ -290,7 +290,7 @@ void StackFrame::VisitObjectPointers(ObjectPointerVisitor* visitor) {
// to an osr function. In each of these cases, all stack slots contain
// tagged pointers, so fall through.
#if defined(DEBUG)
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
ASSERT(IsStubFrame());
} else {
ASSERT(!code.is_optimized() ||
@ -337,7 +337,7 @@ CodePtr StackFrame::LookupDartCode() const {
CodePtr StackFrame::GetCodeObject() const {
#if defined(DART_PRECOMPILED_RUNTIME)
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
NoSafepointScope no_safepoint;
CodePtr code = ReversePc::Lookup(isolate_group(), pc(),
/*is_return_address=*/true);

View file

@ -195,20 +195,18 @@ CodePtr StubCode::GetAllocationStubForClass(const Class& cls) {
Precompiler* precompiler = Precompiler::Instance();
compiler::ObjectPoolBuilder* wrapper =
FLAG_use_bare_instructions && precompiler != NULL
? precompiler->global_object_pool_builder()
: &object_pool_builder;
precompiler != NULL ? precompiler->global_object_pool_builder()
: &object_pool_builder;
const auto pool_attachment =
FLAG_precompiled_mode && FLAG_use_bare_instructions
? Code::PoolAttachment::kNotAttachPool
: Code::PoolAttachment::kAttachPool;
const auto pool_attachment = FLAG_precompiled_mode
? Code::PoolAttachment::kNotAttachPool
: Code::PoolAttachment::kAttachPool;
auto zone = thread->zone();
auto object_store = thread->isolate_group()->object_store();
auto& allocate_object_stub = Code::ZoneHandle(zone);
auto& allocate_object_parametrized_stub = Code::ZoneHandle(zone);
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
allocate_object_stub = object_store->allocate_object_stub();
allocate_object_parametrized_stub =
object_store->allocate_object_parametrized_stub();

View file

@ -255,7 +255,7 @@ CodePtr TypeTestingStubGenerator::BuildCodeForType(const Type& type) {
ASSERT(!type_class.IsNull());
auto& slow_tts_stub = Code::ZoneHandle(zone);
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
if (FLAG_precompiled_mode) {
slow_tts_stub = thread->isolate_group()->object_store()->slow_tts_stub();
}
@ -273,9 +273,8 @@ CodePtr TypeTestingStubGenerator::BuildCodeForType(const Type& type) {
const char* name = namer_.StubNameForType(type);
const auto pool_attachment =
FLAG_use_bare_instructions
? Code::PoolAttachment::kNotAttachPool
: Code::PoolAttachment::kAttachPool;
FLAG_precompiled_mode ? Code::PoolAttachment::kNotAttachPool
: Code::PoolAttachment::kAttachPool;
Code& code = Code::Handle(thread->zone());
auto install_code_fun = [&]() {

View file

@ -11,7 +11,6 @@
// VMOptions=--use-slow-path --stacktrace-every=100
// VMOptions=--use-slow-path --write-protect-code --no-dual-map-code
// VMOptions=--use-slow-path --write-protect-code --no-dual-map-code --stacktrace-every=100
// VMOptions=--use-bare-instructions=false
// VMOptions=--dwarf_stack_traces --no-retain_function_objects --no-retain_code_objects
// SharedObjects=ffi_test_functions

View file

@ -13,7 +13,6 @@
// VMOptions=--use-slow-path --stacktrace-every=100
// VMOptions=--use-slow-path --write-protect-code --no-dual-map-code
// VMOptions=--use-slow-path --write-protect-code --no-dual-map-code --stacktrace-every=100
// VMOptions=--use-bare-instructions=false
// VMOptions=--dwarf_stack_traces --no-retain_function_objects --no-retain_code_objects
// SharedObjects=ffi_test_functions

View file

@ -2,9 +2,6 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
// VMOptions=--use_bare_instructions=false
// VMOptions=--use_bare_instructions=true
import "package:expect/expect.dart";
import "split_constants_canonicalization_a.dart" deferred as a;
import "split_constants_canonicalization_b.dart" deferred as b;

View file

@ -4,9 +4,6 @@
// @dart = 2.9
// VMOptions=--use_bare_instructions=false
// VMOptions=--use_bare_instructions=true
import "package:expect/expect.dart";
import "split_constants_canonicalization_a.dart" deferred as a;
import "split_constants_canonicalization_b.dart" deferred as b;

View file

@ -826,21 +826,6 @@
"vm-options": []
}
},
"dartkp-no-bare-(linux|mac|win)-(debug|product|release)-(x64|x64c)": {
"options": {
"vm-options": [
"--no-use-bare-instructions"
]
}
},
"dartkp-no-bare-(linux|mac|win)-(debug|product|release)-(simarm|simarm64|simarm64c)": {
"options": {
"vm-options": [
"--no-use-bare-instructions"
],
"use-elf": true
}
},
"dartk-(linux|mac|win)-(debug|product|release)-(ia32|x64|x64c)": {},
"dartk-fuchsia-(debug|product|release)-(x64|x64c)": {},
"dartk-linux-debug-(ia32|x64)-canary": {
@ -1546,36 +1531,6 @@
}
]
},
{
"builders": [
"vm-kernel-precomp-bare-linux-release-x64",
"vm-kernel-precomp-bare-linux-release-x64c",
"vm-kernel-precomp-bare-linux-release-simarm",
"vm-kernel-precomp-bare-linux-release-simarm64",
"vm-kernel-precomp-bare-linux-release-simarm64c"
],
"meta": {
"description": "This configuration is used by the vm kernel precomp builders using bare instructions."
},
"steps": [
{
"name": "build dart",
"script": "tools/build.py",
"arguments": [
"runtime",
"dart_precompiled_runtime"
]
},
{
"name": "vm tests",
"arguments": [
"-ndartkp-no-bare-${system}-${mode}-${arch}"
],
"fileset": "vm-kernel",
"shards": 10
}
]
},
{
"builders": [
"vm-kernel-precomp-obfuscate-linux-release-x64",