Mirror of https://github.com/dart-lang/sdk, synced 2024-10-01 18:49:24 +00:00
[vm, compiler] Allow targeting TSAN or MSAN by passing a flag to gen_snapshot.
Add check that the snapshot and runtime agree on whether to use MSAN. We already have this check for TSAN.

TEST=vm/dart/sanitizer_compatibility_test
Bug: https://github.com/dart-lang/sdk/issues/55637
Bug: https://github.com/dart-lang/sdk/issues/55638
Change-Id: I320e6f55cd59209ce6e58a82ac205a87c8a60a84
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/365487
Reviewed-by: Daco Harkes <dacoharkes@google.com>
Commit-Queue: Ryan Macnak <rmacnak@google.com>
Reviewed-by: Ivan Inozemtsev <iinozemtsev@google.com>
Reviewed-by: Siva Annamalai <asiva@google.com>
This commit is contained in:
parent 49e495d48c
commit 8c1de038e0
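For orientation before the diff: the new gen_snapshot flags let a plain (non-sanitizer) gen_snapshot emit a snapshot that targets a TSAN or MSAN runtime, and the feature-string check makes a mismatched load fail cleanly. A minimal command-line sketch of the workflow, inferred from the test added below; an x64 release build is assumed and app.dart/app.dill are illustrative names:

    # Build an AOT kernel file with the JIT runtime.
    out/ReleaseX64/dart pkg/vm/bin/gen_kernel.dart \
        --platform out/ReleaseX64/vm_platform_strong.dill --aot -o app.dill app.dart

    # Plain snapshot: loads only in the plain dart_precompiled_runtime.
    out/ReleaseX64/gen_snapshot --snapshot-kind=app-aot-elf --elf=app.elf app.dill

    # Same non-sanitizer gen_snapshot, but targeting a TSAN runtime
    # (--target_memory_sanitizer is the MSAN equivalent).
    out/ReleaseX64/gen_snapshot --snapshot-kind=app-aot-elf --elf=app.tsan.elf \
        --target_thread_sanitizer app.dill

    # Loading into the wrong runtime now fails with "Snapshot not compatible".
    out/ReleaseX64/dart_precompiled_runtime app.tsan.elf  # fails on a non-TSAN build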
@@ -89,6 +89,8 @@ config("dart_precompiler_config") {
   #   the same mode (TSAN or non-TSAN).
   if (is_tsan) {
     defines += [ "TARGET_USES_THREAD_SANITIZER" ]
+  } else if (is_msan) {
+    defines += [ "TARGET_USES_MEMORY_SANITIZER" ]
   }
 }
 
@@ -35,20 +35,4 @@ extern "C" void __tsan_release(void* addr);
 #define DO_IF_NOT_TSAN(CODE) CODE
 #endif
 
-// By default TSAN is enabled if this code is compiled under TSAN.
-//
-// Though in our AOT compiler we don't know whether the target AOT runtime will
-// use TSAN or not, so we'll rely on the build rules telling us that
-// information.
-#if defined(USING_THREAD_SANITIZER) && !defined(DART_PRECOMPILER) && \
-    !defined(TARGET_USES_THREAD_SANITIZER)
-#define TARGET_USES_THREAD_SANITIZER
-#endif
-
-#if defined(TARGET_USES_THREAD_SANITIZER)
-constexpr bool kTargetUsesThreadSanitizer = true;
-#else
-constexpr bool kTargetUsesThreadSanitizer = false;
-#endif
-
 #endif  // RUNTIME_PLATFORM_THREAD_SANITIZER_H_
runtime/tests/vm/dart/sanitizer_compatibility_test.dart (new file, 91 lines)
@@ -0,0 +1,91 @@
+// Copyright (c) 2024, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// Check for a proper error when a snapshot and a runtime don't agree on which
+// sanitizer they are using.
+
+import "dart:io";
+
+import "package:expect/expect.dart";
+
+import "use_flag_test_helper.dart";
+
+String find(String haystack, List<String> needles) {
+  for (String needle in needles) {
+    if (haystack.contains(needle)) {
+      return needle;
+    }
+  }
+  throw "None of ${needles.join(' ')}";
+}
+
+void checkExists(String path) {
+  if (!File(path).existsSync()) {
+    throw "$path does not exist";
+  }
+}
+
+main() async {
+  var sanitizer = find(Platform.executable, ["MSAN", "TSAN"]);
+  var mode = find(Platform.executable, ["Debug", "Release", "Product"]);
+  var arch = find(Platform.executable, ["X64", "ARM64", "RISCV64"]);
+  var out = find(Platform.executable, ["out", "xcodebuild"]);
+  var targetFlag = {
+    "MSAN": "--target_memory_sanitizer",
+    "TSAN": "--target_thread_sanitizer"
+  }[sanitizer]!;
+
+  var nonePlatform = "$out/$mode$arch/vm_platform_strong.dill";
+  var noneGenSnapshot = "$out/$mode$arch/gen_snapshot";
+  var noneJitRuntime = "$out/$mode$arch/dart";
+  var noneAotRuntime = "$out/$mode$arch/dart_precompiled_runtime";
+  var sanitizerGenSnapshot = "$out/$mode$sanitizer$arch/gen_snapshot";
+  var sanitizerAotRuntime =
+      "$out/$mode$sanitizer$arch/dart_precompiled_runtime";
+
+  checkExists(noneGenSnapshot);
+  checkExists(noneJitRuntime);
+  checkExists(noneAotRuntime);
+  checkExists(sanitizerGenSnapshot);
+  checkExists(sanitizerAotRuntime);
+
+  await withTempDir('sanitizer-compatibility-test', (String tempDir) async {
+    var aotDill = "$tempDir/aot.dill";
+    var noneElf = "$tempDir/none.elf";
+    var sanitizerElf = "$tempDir/$sanitizer.elf";
+    var sanitizerElf2 = "$tempDir/${sanitizer}2.elf";
+
+    await run(noneJitRuntime, [
+      "pkg/vm/bin/gen_kernel.dart",
+      "--platform",
+      nonePlatform,
+      "--aot",
+      "-o",
+      aotDill,
+      "tests/language/unsorted/first_test.dart"
+    ]);
+
+    await run(noneGenSnapshot,
+        ["--snapshot-kind=app-aot-elf", "--elf=$noneElf", aotDill]);
+    await run(sanitizerGenSnapshot,
+        ["--snapshot-kind=app-aot-elf", "--elf=$sanitizerElf", aotDill]);
+    await run(noneGenSnapshot, [
+      "--snapshot-kind=app-aot-elf",
+      "--elf=$sanitizerElf2",
+      targetFlag,
+      aotDill
+    ]);
+
+    await run(noneAotRuntime, [noneElf]);
+    await run(sanitizerAotRuntime, [sanitizerElf]);
+    await run(sanitizerAotRuntime, [sanitizerElf2]);
+
+    var errorLines = await runError(noneAotRuntime, [sanitizerElf]);
+    Expect.contains("Snapshot not compatible", errorLines[0]);
+    errorLines = await runError(noneAotRuntime, [sanitizerElf2]);
+    Expect.contains("Snapshot not compatible", errorLines[0]);
+    errorLines = await runError(sanitizerAotRuntime, [noneElf]);
+    Expect.contains("Snapshot not compatible", errorLines[0]);
+  });
+}
@@ -359,6 +359,9 @@ dart/finalizer/finalizer_isolate_groups_run_gc_test: SkipByDesign # uses spawnUri
 dart/isolates/send_object_to_spawn_uri_isolate_test: SkipByDesign # uses spawnUri
 dart/issue32950_test: SkipByDesign # uses spawnUri.
 
+[ $runtime != dart_precompiled || $sanitizer != msan && $sanitizer != tsan ]
+dart/sanitizer_compatibility_test: SkipByDesign
+
 [ $system != macos || $simulator ]
 dart/thread_priority_macos_test: SkipByDesign
 
@@ -286,7 +286,6 @@ void Assembler::Align(intptr_t alignment, intptr_t offset) {
   ASSERT(((offset + buffer_.GetPosition()) & (alignment - 1)) == 0);
 }
 
-#if defined(TARGET_USES_THREAD_SANITIZER)
 void Assembler::TsanLoadAcquire(Register addr) {
   LeafRuntimeScope rt(this, /*frame_size=*/0, /*preserve_registers=*/true);
   MoveRegister(R0, addr);
@@ -298,7 +297,6 @@ void Assembler::TsanStoreRelease(Register addr) {
   MoveRegister(R0, addr);
   rt.Call(kTsanStoreReleaseRuntimeEntry, /*argument_count=*/1);
 }
-#endif
 
 static int CountLeadingZeros(uint64_t value, int width) {
   if (width == 64) return Utils::CountLeadingZeros64(value);
@@ -1542,7 +1540,7 @@ void Assembler::EnterFullSafepoint(Register state) {
   ASSERT(addr != state);
 
   Label slow_path, done, retry;
-  if (FLAG_use_slow_path || kTargetUsesThreadSanitizer) {
+  if (FLAG_use_slow_path || FLAG_target_thread_sanitizer) {
     b(&slow_path);
   }
 
@@ -1557,7 +1555,7 @@ void Assembler::EnterFullSafepoint(Register state) {
   stxr(TMP, state, addr);
   cbz(&done, TMP);  // 0 means stxr was successful.
 
-  if (!FLAG_use_slow_path && !kTargetUsesThreadSanitizer) {
+  if (!FLAG_use_slow_path && !FLAG_target_thread_sanitizer) {
     b(&retry);
   }
 
@@ -1601,7 +1599,7 @@ void Assembler::ExitFullSafepoint(Register state,
   ASSERT(addr != state);
 
   Label slow_path, done, retry;
-  if (FLAG_use_slow_path || kTargetUsesThreadSanitizer) {
+  if (FLAG_use_slow_path || FLAG_target_thread_sanitizer) {
     b(&slow_path);
   }
 
@@ -1616,7 +1614,7 @@ void Assembler::ExitFullSafepoint(Register state,
   stxr(TMP, state, addr);
   cbz(&done, TMP);  // 0 means stxr was successful.
 
-  if (!FLAG_use_slow_path && !kTargetUsesThreadSanitizer) {
+  if (!FLAG_use_slow_path && !FLAG_target_thread_sanitizer) {
     b(&retry);
   }
 
@@ -522,10 +522,8 @@ class Assembler : public AssemblerBase {
     StoreToOffset(src, base, offset, kEightBytes);
   }
 
-#if defined(TARGET_USES_THREAD_SANITIZER)
   void TsanLoadAcquire(Register addr);
   void TsanStoreRelease(Register addr);
-#endif
 
   void LoadAcquire(Register dst,
                    const Address& address,
@@ -538,9 +536,9 @@ class Assembler : public AssemblerBase {
       src = TMP2;
     }
     ldar(dst, src, size);
-#if defined(TARGET_USES_THREAD_SANITIZER)
-    TsanLoadAcquire(src);
-#endif
+    if (FLAG_target_thread_sanitizer) {
+      TsanLoadAcquire(src);
+    }
   }
 
 #if defined(DART_COMPRESSED_POINTERS)
@@ -561,9 +559,9 @@ class Assembler : public AssemblerBase {
      dst = TMP2;
     }
     stlr(src, dst, size);
-#if defined(TARGET_USES_THREAD_SANITIZER)
-    TsanStoreRelease(dst);
-#endif
+    if (FLAG_target_thread_sanitizer) {
+      TsanStoreRelease(dst);
+    }
   }
 
   void CompareWithMemoryValue(Register value,
@@ -7559,7 +7559,7 @@ ASSEMBLER_TEST_RUN(CompareImmediate32Negative, test) {
 }
 
 // can't call (tsan) runtime methods
-#if !defined(TARGET_USES_THREAD_SANITIZER)
+#if !defined(USING_THREAD_SANITIZER)
 
 ASSEMBLER_TEST_GENERATE(StoreReleaseLoadAcquire, assembler) {
   __ SetupDartSP();
@@ -7636,7 +7636,7 @@ ASSEMBLER_TEST_RUN(StoreReleaseLoadAcquire1024, test) {
       "mov csp, sp\n"
      "ret\n");
 }
-#endif  // !defined(TARGET_USES_THREAD_SANITIZER)
+#endif  // !defined(USING_THREAD_SANITIZER)
 
 static void RangeCheck(Assembler* assembler, Register value, Register temp) {
   const Register return_reg = CallingConventions::kReturnReg;
@@ -2229,11 +2229,11 @@ void Assembler::StoreObjectIntoObjectNoBarrier(Register object,
   // We don't run TSAN on 32 bit systems.
   // Don't call StoreRelease here because we would have to load the immediate
   // into a temp register which causes spilling.
-#if defined(TARGET_USES_THREAD_SANITIZER)
-  if (memory_order == kRelease) {
-    UNIMPLEMENTED();
+  if (FLAG_target_thread_sanitizer) {
+    if (memory_order == kRelease) {
+      UNIMPLEMENTED();
+    }
   }
-#endif
   if (target::CanEmbedAsRawPointerInGeneratedCode(value)) {
     Immediate imm_value(target::ToRawPointer(value));
     movl(dest, imm_value);
@@ -674,9 +674,9 @@ class Assembler : public AssemblerBase {
     // On intel loads have load-acquire behavior (i.e. loads are not re-ordered
     // with other loads).
     Load(dst, address, size);
-#if defined(TARGET_USES_THREAD_SANITIZER)
-#error No support for TSAN on IA32.
-#endif
+    if (FLAG_target_thread_sanitizer) {
+      FATAL("No support for TSAN on IA32.");
+    }
   }
   void StoreRelease(Register src,
                     const Address& address,
@@ -684,9 +684,9 @@ class Assembler : public AssemblerBase {
     // On intel stores have store-release behavior (i.e. stores are not
     // re-ordered with other stores).
     Store(src, address, size);
-#if defined(TARGET_USES_THREAD_SANITIZER)
-#error No support for TSAN on IA32.
-#endif
+    if (FLAG_target_thread_sanitizer) {
+      FATAL("No support for TSAN on IA32.");
+    }
   }
 
   void CompareWithMemoryValue(Register value,
@@ -2567,7 +2567,6 @@ void Assembler::Jump(const Address& address) {
   jr(TMP2);
 }
 
-#if defined(TARGET_USES_THREAD_SANITIZER)
 void Assembler::TsanLoadAcquire(Register addr) {
   LeafRuntimeScope rt(this, /*frame_size=*/0, /*preserve_registers=*/true);
   MoveRegister(A0, addr);
@@ -2578,7 +2577,6 @@ void Assembler::TsanStoreRelease(Register addr) {
   MoveRegister(A0, addr);
   rt.Call(kTsanStoreReleaseRuntimeEntry, /*argument_count=*/1);
 }
-#endif
 
 void Assembler::LoadAcquire(Register dst,
                             const Address& address,
@@ -2587,14 +2585,14 @@ void Assembler::LoadAcquire(Register dst,
   Load(dst, address, size);
   fence(HartEffects::kRead, HartEffects::kMemory);
 
-#if defined(TARGET_USES_THREAD_SANITIZER)
-  if (address.offset() == 0) {
-    TsanLoadAcquire(address.base());
-  } else {
-    AddImmediate(TMP2, address.base(), address.offset());
-    TsanLoadAcquire(TMP2);
+  if (FLAG_target_thread_sanitizer) {
+    if (address.offset() == 0) {
+      TsanLoadAcquire(address.base());
+    } else {
+      AddImmediate(TMP2, address.base(), address.offset());
+      TsanLoadAcquire(TMP2);
+    }
   }
-#endif
 }
 
 void Assembler::StoreRelease(Register src,
@@ -3773,7 +3771,7 @@ void Assembler::EnterFullSafepoint(Register state) {
   ASSERT(addr != state);
 
   Label slow_path, done, retry;
-  if (FLAG_use_slow_path || kTargetUsesThreadSanitizer) {
+  if (FLAG_use_slow_path || FLAG_target_thread_sanitizer) {
     j(&slow_path, Assembler::kNearJump);
   }
 
@@ -3787,7 +3785,7 @@ void Assembler::EnterFullSafepoint(Register state) {
   sc(state, state, Address(addr, 0));
   beqz(state, &done, Assembler::kNearJump);  // 0 means sc was successful.
 
-  if (!FLAG_use_slow_path && !kTargetUsesThreadSanitizer) {
+  if (!FLAG_use_slow_path && !FLAG_target_thread_sanitizer) {
     j(&retry, Assembler::kNearJump);
   }
 
@@ -3809,7 +3807,7 @@ void Assembler::ExitFullSafepoint(Register state,
   ASSERT(addr != state);
 
   Label slow_path, done, retry;
-  if (FLAG_use_slow_path || kTargetUsesThreadSanitizer) {
+  if (FLAG_use_slow_path || FLAG_target_thread_sanitizer) {
     j(&slow_path, Assembler::kNearJump);
   }
 
@@ -3823,7 +3821,7 @@ void Assembler::ExitFullSafepoint(Register state,
   sc(state, state, Address(addr, 0));
   beqz(state, &done, Assembler::kNearJump);  // 0 means sc was successful.
 
-  if (!FLAG_use_slow_path && !kTargetUsesThreadSanitizer) {
+  if (!FLAG_use_slow_path && !FLAG_target_thread_sanitizer) {
     j(&retry, Assembler::kNearJump);
   }
 
@@ -873,10 +873,8 @@ class Assembler : public MicroAssembler {
     StoreToOffset(src, base, offset, kWordBytes);
   }
 
-#if defined(TARGET_USES_THREAD_SANITIZER)
   void TsanLoadAcquire(Register addr);
   void TsanStoreRelease(Register addr);
-#endif
 
   void LoadAcquire(Register dst,
                    const Address& address,
@@ -148,7 +148,7 @@ void Assembler::EnterFullSafepoint() {
   // For TSAN, we always go to the runtime so TSAN is aware of the release
   // semantics of entering the safepoint.
   Label done, slow_path;
-  if (FLAG_use_slow_path || kTargetUsesThreadSanitizer) {
+  if (FLAG_use_slow_path || FLAG_target_thread_sanitizer) {
    jmp(&slow_path);
  }
 
@@ -162,7 +162,7 @@ void Assembler::EnterFullSafepoint() {
   popq(RAX);
   cmpq(TMP, Immediate(target::Thread::full_safepoint_state_unacquired()));
 
-  if (!FLAG_use_slow_path && !kTargetUsesThreadSanitizer) {
+  if (!FLAG_use_slow_path && !FLAG_target_thread_sanitizer) {
     j(EQUAL, &done);
   }
 
@@ -205,7 +205,7 @@ void Assembler::ExitFullSafepoint(bool ignore_unwind_in_progress) {
   // For TSAN, we always go to the runtime so TSAN is aware of the acquire
   // semantics of leaving the safepoint.
   Label done, slow_path;
-  if (FLAG_use_slow_path || kTargetUsesThreadSanitizer) {
+  if (FLAG_use_slow_path || FLAG_target_thread_sanitizer) {
     jmp(&slow_path);
   }
 
@@ -221,7 +221,7 @@ void Assembler::ExitFullSafepoint(bool ignore_unwind_in_progress) {
   popq(RAX);
   cmpq(TMP, Immediate(target::Thread::full_safepoint_state_acquired()));
 
-  if (!FLAG_use_slow_path && !kTargetUsesThreadSanitizer) {
+  if (!FLAG_use_slow_path && !FLAG_target_thread_sanitizer) {
     j(EQUAL, &done);
   }
 
@@ -2023,7 +2023,6 @@ LeafRuntimeScope::~LeafRuntimeScope() {
   __ LeaveFrame();
 }
 
-#if defined(TARGET_USES_THREAD_SANITIZER)
 void Assembler::TsanLoadAcquire(Address addr) {
   LeafRuntimeScope rt(this, /*frame_size=*/0, /*preserve_registers=*/true);
   leaq(CallingConventions::kArg1Reg, addr);
@@ -2035,7 +2034,6 @@ void Assembler::TsanStoreRelease(Address addr) {
   leaq(CallingConventions::kArg1Reg, addr);
   rt.Call(kTsanStoreReleaseRuntimeEntry, /*argument_count=*/1);
 }
-#endif
 
 void Assembler::RestoreCodePointer() {
   movq(CODE_REG,
@@ -1102,10 +1102,8 @@ class Assembler : public AssemblerBase {
     }
   }
 
-#if defined(TARGET_USES_THREAD_SANITIZER)
   void TsanLoadAcquire(Address addr);
   void TsanStoreRelease(Address addr);
-#endif
 
   void LoadAcquire(Register dst,
                    const Address& address,
@@ -1113,18 +1111,18 @@ class Assembler : public AssemblerBase {
     // On intel loads have load-acquire behavior (i.e. loads are not re-ordered
     // with other loads).
     Load(dst, address, size);
-#if defined(TARGET_USES_THREAD_SANITIZER)
-    TsanLoadAcquire(address);
-#endif
+    if (FLAG_target_thread_sanitizer) {
+      TsanLoadAcquire(address);
+    }
   }
 #if defined(DART_COMPRESSED_POINTERS)
   void LoadAcquireCompressed(Register dst, const Address& address) override {
     // On intel loads have load-acquire behavior (i.e. loads are not re-ordered
     // with other loads).
     LoadCompressed(dst, address);
-#if defined(TARGET_USES_THREAD_SANITIZER)
-    TsanLoadAcquire(address);
-#endif
+    if (FLAG_target_thread_sanitizer) {
+      TsanLoadAcquire(address);
+    }
   }
 #endif
   void StoreRelease(Register src,
@@ -1133,9 +1131,9 @@ class Assembler : public AssemblerBase {
     // On intel stores have store-release behavior (i.e. stores are not
     // re-ordered with other stores).
     Store(src, address, size);
-#if defined(TARGET_USES_THREAD_SANITIZER)
-    TsanStoreRelease(address);
-#endif
+    if (FLAG_target_thread_sanitizer) {
+      TsanStoreRelease(address);
+    }
   }
 
   void CompareWithMemoryValue(Register value,
@@ -6227,11 +6227,11 @@ IMMEDIATE_TEST(AddrImmRAXByte,
                __ popq(RAX))
 
 ASSEMBLER_TEST_GENERATE(StoreReleaseLoadAcquire, assembler) {
-#if defined(TARGET_USES_THREAD_SANITIZER)
-  // On TSAN builds StoreRelease/LoadAcquire will do a runtime
-  // call to tell TSAN about our action.
-  __ MoveRegister(THR, CallingConventions::kArg2Reg);
-#endif
+  if (FLAG_target_thread_sanitizer) {
+    // On TSAN builds StoreRelease/LoadAcquire will do a runtime
+    // call to tell TSAN about our action.
+    __ MoveRegister(THR, CallingConventions::kArg2Reg);
+  }
 
   __ pushq(RCX);
   __ xorq(RCX, RCX);
@@ -6306,11 +6306,11 @@ ASSEMBLER_TEST_RUN(StoreReleaseLoadAcquire, test) {
 }
 
 ASSEMBLER_TEST_GENERATE(StoreReleaseLoadAcquire1024, assembler) {
-#if defined(TARGET_USES_THREAD_SANITIZER)
-  // On TSAN builds StoreRelease/LoadAcquire will do a runtime
-  // call to tell TSAN about our action.
-  __ MoveRegister(THR, CallingConventions::kArg2Reg);
-#endif
+  if (FLAG_target_thread_sanitizer) {
+    // On TSAN builds StoreRelease/LoadAcquire will do a runtime
+    // call to tell TSAN about our action.
+    __ MoveRegister(THR, CallingConventions::kArg2Reg);
+  }
 
   __ pushq(RCX);
   __ xorq(RCX, RCX);
@@ -6327,19 +6327,19 @@ ASSEMBLER_TEST_GENERATE(StoreReleaseLoadAcquire1024, assembler) {
 ASSEMBLER_TEST_RUN(StoreReleaseLoadAcquire1024, test) {
   const intptr_t res = test->InvokeWithCodeAndThread<intptr_t>(123);
   EXPECT_EQ(123, res);
-#if !defined(TARGET_USES_THREAD_SANITIZER)
-  EXPECT_DISASSEMBLY_NOT_WINDOWS(
-      "push rcx\n"
-      "xorq rcx,rcx\n"
-      "push rcx\n"
-      "subq rsp,0x400\n"
-      "movq [rsp+0x400],rdx\n"
-      "movq rax,[rsp+0x400]\n"
-      "addq rsp,0x400\n"
-      "pop rcx\n"
-      "pop rcx\n"
-      "ret\n");
-#endif
+  if (!FLAG_target_thread_sanitizer) {
+    EXPECT_DISASSEMBLY_NOT_WINDOWS(
+        "push rcx\n"
+        "xorq rcx,rcx\n"
+        "push rcx\n"
+        "subq rsp,0x400\n"
+        "movq [rsp+0x400],rdx\n"
+        "movq rax,[rsp+0x400]\n"
+        "addq rsp,0x400\n"
+        "pop rcx\n"
+        "pop rcx\n"
+        "ret\n");
+  }
 }
 
 ASSEMBLER_TEST_GENERATE(MoveByteRunTest, assembler) {
@@ -7065,11 +7065,9 @@ void MemoryCopyInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   // additionally verify here that there is an actual overlap. Instead, only
   // do that when we need to calculate the end address of the regions in
   // the loop case.
-#if defined(USING_MEMORY_SANITIZER)
-  const auto jump_distance = compiler::Assembler::kFarJump;
-#else
-  const auto jump_distance = compiler::Assembler::kNearJump;
-#endif
+  const auto jump_distance = FLAG_target_memory_sanitizer
+                                 ? compiler::Assembler::kFarJump
+                                 : compiler::Assembler::kNearJump;
   __ BranchIf(UNSIGNED_LESS_EQUAL, &copy_forwards, jump_distance);
   __ Comment("Copying backwards");
   if (constant_length) {
@@ -7169,13 +7167,15 @@ void MemoryCopyInstr::EmitUnrolledCopy(FlowGraphCompiler* compiler,
     }
   }
 
-#if defined(USING_MEMORY_SANITIZER) && defined(TARGET_ARCH_X64)
-  RegisterSet kVolatileRegisterSet(CallingConventions::kVolatileCpuRegisters,
-                                   CallingConventions::kVolatileXmmRegisters);
-  __ PushRegisters(kVolatileRegisterSet);
-  __ MsanUnpoison(dest_reg, num_bytes);
-  __ PopRegisters(kVolatileRegisterSet);
+  if (FLAG_target_memory_sanitizer) {
+#if defined(TARGET_ARCH_X64)
+    RegisterSet kVolatileRegisterSet(CallingConventions::kVolatileCpuRegisters,
+                                     CallingConventions::kVolatileXmmRegisters);
+    __ PushRegisters(kVolatileRegisterSet);
+    __ MsanUnpoison(dest_reg, num_bytes);
+    __ PopRegisters(kVolatileRegisterSet);
 #endif
+  }
 }
 #endif
 
@@ -1788,9 +1788,9 @@ void FfiCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
 
   // Reserve space for the arguments that go on the stack (if any), then align.
   __ ReserveAlignedFrameSpace(marshaller_.RequiredStackSpaceInBytes());
-#if defined(USING_MEMORY_SANITIZER)
-  UNIMPLEMENTED();
-#endif
+  if (FLAG_target_memory_sanitizer) {
+    UNIMPLEMENTED();
+  }
 
   EmitParamMoves(compiler, is_leaf_ ? FPREG : saved_fp_or_sp, temp1, TMP);
 
@@ -252,9 +252,9 @@ void MemoryCopyInstr::EmitLoopCopy(FlowGraphCompiler* compiler,
   const bool reversed = copy_forwards != nullptr;
   const intptr_t shift = Utils::ShiftForPowerOfTwo(element_size_) -
                          (unboxed_inputs() ? 0 : kSmiTagShift);
-#if defined(USING_MEMORY_SANITIZER)
-  __ PushPair(length_reg, dest_reg);
-#endif
+  if (FLAG_target_memory_sanitizer) {
+    __ PushPair(length_reg, dest_reg);
+  }
   if (reversed) {
     // Verify that the overlap actually exists by checking to see if
     // dest_start < src_end.
@@ -298,18 +298,18 @@ void MemoryCopyInstr::EmitLoopCopy(FlowGraphCompiler* compiler,
                  compiler::kObjectBytes);
   __ b(&loop, NOT_ZERO);
 
-#if defined(USING_MEMORY_SANITIZER)
-  __ PopPair(length_reg, dest_reg);
-  if (!unboxed_inputs()) {
-    __ ExtendNonNegativeSmi(length_reg);
+  if (FLAG_target_memory_sanitizer) {
+    __ PopPair(length_reg, dest_reg);
+    if (!unboxed_inputs()) {
+      __ ExtendNonNegativeSmi(length_reg);
+    }
+    if (shift < 0) {
+      __ AsrImmediate(length_reg, length_reg, -shift);
+    } else {
+      __ LslImmediate(length_reg, length_reg, shift);
+    }
+    __ MsanUnpoison(dest_reg, length_reg);
   }
-  if (shift < 0) {
-    __ AsrImmediate(length_reg, length_reg, -shift);
-  } else {
-    __ LslImmediate(length_reg, length_reg, shift);
-  }
-  __ MsanUnpoison(dest_reg, length_reg);
-#endif
 }
 
 void MemoryCopyInstr::EmitComputeStartPointer(FlowGraphCompiler* compiler,
@@ -1508,8 +1508,7 @@ void FfiCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   // Reserve space for the arguments that go on the stack (if any), then align.
   intptr_t stack_space = marshaller_.RequiredStackSpaceInBytes();
   __ ReserveAlignedFrameSpace(stack_space);
-#if defined(USING_MEMORY_SANITIZER)
-  {
+  if (FLAG_target_memory_sanitizer) {
     RegisterSet kVolatileRegisterSet(kAbiVolatileCpuRegs & ~(1 << SP),
                                      kAbiVolatileFpuRegs);
     __ mov(temp1, SP);
@@ -1519,7 +1518,7 @@ void FfiCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
     // reserved for outgoing arguments and the spills which might have
     // been generated by the register allocator. Some of these spill slots
     // can be used as handles passed down to the runtime.
-    __ sub(R1, is_leaf_ ? FPREG : saved_fp_or_sp, temp1);
+    __ sub(R1, is_leaf_ ? FPREG : saved_fp_or_sp, compiler::Operand(temp1));
     __ MsanUnpoison(temp1, R1);
 
     // Incoming Dart arguments to this trampoline are potentially used as local
@@ -1534,7 +1533,6 @@ void FfiCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
 
     RESTORES_LR_FROM_FRAME(__ PopRegisters(kVolatileRegisterSet));
   }
-#endif
 
   EmitParamMoves(compiler, is_leaf_ ? FPREG : saved_fp_or_sp, temp1, temp2);
 
@@ -2282,20 +2280,20 @@ void StoreIndexedInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
     UNREACHABLE();
   }
 
-#if defined(USING_MEMORY_SANITIZER)
-  if (index.IsRegister()) {
-    __ ComputeElementAddressForRegIndex(TMP, IsUntagged(), class_id(),
-                                        index_scale(), index_unboxed_, array,
-                                        index.reg());
-  } else {
-    __ ComputeElementAddressForIntIndex(TMP, IsUntagged(), class_id(),
-                                        index_scale(), array,
-                                        Smi::Cast(index.constant()).Value());
+  if (FLAG_target_memory_sanitizer) {
+    if (index.IsRegister()) {
+      __ ComputeElementAddressForRegIndex(TMP, IsUntagged(), class_id(),
+                                          index_scale(), index_unboxed_, array,
+                                          index.reg());
+    } else {
+      __ ComputeElementAddressForIntIndex(TMP, IsUntagged(), class_id(),
+                                          index_scale(), array,
+                                          Smi::Cast(index.constant()).Value());
+    }
+    const intptr_t length_in_bytes = RepresentationUtils::ValueSize(
+        RepresentationUtils::RepresentationOfArrayElement(class_id()));
+    __ MsanUnpoison(TMP, length_in_bytes);
   }
-  const intptr_t length_in_bytes = RepresentationUtils::ValueSize(
-      RepresentationUtils::RepresentationOfArrayElement(class_id()));
-  __ MsanUnpoison(TMP, length_in_bytes);
-#endif
 }
 
 static void LoadValueCid(FlowGraphCompiler* compiler,
@@ -1241,9 +1241,9 @@ void FfiCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
 
   // Reserve space for the arguments that go on the stack (if any), then align.
   __ ReserveAlignedFrameSpace(stack_required);
-#if defined(USING_MEMORY_SANITIZER)
-  UNIMPLEMENTED();
-#endif
+  if (FLAG_target_memory_sanitizer) {
+    UNIMPLEMENTED();
+  }
 
   EmitParamMoves(compiler, is_leaf_ ? FPREG : saved_fp_or_sp, temp,
                  locs()->temp(1).reg());
@@ -1653,8 +1653,7 @@ void FfiCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   // Reserve space for the arguments that go on the stack (if any), then align.
   intptr_t stack_space = marshaller_.RequiredStackSpaceInBytes();
   __ ReserveAlignedFrameSpace(stack_space);
-#if defined(USING_MEMORY_SANITIZER)
-  {
+  if (FLAG_target_memory_sanitizer) {
     RegisterSet kVolatileRegisterSet(kAbiVolatileCpuRegs, kAbiVolatileFpuRegs);
     __ mv(temp1, SP);
     __ PushRegisters(kVolatileRegisterSet);
@@ -1679,7 +1678,6 @@ void FfiCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
 
     __ PopRegisters(kVolatileRegisterSet);
   }
-#endif
 
   EmitParamMoves(compiler, is_leaf_ ? FPREG : saved_fp_or_sp, temp1, temp2);
 
@@ -2544,9 +2542,9 @@ void StoreIndexedInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
     UNREACHABLE();
   }
 
-#if defined(USING_MEMORY_SANITIZER)
-  UNIMPLEMENTED();
-#endif
+  if (FLAG_target_memory_sanitizer) {
+    UNIMPLEMENTED();
+  }
 }
 
 static void LoadValueCid(FlowGraphCompiler* compiler,
@@ -236,11 +236,9 @@ void MemoryCopyInstr::EmitLoopCopy(FlowGraphCompiler* compiler,
   const ScaleFactor scale = ToScaleFactor(mov_size, /*index_unboxed=*/true);
   __ leaq(TMP, compiler::Address(src_reg, length_reg, scale, -mov_size));
   __ CompareRegisters(dest_reg, TMP);
-#if defined(USING_MEMORY_SANITIZER)
-  const auto jump_distance = compiler::Assembler::kFarJump;
-#else
-  const auto jump_distance = compiler::Assembler::kNearJump;
-#endif
+  const auto jump_distance = FLAG_target_memory_sanitizer
+                                 ? compiler::Assembler::kFarJump
+                                 : compiler::Assembler::kNearJump;
   __ BranchIf(UNSIGNED_GREATER, copy_forwards, jump_distance);
   // The backwards move must be performed, so move TMP -> src_reg and do the
   // same adjustment for dest_reg.
@@ -249,18 +247,18 @@ void MemoryCopyInstr::EmitLoopCopy(FlowGraphCompiler* compiler,
               compiler::Address(dest_reg, length_reg, scale, -mov_size));
     __ std();
   }
-#if defined(USING_MEMORY_SANITIZER)
-  // For reversed, do the `rep` first. It sets `dest_reg` to the start again.
-  // For forward, do the unpoisining first, before `dest_reg` is modified.
-  __ movq(TMP, length_reg);
-  if (mov_size != 1) {
-    // Unpoison takes the length in bytes.
-    __ MulImmediate(TMP, mov_size);
+  if (FLAG_target_memory_sanitizer) {
+    // For reversed, do the `rep` first. It sets `dest_reg` to the start again.
+    // For forward, do the unpoisining first, before `dest_reg` is modified.
+    __ movq(TMP, length_reg);
+    if (mov_size != 1) {
+      // Unpoison takes the length in bytes.
+      __ MulImmediate(TMP, mov_size);
+    }
+    if (!reversed) {
+      __ MsanUnpoison(dest_reg, TMP);
+    }
   }
-  if (!reversed) {
-    __ MsanUnpoison(dest_reg, TMP);
-  }
-#endif
   switch (mov_size) {
     case 1:
       __ rep_movsb();
@@ -281,11 +279,11 @@ void MemoryCopyInstr::EmitLoopCopy(FlowGraphCompiler* compiler,
     __ cld();
   }
 
-#if defined(USING_MEMORY_SANITIZER)
-  if (reversed) {
-    __ MsanUnpoison(dest_reg, TMP);
+  if (FLAG_target_memory_sanitizer) {
+    if (reversed) {
+      __ MsanUnpoison(dest_reg, TMP);
+    }
   }
-#endif
 }
 
 void MemoryCopyInstr::EmitComputeStartPointer(FlowGraphCompiler* compiler,
@@ -1430,8 +1428,7 @@ void FfiCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   // Reserve space for the arguments that go on the stack (if any), then align.
   intptr_t stack_space = marshaller_.RequiredStackSpaceInBytes();
   __ ReserveAlignedFrameSpace(stack_space);
-#if defined(USING_MEMORY_SANITIZER)
-  {
+  if (FLAG_target_memory_sanitizer) {
     RegisterSet kVolatileRegisterSet(CallingConventions::kVolatileCpuRegisters,
                                      CallingConventions::kVolatileXmmRegisters);
     __ movq(temp, RSP);
@@ -1457,7 +1454,6 @@ void FfiCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
 
     __ PopRegisters(kVolatileRegisterSet);
   }
-#endif
 
   if (is_leaf_) {
     EmitParamMoves(compiler, FPREG, saved_fp, TMP);
@@ -2259,12 +2255,12 @@ void StoreIndexedInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
     UNREACHABLE();
   }
 
-#if defined(USING_MEMORY_SANITIZER)
-  __ leaq(TMP, element_address);
-  const intptr_t length_in_bytes = RepresentationUtils::ValueSize(
-      RepresentationUtils::RepresentationOfArrayElement(class_id()));
-  __ MsanUnpoison(TMP, length_in_bytes);
-#endif
+  if (FLAG_target_memory_sanitizer) {
+    __ leaq(TMP, element_address);
+    const intptr_t length_in_bytes = RepresentationUtils::ValueSize(
+        RepresentationUtils::RepresentationOfArrayElement(class_id()));
+    __ MsanUnpoison(TMP, length_in_bytes);
+  }
 }
 
 LocationSummary* GuardFieldClassInstr::MakeLocationSummary(Zone* zone,
@@ -72,7 +72,7 @@ void StubCodeCompiler::EnsureIsNewOrRemembered() {
 // [Thread::tsan_utils_->setjmp_buffer_]).
 static void WithExceptionCatchingTrampoline(Assembler* assembler,
                                             std::function<void()> fun) {
-#if defined(TARGET_USES_THREAD_SANITIZER) && !defined(USING_SIMULATOR)
+#if !defined(USING_SIMULATOR)
   const Register kTsanUtilsReg = R3;
 
   // Reserve space for arguments and align frame before entering C++ world.
@@ -87,69 +87,77 @@ static void WithExceptionCatchingTrampoline(Assembler* assembler,
   // We rely on THR being preserved across the setjmp() call.
   COMPILE_ASSERT(IsCalleeSavedRegister(THR));
 
-  Label do_native_call;
+  if (FLAG_target_memory_sanitizer) {
+    Label do_native_call;
 
-  // Save old jmp_buf.
-  __ ldr(kTsanUtilsReg, Address(THR, target::Thread::tsan_utils_offset()));
-  __ ldr(TMP,
-         Address(kTsanUtilsReg, target::TsanUtils::setjmp_buffer_offset()));
-  __ Push(TMP);
+    // Save old jmp_buf.
+    __ ldr(kTsanUtilsReg, Address(THR, target::Thread::tsan_utils_offset()));
+    __ ldr(TMP,
+           Address(kTsanUtilsReg, target::TsanUtils::setjmp_buffer_offset()));
+    __ Push(TMP);
 
-  // Allocate jmp_buf struct on stack & remember pointer to it on the
-  // [Thread::tsan_utils_->setjmp_buffer] (which exceptions.cc will longjmp()
-  // to)
-  __ AddImmediate(SP, -kJumpBufferSize);
-  __ str(SP, Address(kTsanUtilsReg, target::TsanUtils::setjmp_buffer_offset()));
+    // Allocate jmp_buf struct on stack & remember pointer to it on the
+    // [Thread::tsan_utils_->setjmp_buffer] (which exceptions.cc will longjmp()
+    // to)
+    __ AddImmediate(SP, -kJumpBufferSize);
+    __ str(SP,
+           Address(kTsanUtilsReg, target::TsanUtils::setjmp_buffer_offset()));
 
-  // Call setjmp() with a pointer to the allocated jmp_buf struct.
-  __ MoveRegister(R0, SP);
-  __ PushRegisters(volatile_registers);
-  __ EnterCFrame(0);
-  __ mov(R25, CSP);
-  __ mov(CSP, SP);
-  __ ldr(kTsanUtilsReg, Address(THR, target::Thread::tsan_utils_offset()));
-  __ CallCFunction(
-      Address(kTsanUtilsReg, target::TsanUtils::setjmp_function_offset()));
-  __ mov(SP, CSP);
-  __ mov(CSP, R25);
-  __ LeaveCFrame();
-  __ PopRegisters(volatile_registers);
+    // Call setjmp() with a pointer to the allocated jmp_buf struct.
+    __ MoveRegister(R0, SP);
+    __ PushRegisters(volatile_registers);
+    __ EnterCFrame(0);
+    __ mov(R25, CSP);
+    __ mov(CSP, SP);
+    __ ldr(kTsanUtilsReg, Address(THR, target::Thread::tsan_utils_offset()));
+    __ CallCFunction(
+        Address(kTsanUtilsReg, target::TsanUtils::setjmp_function_offset()));
+    __ mov(SP, CSP);
+    __ mov(CSP, R25);
+    __ LeaveCFrame();
+    __ PopRegisters(volatile_registers);
 
-  // We are the target of a longjmp() iff setjmp() returns non-0.
-  __ cbz(&do_native_call, R0);
+    // We are the target of a longjmp() iff setjmp() returns non-0.
+    __ cbz(&do_native_call, R0);
 
-  // We are the target of a longjmp: Cleanup the stack and tail-call the
-  // JumpToFrame stub which will take care of unwinding the stack and hand
-  // execution to the catch entry.
-  __ AddImmediate(SP, kJumpBufferSize);
-  __ ldr(kTsanUtilsReg, Address(THR, target::Thread::tsan_utils_offset()));
-  __ Pop(TMP);
-  __ str(TMP,
-         Address(kTsanUtilsReg, target::TsanUtils::setjmp_buffer_offset()));
+    // We are the target of a longjmp: Cleanup the stack and tail-call the
+    // JumpToFrame stub which will take care of unwinding the stack and hand
+    // execution to the catch entry.
+    __ AddImmediate(SP, kJumpBufferSize);
+    __ ldr(kTsanUtilsReg, Address(THR, target::Thread::tsan_utils_offset()));
+    __ Pop(TMP);
+    __ str(TMP,
+           Address(kTsanUtilsReg, target::TsanUtils::setjmp_buffer_offset()));
 
-  __ ldr(R0, Address(kTsanUtilsReg, target::TsanUtils::exception_pc_offset()));
-  __ ldr(R1, Address(kTsanUtilsReg, target::TsanUtils::exception_sp_offset()));
-  __ ldr(R2, Address(kTsanUtilsReg, target::TsanUtils::exception_fp_offset()));
-  __ MoveRegister(R3, THR);
-  __ Jump(Address(THR, target::Thread::jump_to_frame_entry_point_offset()));
+    __ ldr(R0,
+           Address(kTsanUtilsReg, target::TsanUtils::exception_pc_offset()));
+    __ ldr(R1,
+           Address(kTsanUtilsReg, target::TsanUtils::exception_sp_offset()));
+    __ ldr(R2,
+           Address(kTsanUtilsReg, target::TsanUtils::exception_fp_offset()));
+    __ MoveRegister(R3, THR);
+    __ Jump(Address(THR, target::Thread::jump_to_frame_entry_point_offset()));
 
-  // We leave the created [jump_buf] structure on the stack as well as the
-  // pushed old [Thread::tsan_utils_->setjmp_buffer_].
-  __ Bind(&do_native_call);
-  __ MoveRegister(kSavedRspReg, SP);
-#endif  // defined(TARGET_USES_THREAD_SANITIZER) && !defined(USING_SIMULATOR)
+    // We leave the created [jump_buf] structure on the stack as well as the
+    // pushed old [Thread::tsan_utils_->setjmp_buffer_].
+    __ Bind(&do_native_call);
+    __ MoveRegister(kSavedRspReg, SP);
+  }
+#endif  // !defined(USING_SIMULATOR)
 
   fun();
 
-#if defined(TARGET_USES_THREAD_SANITIZER) && !defined(USING_SIMULATOR)
-  __ MoveRegister(SP, kSavedRspReg);
-  __ AddImmediate(SP, kJumpBufferSize);
-  const Register kTsanUtilsReg2 = kSavedRspReg;
-  __ ldr(kTsanUtilsReg2, Address(THR, target::Thread::tsan_utils_offset()));
-  __ Pop(TMP);
-  __ str(TMP,
-         Address(kTsanUtilsReg2, target::TsanUtils::setjmp_buffer_offset()));
-#endif  // defined(TARGET_USES_THREAD_SANITIZER) && !defined(USING_SIMULATOR)
+#if !defined(USING_SIMULATOR)
+  if (FLAG_target_memory_sanitizer) {
+    __ MoveRegister(SP, kSavedRspReg);
+    __ AddImmediate(SP, kJumpBufferSize);
+    const Register kTsanUtilsReg2 = kSavedRspReg;
+    __ ldr(kTsanUtilsReg2, Address(THR, target::Thread::tsan_utils_offset()));
+    __ Pop(TMP);
+    __ str(TMP,
+           Address(kTsanUtilsReg2, target::TsanUtils::setjmp_buffer_offset()));
+  }
+#endif  // !defined(USING_SIMULATOR)
 }
 
 // Input parameters:
@@ -74,7 +74,7 @@ void StubCodeCompiler::EnsureIsNewOrRemembered() {
 // [Thread::tsan_utils_->setjmp_buffer_]).
 static void WithExceptionCatchingTrampoline(Assembler* assembler,
                                             std::function<void()> fun) {
-#if defined(TARGET_USES_THREAD_SANITIZER) && !defined(USING_SIMULATOR)
+#if !defined(USING_SIMULATOR)
   const Register kTsanUtilsReg = RAX;
 
   // Reserve space for arguments and align frame before entering C++ world.
@@ -89,70 +89,74 @@ static void WithExceptionCatchingTrampoline(Assembler* assembler,
   // We rely on THR being preserved across the setjmp() call.
   COMPILE_ASSERT(IsCalleeSavedRegister(THR));
 
-  Label do_native_call;
+  if (FLAG_target_thread_sanitizer) {
+    Label do_native_call;
 
-  // Save old jmp_buf.
-  __ movq(kTsanUtilsReg, Address(THR, target::Thread::tsan_utils_offset()));
-  __ pushq(Address(kTsanUtilsReg, target::TsanUtils::setjmp_buffer_offset()));
+    // Save old jmp_buf.
+    __ movq(kTsanUtilsReg, Address(THR, target::Thread::tsan_utils_offset()));
+    __ pushq(Address(kTsanUtilsReg, target::TsanUtils::setjmp_buffer_offset()));
 
-  // Allocate jmp_buf struct on stack & remember pointer to it on the
-  // [Thread::tsan_utils_->setjmp_buffer] (which exceptions.cc will longjmp()
-  // to)
-  __ AddImmediate(RSP, Immediate(-kJumpBufferSize));
-  __ movq(Address(kTsanUtilsReg, target::TsanUtils::setjmp_buffer_offset()),
-          RSP);
+    // Allocate jmp_buf struct on stack & remember pointer to it on the
+    // [Thread::tsan_utils_->setjmp_buffer] (which exceptions.cc will longjmp()
+    // to)
+    __ AddImmediate(RSP, Immediate(-kJumpBufferSize));
+    __ movq(Address(kTsanUtilsReg, target::TsanUtils::setjmp_buffer_offset()),
+            RSP);
 
-  // Call setjmp() with a pointer to the allocated jmp_buf struct.
-  __ MoveRegister(CallingConventions::kArg1Reg, RSP);
-  __ PushRegisters(volatile_registers);
-  if (OS::ActivationFrameAlignment() > 1) {
-    __ MoveRegister(kSavedRspReg, RSP);
-    __ andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1)));
-  }
-  __ movq(kTsanUtilsReg, Address(THR, target::Thread::tsan_utils_offset()));
-  __ CallCFunction(
-      Address(kTsanUtilsReg, target::TsanUtils::setjmp_function_offset()),
-      /*restore_rsp=*/true);
-  if (OS::ActivationFrameAlignment() > 1) {
-    __ MoveRegister(RSP, kSavedRspReg);
-  }
-  __ PopRegisters(volatile_registers);
+    // Call setjmp() with a pointer to the allocated jmp_buf struct.
+    __ MoveRegister(CallingConventions::kArg1Reg, RSP);
+    __ PushRegisters(volatile_registers);
+    if (OS::ActivationFrameAlignment() > 1) {
+      __ MoveRegister(kSavedRspReg, RSP);
+      __ andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1)));
+    }
+    __ movq(kTsanUtilsReg, Address(THR, target::Thread::tsan_utils_offset()));
+    __ CallCFunction(
+        Address(kTsanUtilsReg, target::TsanUtils::setjmp_function_offset()),
+        /*restore_rsp=*/true);
+    if (OS::ActivationFrameAlignment() > 1) {
+      __ MoveRegister(RSP, kSavedRspReg);
+    }
+    __ PopRegisters(volatile_registers);
 
-  // We are the target of a longjmp() iff setjmp() returns non-0.
-  __ CompareImmediate(RAX, 0);
-  __ BranchIf(EQUAL, &do_native_call);
+    // We are the target of a longjmp() iff setjmp() returns non-0.
+    __ CompareImmediate(RAX, 0);
+    __ BranchIf(EQUAL, &do_native_call);
 
-  // We are the target of a longjmp: Cleanup the stack and tail-call the
-  // JumpToFrame stub which will take care of unwinding the stack and hand
-  // execution to the catch entry.
-  __ AddImmediate(RSP, Immediate(kJumpBufferSize));
-  __ movq(kTsanUtilsReg, Address(THR, target::Thread::tsan_utils_offset()));
-  __ popq(Address(kTsanUtilsReg, target::TsanUtils::setjmp_buffer_offset()));
+    // We are the target of a longjmp: Cleanup the stack and tail-call the
+    // JumpToFrame stub which will take care of unwinding the stack and hand
+    // execution to the catch entry.
+    __ AddImmediate(RSP, Immediate(kJumpBufferSize));
+    __ movq(kTsanUtilsReg, Address(THR, target::Thread::tsan_utils_offset()));
+    __ popq(Address(kTsanUtilsReg, target::TsanUtils::setjmp_buffer_offset()));
 
-  __ movq(CallingConventions::kArg1Reg,
-          Address(kTsanUtilsReg, target::TsanUtils::exception_pc_offset()));
-  __ movq(CallingConventions::kArg2Reg,
-          Address(kTsanUtilsReg, target::TsanUtils::exception_sp_offset()));
-  __ movq(CallingConventions::kArg3Reg,
-          Address(kTsanUtilsReg, target::TsanUtils::exception_fp_offset()));
-  __ MoveRegister(CallingConventions::kArg4Reg, THR);
-  __ jmp(Address(THR, target::Thread::jump_to_frame_entry_point_offset()));
+    __ movq(CallingConventions::kArg1Reg,
+            Address(kTsanUtilsReg, target::TsanUtils::exception_pc_offset()));
+    __ movq(CallingConventions::kArg2Reg,
+            Address(kTsanUtilsReg, target::TsanUtils::exception_sp_offset()));
+    __ movq(CallingConventions::kArg3Reg,
+            Address(kTsanUtilsReg, target::TsanUtils::exception_fp_offset()));
+    __ MoveRegister(CallingConventions::kArg4Reg, THR);
+    __ jmp(Address(THR, target::Thread::jump_to_frame_entry_point_offset()));
 
-  // We leave the created [jump_buf] structure on the stack as well as the
-  // pushed old [Thread::tsan_utils_->setjmp_buffer_].
-  __ Bind(&do_native_call);
-  __ MoveRegister(kSavedRspReg, RSP);
-#endif  // defined(TARGET_USES_THREAD_SANITIZER) && !defined(USING_SIMULATOR)
+    // We leave the created [jump_buf] structure on the stack as well as the
+    // pushed old [Thread::tsan_utils_->setjmp_buffer_].
+    __ Bind(&do_native_call);
+    __ MoveRegister(kSavedRspReg, RSP);
+  }
+#endif  // !defined(USING_SIMULATOR)
 
   fun();
 
-#if defined(TARGET_USES_THREAD_SANITIZER) && !defined(USING_SIMULATOR)
-  __ MoveRegister(RSP, kSavedRspReg);
-  __ AddImmediate(RSP, Immediate(kJumpBufferSize));
-  const Register kTsanUtilsReg2 = kSavedRspReg;
-  __ movq(kTsanUtilsReg2, Address(THR, target::Thread::tsan_utils_offset()));
-  __ popq(Address(kTsanUtilsReg2, target::TsanUtils::setjmp_buffer_offset()));
-#endif  // defined(TARGET_USES_THREAD_SANITIZER) && !defined(USING_SIMULATOR)
+#if !defined(USING_SIMULATOR)
+  if (FLAG_target_thread_sanitizer) {
+    __ MoveRegister(RSP, kSavedRspReg);
+    __ AddImmediate(RSP, Immediate(kJumpBufferSize));
+    const Register kTsanUtilsReg2 = kSavedRspReg;
+    __ movq(kTsanUtilsReg2, Address(THR, target::Thread::tsan_utils_offset()));
+    __ popq(Address(kTsanUtilsReg2, target::TsanUtils::setjmp_buffer_offset()));
+  }
+#endif  // !defined(USING_SIMULATOR)
 }
 
 // Input parameters:
@@ -288,7 +288,7 @@ ISOLATE_UNIT_TEST_CASE(EvalExpressionExhaustCIDs) {
 }
 
 // Too slow in debug mode.
-#if !defined(DEBUG) && !defined(TARGET_USES_THREAD_SANITIZER)
+#if !defined(DEBUG) && !defined(USING_THREAD_SANITIZER)
 TEST_CASE(ManyClasses) {
   // Limit is 20 bits. Check only more than 16 bits so test completes in
   // reasonable time.
@@ -312,6 +312,6 @@ TEST_CASE(ManyClasses) {
 
   EXPECT(IsolateGroup::Current()->class_table()->NumCids() >= kNumClasses);
 }
-#endif  // !defined(DEBUG) && !defined(TARGET_USES_THREAD_SANITIZER)
+#endif  // !defined(DEBUG) && !defined(USING_THREAD_SANITIZER)
 
 }  // namespace dart
@@ -1028,7 +1028,8 @@ char* Dart::FeaturesString(IsolateGroup* isolate_group,
   if (Snapshot::IncludesCode(kind)) {
     VM_GLOBAL_FLAG_LIST(ADD_P, ADD_R, ADD_C, ADD_D);
 
-    ADD_FLAG(tsan, kTargetUsesThreadSanitizer)
+    ADD_FLAG(tsan, FLAG_target_thread_sanitizer)
+    ADD_FLAG(msan, FLAG_target_memory_sanitizer)
 
     if (kind == Snapshot::kFullJIT) {
       // Enabling assertions affects deopt ids.
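Context for the ADD_FLAG lines above: Dart::FeaturesString is embedded into every snapshot that includes code and is compared against the runtime's own feature string when the snapshot is loaded; that comparison is where the "Snapshot not compatible" error asserted by the new test comes from. A rough sketch of the shape of that check — assumed, not the literal VM code, and the variable names are hypothetical:

    // Each ADD_FLAG(name, value) appends a "name"/"no-name" style token, so the
    // string now differs between tsan/no-tsan and msan/no-msan configurations.
    // At snapshot load time (simplified):
    if (strcmp(features_in_snapshot, features_of_this_runtime) != 0) {
      return "Snapshot not compatible with the current VM configuration";
    }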
@@ -72,6 +72,25 @@ FLAG_LIST(PRODUCT_FLAG_MACRO,
 #undef PRECOMPILE_FLAG_MACRO
 #undef DEBUG_FLAG_MACRO
 
+#if defined(DART_PRECOMPILER)
+DEFINE_FLAG(bool,
+            target_thread_sanitizer,
+#if defined(TARGET_USES_THREAD_SANITIZER)
+            true,
+#else
+            false,
+#endif
+            "Generate Dart code compatible with Thread Sanitizer");
+DEFINE_FLAG(bool,
+            target_memory_sanitizer,
+#if defined(TARGET_USES_MEMORY_SANITIZER)
+            true,
+#else
+            false,
+#endif
+            "Generate Dart code compatible with Memory Sanitizer");
+#endif
+
 bool Flags::initialized_ = false;
 
 // List of registered flags.
@@ -159,6 +159,22 @@ FLAG_LIST(PRODUCT_FLAG_MACRO,
 #undef PRODUCT_FLAG_MACRO
 #undef PRECOMPILE_FLAG_MACRO
 
+#if defined(DART_PRECOMPILER)
+DECLARE_FLAG(bool, target_thread_sanitizer);
+DECLARE_FLAG(bool, target_memory_sanitizer);
+#else
+#if defined(USING_THREAD_SANITIZER)
+constexpr bool FLAG_target_thread_sanitizer = true;
+#else
+constexpr bool FLAG_target_thread_sanitizer = false;
+#endif
+#if defined(USING_MEMORY_SANITIZER)
+constexpr bool FLAG_target_memory_sanitizer = true;
+#else
+constexpr bool FLAG_target_memory_sanitizer = false;
+#endif
+#endif
+
 }  // namespace dart
 
 #endif  // RUNTIME_VM_FLAGS_H_
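A design note on the flags.h block above: inside the precompiler (gen_snapshot) the two names are real, settable flags, while in every other build they collapse to constexpr constants matching how the runtime itself was compiled. Guards written as ordinary branches therefore stay runtime-selectable in the AOT compiler but fold away entirely elsewhere — which is why the diff can replace #if blocks with plain if statements at no cost. A small sketch, with hypothetical emit helpers:

    // In gen_snapshot this is a genuine runtime test of --target_thread_sanitizer;
    // in a non-precompiler build the flag is constexpr and the dead arm is elided.
    if (FLAG_target_thread_sanitizer) {
      EmitTsanRuntimeCall();  // hypothetical: acquire/release runtime entry
    } else {
      EmitPlainAccess();      // hypothetical: the ordinary fast path
    }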
@@ -1289,6 +1289,7 @@
         "name": "build dart",
         "script": "tools/build.py",
         "arguments": [
+          "--sanitizer=none,${sanitizer}",
           "runtime",
           "runtime_precompiled"
         ]