Revert "[vm] When run under TSAN use longjmp() to skip over C++ frames before manually unwinding to the catch entry"

This reverts commit 20e6a4dc92.


Revert "[gardening] Fix Product TSAN build by fixing accidental ReleaseTSANX64 -> ProductTSANX64 issue"

This reverts commit e6c24f934e.

Reason for revert: to unblock Google3 roll.

TEST=revert

Change-Id: If22e17d514a81829a9eeb5585257fc477026ea63
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/221465
Auto-Submit: Slava Egorov <vegorov@google.com>
Commit-Queue: Slava Egorov <vegorov@google.com>
Reviewed-by: Emmanuel Pellereau <emmanuelp@google.com>
Authored by Vyacheslav Egorov on 2021-11-29 09:34:23 +00:00; committed by Commit Bot
parent 89f744c4e4
commit 3d34ec1dbf
13 changed files with 1197 additions and 3883 deletions


@@ -21,10 +21,4 @@ extern "C" void __tsan_release(void* addr);
#define NO_SANITIZE_THREAD
#endif
#if defined(USING_THREAD_SANITIZER)
#define DO_IF_TSAN(CODE) CODE
#else
#define DO_IF_TSAN(CODE)
#endif
#endif // RUNTIME_PLATFORM_THREAD_SANITIZER_H_
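
The DO_IF_TSAN(CODE) helper removed by this revert expands its argument only in TSAN builds and to nothing otherwise. A minimal, self-contained sketch of how such a macro is typically used — the macro body is copied from the hunk above, the surrounding program is illustrative only:

#include <cstdio>

#if defined(USING_THREAD_SANITIZER)
#define DO_IF_TSAN(CODE) CODE
#else
#define DO_IF_TSAN(CODE)
#endif

int main() {
  // Compiles away entirely unless USING_THREAD_SANITIZER is defined.
  DO_IF_TSAN(std::puts("extra TSAN-only work goes here"));
  std::puts("common path");
  return 0;
}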


@@ -1,11 +0,0 @@
// Copyright (c) 2021, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
main() {
for (int i = 0; i < 1000000; ++i) {
try {
throw 'a';
} catch (e, s) {}
}
}


@@ -1,11 +0,0 @@
// Copyright (c) 2021, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
main() {
for (int i = 0; i < 1000000; ++i) {
try {
throw 'a';
} catch (e, s) {}
}
}


@@ -1154,9 +1154,8 @@ void NativeCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
// Push the result place holder initialized to NULL.
__ PushObject(Object::null_object());
// Pass a pointer to the first argument in R13 (we avoid using RAX here to
// simplify the stub code that will call native code).
__ leaq(R13, compiler::Address(RSP, ArgumentCount() * kWordSize));
// Pass a pointer to the first argument in RAX.
__ leaq(RAX, compiler::Address(RSP, ArgumentCount() * kWordSize));
__ LoadImmediate(R10, compiler::Immediate(argc_tag));
const Code* stub;


@@ -4,8 +4,6 @@
#include <iostream>
#include "platform/thread_sanitizer.h"
#include "vm/compiler/runtime_api.h"
#include "vm/compiler/runtime_offsets_list.h"
#include "vm/dart_api_state.h"
@@ -31,18 +29,10 @@
#error Unknown architecture
#endif
#define SEP " \\\n && "
#if defined(USING_THREAD_SANITIZER)
#define ARCH_DEF_CPU_TSAN ARCH_DEF_CPU SEP "defined(USING_THREAD_SANITIZER)"
#else
#define ARCH_DEF_CPU_TSAN ARCH_DEF_CPU SEP "!defined(USING_THREAD_SANITIZER)"
#endif
#if defined(DART_COMPRESSED_POINTERS)
#define ARCH_DEF ARCH_DEF_CPU_TSAN SEP "defined(DART_COMPRESSED_POINTERS)"
#define ARCH_DEF ARCH_DEF_CPU " && defined(DART_COMPRESSED_POINTERS)"
#else
#define ARCH_DEF ARCH_DEF_CPU_TSAN SEP "!defined(DART_COMPRESSED_POINTERS)"
#define ARCH_DEF ARCH_DEF_CPU " && !defined(DART_COMPRESSED_POINTERS)"
#endif
namespace dart {
@@ -159,9 +149,6 @@ class OffsetsExtractor : public AllStatic {
COMMON_OFFSETS_LIST(PRINT_FIELD_OFFSET, PRINT_ARRAY_LAYOUT, PRINT_SIZEOF,
PRINT_ARRAY_SIZEOF, PRINT_PAYLOAD_SIZEOF, PRINT_RANGE,
PRINT_CONSTANT)
DO_IF_TSAN(TSAN_OFFSETS_LIST(
PRINT_FIELD_OFFSET, PRINT_ARRAY_LAYOUT, PRINT_SIZEOF,
PRINT_ARRAY_SIZEOF, PRINT_PAYLOAD_SIZEOF, PRINT_RANGE, PRINT_CONSTANT))
#undef PRINT_FIELD_OFFSET
#undef PRINT_ARRAY_LAYOUT
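
The ARCH_DEF_CPU_TSAN / ARCH_DEF macros in the hunk above rely on adjacent string literals concatenating into a single preprocessor guard that the offsets extractor prints around its output. A small sketch of that string-pasting idiom, with illustrative macro values rather than the file's exact ones:

#include <cstdio>

#define ARCH_DEF_CPU "defined(TARGET_ARCH_X64)"
#define SEP " \\\n    && "
#define ARCH_DEF ARCH_DEF_CPU SEP "!defined(DART_COMPRESSED_POINTERS)"

int main() {
  // Prints a guard of the form:
  //   #if defined(TARGET_ARCH_X64) \
  //       && !defined(DART_COMPRESSED_POINTERS)
  std::printf("#if %s\n", ARCH_DEF);
  return 0;
}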


@@ -2,8 +2,6 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
#include "platform/thread_sanitizer.h"
#include "vm/compiler/runtime_api.h"
#include "vm/object.h"
@@ -622,13 +620,6 @@ COMMON_OFFSETS_LIST(DEFINE_FIELD,
DEFINE_PAYLOAD_SIZEOF,
DEFINE_RANGE,
DEFINE_CONSTANT)
DO_IF_TSAN(TSAN_OFFSETS_LIST(DEFINE_FIELD,
DEFINE_ARRAY,
DEFINE_SIZEOF,
DEFINE_ARRAY_SIZEOF,
DEFINE_PAYLOAD_SIZEOF,
DEFINE_RANGE,
DEFINE_CONSTANT))
#else
@@ -782,13 +773,6 @@ COMMON_OFFSETS_LIST(DEFINE_FIELD,
DEFINE_PAYLOAD_SIZEOF,
DEFINE_RANGE,
DEFINE_CONSTANT)
DO_IF_TSAN(TSAN_OFFSETS_LIST(DEFINE_FIELD,
DEFINE_ARRAY,
DEFINE_SIZEOF,
DEFINE_ARRAY_SIZEOF,
DEFINE_PAYLOAD_SIZEOF,
DEFINE_RANGE,
DEFINE_CONSTANT))
#endif


@@ -19,9 +19,7 @@
// in compiler::target namespace.
#include "platform/globals.h"
#include "platform/thread_sanitizer.h"
#include "platform/utils.h"
#include "vm/allocation.h"
#include "vm/bitfield.h"
#include "vm/bss_relocs.h"
@@ -1125,15 +1123,6 @@ class Thread : public AllStatic {
static word callback_code_offset();
static word callback_stack_return_offset();
#if defined(USING_THREAD_SANITIZER)
static word setjmp_function_offset();
static word setjmp_buffer_offset();
static word exception_pc_offset();
static word exception_sp_offset();
static word exception_fp_offset();
static word jump_to_frame_entry_point_offset();
#endif // defined(USING_THREAD_SANITIZER)
static word AllocateArray_entry_point_offset();
static word write_barrier_code_offset();
static word array_write_barrier_code_offset();

File diff suppressed because it is too large.


@@ -420,13 +420,4 @@
RANGE, CONSTANT) \
FIELD(Closure, entry_point_offset)
#define TSAN_OFFSETS_LIST(FIELD, ARRAY, SIZEOF, ARRAY_SIZEOF, PAYLOAD_SIZEOF, \
RANGE, CONSTANT) \
FIELD(Thread, jump_to_frame_entry_point_offset) \
FIELD(Thread, setjmp_function_offset) \
FIELD(Thread, setjmp_buffer_offset) \
FIELD(Thread, exception_pc_offset) \
FIELD(Thread, exception_sp_offset) \
FIELD(Thread, exception_fp_offset)
#endif // RUNTIME_VM_COMPILER_RUNTIME_OFFSETS_LIST_H_


@@ -2,8 +2,6 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
#include <setjmp.h>
#include "vm/compiler/runtime_api.h"
#include "vm/globals.h"
@@ -60,94 +58,6 @@ void StubCodeCompiler::EnsureIsNewOrRemembered(Assembler* assembler,
__ Bind(&done);
}
// In TSAN mode the runtime will throw an exception using an intermediary
// longjmp() call to unwind the C frames in a way that TSAN can understand.
//
// This wrapper will set up a [jmp_buf] on the stack and initialize it to be a
// target for a possible longjmp(). In the exceptional case we'll forward
// control of execution to the usual JumpToFrame stub.
//
// In non-TSAN mode this will do nothing and the runtime will call the
// JumpToFrame stub directly.
//
// The callback [fun] may be invoked with a modified [RSP] due to the
// [jmp_buf] structure allocated on the stack (as well as the saved old
// [Thread::setjmp_buffer_]).
static void WithExceptionCatchingTrampoline(Assembler* assembler,
std::function<void()> fun) {
#if defined(USING_THREAD_SANITIZER)
// Reserve space for arguments and align frame before entering C++ world.
const intptr_t kJumpBufferSize = sizeof(jmp_buf);
// Save & Restore the volatile CPU registers across the setjmp() call.
const RegisterSet volatile_registers(
CallingConventions::kVolatileCpuRegisters & ~(1 << RAX),
/*fpu_registers=*/0);
const Register kSavedRspReg = R12;
COMPILE_ASSERT(
(CallingConventions::kCalleeSaveCpuRegisters & (1 << kSavedRspReg)) != 0);
// We rely on THR being preserved across the setjmp() call.
COMPILE_ASSERT((CallingConventions::kCalleeSaveCpuRegisters & (1 << THR)) !=
0);
Label do_native_call;
// Save old jmp_buf.
__ pushq(Address(THR, target::Thread::setjmp_buffer_offset()));
// Allocate jmp_buf struct on stack & remember pointer to it on the
// [Thread::setjmp_buffer] (which exceptions.cc will longjmp() to)
__ AddImmediate(RSP, Immediate(-kJumpBufferSize));
__ movq(Address(THR, target::Thread::setjmp_buffer_offset()), RSP);
// Call setjmp() with a pointer to the allocated jmp_buf struct.
__ MoveRegister(CallingConventions::kArg1Reg, RSP);
__ PushRegisters(volatile_registers);
if (OS::ActivationFrameAlignment() > 1) {
__ MoveRegister(kSavedRspReg, RSP);
__ andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1)));
}
__ CallCFunction(Address(THR, target::Thread::setjmp_function_offset()),
/*restore_rsp=*/true);
if (OS::ActivationFrameAlignment() > 1) {
__ MoveRegister(RSP, kSavedRspReg);
}
__ PopRegisters(volatile_registers);
// We are the target of a longjmp() iff setjmp() returns non-0.
__ CompareImmediate(RAX, 0);
__ BranchIf(EQUAL, &do_native_call);
// We are the target of a longjmp: Cleanup the stack and tail-call the
// JumpToFrame stub which will take care of unwinding the stack and hand
// execution to the catch entry.
__ AddImmediate(RSP, Immediate(kJumpBufferSize));
__ popq(Address(THR, target::Thread::setjmp_buffer_offset()));
__ movq(CallingConventions::kArg1Reg,
Address(THR, target::Thread::exception_pc_offset()));
__ movq(CallingConventions::kArg2Reg,
Address(THR, target::Thread::exception_sp_offset()));
__ movq(CallingConventions::kArg3Reg,
Address(THR, target::Thread::exception_fp_offset()));
__ MoveRegister(CallingConventions::kArg4Reg, THR);
__ jmp(Address(THR, target::Thread::jump_to_frame_entry_point_offset()));
// We leave the created [jmp_buf] structure on the stack as well as the
// pushed old [Thread::setjmp_buffer_].
__ Bind(&do_native_call);
__ MoveRegister(kSavedRspReg, RSP);
#endif // defined(USING_THREAD_SANITIZER)
fun();
#if defined(USING_THREAD_SANITIZER)
__ MoveRegister(RSP, kSavedRspReg);
__ AddImmediate(RSP, Immediate(kJumpBufferSize));
__ popq(Address(THR, target::Thread::setjmp_buffer_offset()));
#endif // defined(USING_THREAD_SANITIZER)
}
// Input parameters:
// RSP : points to return address.
// RSP + 8 : address of last argument in argument array.
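
The removed WithExceptionCatchingTrampoline above is the core of the reverted mechanism: install a setjmp() target before calling into C++, and have the exception path longjmp() back to it so TSAN can follow the unwind. A minimal C++ sketch of that shape, assuming nothing about the VM's real types — current_unwind_buffer, CallWithCatchTrampoline, and ThrowFromRuntime are hypothetical names, not the VM's API:

#include <csetjmp>
#include <cstdio>

static thread_local std::jmp_buf* current_unwind_buffer = nullptr;

// Runtime side: instead of unwinding C++ frames manually, jump back to the
// buffer installed before the runtime call (mirrors the longjmp() in the
// exceptions.cc hunk further down).
static void ThrowFromRuntime() {
  if (current_unwind_buffer != nullptr) {
    std::longjmp(*current_unwind_buffer, 1);
  }
}

// Stub side: save the old buffer, install a new one, and branch on the
// setjmp() result, like the CompareImmediate/BranchIf pair above.
static void CallWithCatchTrampoline(void (*runtime_call)()) {
  std::jmp_buf buffer;
  std::jmp_buf* saved = current_unwind_buffer;
  current_unwind_buffer = &buffer;
  if (setjmp(buffer) == 0) {
    runtime_call();  // Normal return path.
  } else {
    std::puts("reached via longjmp(); real stub would tail-call JumpToFrame");
  }
  current_unwind_buffer = saved;
}

int main() {
  CallWithCatchTrampoline(ThrowFromRuntime);
  return 0;
}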
@@ -189,54 +99,51 @@ void StubCodeCompiler::GenerateCallToRuntimeStub(Assembler* assembler) {
// Mark that the thread is executing VM code.
__ movq(Assembler::VMTagAddress(), RBX);
WithExceptionCatchingTrampoline(assembler, [&]() {
// Reserve space for arguments and align frame before entering C++ world.
__ subq(RSP, Immediate(target::NativeArguments::StructSize()));
if (OS::ActivationFrameAlignment() > 1) {
__ andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1)));
}
// Reserve space for arguments and align frame before entering C++ world.
__ subq(RSP, Immediate(target::NativeArguments::StructSize()));
if (OS::ActivationFrameAlignment() > 1) {
__ andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1)));
}
// Pass target::NativeArguments structure by value and call runtime.
__ movq(Address(RSP, thread_offset), THR); // Set thread in NativeArgs.
// There are no runtime calls to closures, so we do not need to set the tag
// bits kClosureFunctionBit and kInstanceFunctionBit in argc_tag_.
__ movq(Address(RSP, argc_tag_offset),
R10); // Set argc in target::NativeArguments.
// Compute argv.
__ leaq(RAX, Address(RBP, R10, TIMES_8,
target::frame_layout.param_end_from_fp *
target::kWordSize));
__ movq(Address(RSP, argv_offset),
RAX); // Set argv in target::NativeArguments.
__ addq(
RAX,
Immediate(1 * target::kWordSize)); // Retval is next to 1st argument.
__ movq(Address(RSP, retval_offset),
RAX); // Set retval in target::NativeArguments.
// Pass target::NativeArguments structure by value and call runtime.
__ movq(Address(RSP, thread_offset), THR); // Set thread in NativeArgs.
// There are no runtime calls to closures, so we do not need to set the tag
// bits kClosureFunctionBit and kInstanceFunctionBit in argc_tag_.
__ movq(Address(RSP, argc_tag_offset),
R10); // Set argc in target::NativeArguments.
// Compute argv.
__ leaq(RAX,
Address(RBP, R10, TIMES_8,
target::frame_layout.param_end_from_fp * target::kWordSize));
__ movq(Address(RSP, argv_offset),
RAX); // Set argv in target::NativeArguments.
__ addq(RAX,
Immediate(1 * target::kWordSize)); // Retval is next to 1st argument.
__ movq(Address(RSP, retval_offset),
RAX); // Set retval in target::NativeArguments.
#if defined(DART_TARGET_OS_WINDOWS)
ASSERT(target::NativeArguments::StructSize() >
CallingConventions::kRegisterTransferLimit);
__ movq(CallingConventions::kArg1Reg, RSP);
ASSERT(target::NativeArguments::StructSize() >
CallingConventions::kRegisterTransferLimit);
__ movq(CallingConventions::kArg1Reg, RSP);
#endif
__ CallCFunction(RBX);
__ CallCFunction(RBX);
// Mark that the thread is executing Dart code.
__ movq(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId));
// Mark that the thread is executing Dart code.
__ movq(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId));
// Mark that the thread has not exited generated Dart code.
__ movq(Address(THR, target::Thread::exit_through_ffi_offset()),
Immediate(0));
// Mark that the thread has not exited generated Dart code.
__ movq(Address(THR, target::Thread::exit_through_ffi_offset()),
Immediate(0));
// Reset exit frame information in Isolate's mutator thread structure.
__ movq(Address(THR, target::Thread::top_exit_frame_info_offset()),
Immediate(0));
// Reset exit frame information in Isolate's mutator thread structure.
__ movq(Address(THR, target::Thread::top_exit_frame_info_offset()),
Immediate(0));
// Restore the global object pool after returning from runtime (old space is
// moving, so the GOP could have been relocated).
if (FLAG_precompiled_mode) {
__ movq(PP, Address(THR, target::Thread::global_object_pool_offset()));
}
});
// Restore the global object pool after returning from runtime (old space is
// moving, so the GOP could have been relocated).
if (FLAG_precompiled_mode) {
__ movq(PP, Address(THR, target::Thread::global_object_pool_offset()));
}
__ LeaveStubFrame();
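
The recurring `andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1)))` in these stubs is the usual align-down-to-a-power-of-two mask. A tiny sketch of the same arithmetic on an ordinary integer; the value and alignment are made up:

#include <cstdint>
#include <cstdio>

// Rounds value down to a multiple of alignment; only valid when alignment is
// a power of two, which is what the mask trick relies on.
static std::uintptr_t AlignDown(std::uintptr_t value, std::uintptr_t alignment) {
  return value & ~(alignment - 1);
}

int main() {
  // Prints 0x7ffdca10: the low four bits are cleared for 16-byte alignment.
  std::printf("0x%llx\n",
              static_cast<unsigned long long>(AlignDown(0x7ffdca13, 16)));
  return 0;
}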
@@ -658,7 +565,7 @@ void StubCodeCompiler::GenerateRangeError(Assembler* assembler,
// Input parameters:
// RSP : points to return address.
// RSP + 8 : address of return value.
// R13 : address of first argument in argument array.
// RAX : address of first argument in argument array.
// RBX : address of the native function to call.
// R10 : argc_tag including number of arguments and function kind.
static void GenerateCallNativeWithWrapperStub(Assembler* assembler,
@@ -698,51 +605,49 @@ static void GenerateCallNativeWithWrapperStub(Assembler* assembler,
// Mark that the thread is executing native code.
__ movq(Assembler::VMTagAddress(), RBX);
WithExceptionCatchingTrampoline(assembler, [&]() {
// Reserve space for the native arguments structure passed on the stack (the
// outgoing pointer parameter to the native arguments structure is passed in
// RDI) and align frame before entering the C++ world.
__ subq(RSP, Immediate(target::NativeArguments::StructSize()));
if (OS::ActivationFrameAlignment() > 1) {
__ andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1)));
}
// Reserve space for the native arguments structure passed on the stack (the
// outgoing pointer parameter to the native arguments structure is passed in
// RDI) and align frame before entering the C++ world.
__ subq(RSP, Immediate(target::NativeArguments::StructSize()));
if (OS::ActivationFrameAlignment() > 1) {
__ andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1)));
}
// Pass target::NativeArguments structure by value and call native function.
__ movq(Address(RSP, thread_offset), THR); // Set thread in NativeArgs.
__ movq(Address(RSP, argc_tag_offset),
R10); // Set argc in target::NativeArguments.
__ movq(Address(RSP, argv_offset),
R13); // Set argv in target::NativeArguments.
__ leaq(RAX,
Address(RBP, 2 * target::kWordSize)); // Compute return value addr.
__ movq(Address(RSP, retval_offset),
RAX); // Set retval in target::NativeArguments.
// Pass target::NativeArguments structure by value and call native function.
__ movq(Address(RSP, thread_offset), THR); // Set thread in NativeArgs.
__ movq(Address(RSP, argc_tag_offset),
R10); // Set argc in target::NativeArguments.
__ movq(Address(RSP, argv_offset),
RAX); // Set argv in target::NativeArguments.
__ leaq(RAX,
Address(RBP, 2 * target::kWordSize)); // Compute return value addr.
__ movq(Address(RSP, retval_offset),
RAX); // Set retval in target::NativeArguments.
// Pass the pointer to the target::NativeArguments.
__ movq(CallingConventions::kArg1Reg, RSP);
// Pass pointer to function entrypoint.
__ movq(CallingConventions::kArg2Reg, RBX);
// Pass the pointer to the target::NativeArguments.
__ movq(CallingConventions::kArg1Reg, RSP);
// Pass pointer to function entrypoint.
__ movq(CallingConventions::kArg2Reg, RBX);
__ movq(RAX, wrapper_address);
__ CallCFunction(RAX);
__ movq(RAX, wrapper_address);
__ CallCFunction(RAX);
// Mark that the thread is executing Dart code.
__ movq(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId));
// Mark that the thread is executing Dart code.
__ movq(Assembler::VMTagAddress(), Immediate(VMTag::kDartTagId));
// Mark that the thread has not exited generated Dart code.
__ movq(Address(THR, target::Thread::exit_through_ffi_offset()),
Immediate(0));
// Mark that the thread has not exited generated Dart code.
__ movq(Address(THR, target::Thread::exit_through_ffi_offset()),
Immediate(0));
// Reset exit frame information in Isolate's mutator thread structure.
__ movq(Address(THR, target::Thread::top_exit_frame_info_offset()),
Immediate(0));
// Reset exit frame information in Isolate's mutator thread structure.
__ movq(Address(THR, target::Thread::top_exit_frame_info_offset()),
Immediate(0));
// Restore the global object pool after returning from runtime (old space is
// moving, so the GOP could have been relocated).
if (FLAG_precompiled_mode) {
__ movq(PP, Address(THR, target::Thread::global_object_pool_offset()));
}
});
// Restore the global object pool after returning from runtime (old space is
// moving, so the GOP could have been relocated).
if (FLAG_precompiled_mode) {
__ movq(PP, Address(THR, target::Thread::global_object_pool_offset()));
}
__ LeaveStubFrame();
__ ret();
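
Both stubs above build a target::NativeArguments record in stack slots (thread, argc_tag, argv, retval) and then hand its address to C++ in kArg1Reg. A rough C++ equivalent of that handoff, using a stand-in struct rather than the VM's real layout:

#include <cstdio>

// Stand-in for target::NativeArguments; field order and types are illustrative.
struct FakeNativeArguments {
  void* thread;
  long argc_tag;
  void** argv;
  void** retval;
};

static void RuntimeEntry(const FakeNativeArguments* args) {
  std::printf("argc_tag = %ld\n", args->argc_tag);
}

int main() {
  void* slots[2] = {nullptr, nullptr};
  // The stubs fill these slots with movq stores before the call.
  FakeNativeArguments args = {nullptr, 1, &slots[0], &slots[1]};
  RuntimeEntry(&args);  // Mirrors movq(CallingConventions::kArg1Reg, RSP).
  return 0;
}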


@@ -2,12 +2,9 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
#include <setjmp.h>
#include "vm/exceptions.h"
#include "platform/address_sanitizer.h"
#include "platform/thread_sanitizer.h"
#include "lib/stacktrace.h"
@@ -615,6 +612,13 @@ void Exceptions::JumpToFrame(Thread* thread,
// in the previous frames.
StackResource::Unwind(thread);
// Call a stub to set up the exception object in kExceptionObjectReg,
// to set up the stacktrace object in kStackTraceObjectReg, and to
// continue execution at the given pc in the given frame.
typedef void (*ExcpHandler)(uword, uword, uword, Thread*);
ExcpHandler func =
reinterpret_cast<ExcpHandler>(StubCode::JumpToFrame().EntryPoint());
// Unpoison the stack before we tear it down in the generated stub code.
uword current_sp = OSThread::GetCurrentStackPointer() - 1024;
ASAN_UNPOISON(reinterpret_cast<void*>(current_sp),
@@ -631,24 +635,7 @@ void Exceptions::JumpToFrame(Thread* thread,
// The shadow call stack register will be restored by the JumpToFrame stub.
#endif
#if defined(USING_THREAD_SANITIZER)
if (thread->exit_through_ffi() == Thread::kExitThroughRuntimeCall) {
thread->exception_pc_ = program_counter;
thread->exception_sp_ = stack_pointer;
thread->exception_fp_ = frame_pointer;
longjmp(*(thread->setjmp_buffer_), 1);
}
#endif // defined(USING_THREAD_SANITIZER)
// Call a stub to set up the exception object in kExceptionObjectReg,
// to set up the stacktrace object in kStackTraceObjectReg, and to
// continue execution at the given pc in the given frame.
typedef void (*ExcpHandler)(uword, uword, uword, Thread*);
ExcpHandler func =
reinterpret_cast<ExcpHandler>(StubCode::JumpToFrame().EntryPoint());
func(program_counter, stack_pointer, frame_pointer, thread);
#endif
UNREACHABLE();
}


@@ -9,8 +9,6 @@
#error "Should not include runtime"
#endif
#include <setjmp.h>
#include "include/dart_api.h"
#include "platform/assert.h"
#include "platform/atomic.h"
@@ -200,8 +198,6 @@ class Thread;
V(uword, deoptimize_entry_, StubCode::Deoptimize().EntryPoint(), 0) \
V(uword, call_native_through_safepoint_entry_point_, \
StubCode::CallNativeThroughSafepoint().EntryPoint(), 0) \
V(uword, jump_to_frame_entry_point_, StubCode::JumpToFrame().EntryPoint(), \
0) \
V(uword, slow_type_test_entry_point_, StubCode::SlowTypeTest().EntryPoint(), \
0)
@@ -445,25 +441,6 @@ class Thread : public ThreadState {
return OFFSET_OF(Thread, double_truncate_round_supported_);
}
#if defined(USING_THREAD_SANITIZER)
uword exit_through_ffi() const { return exit_through_ffi_; }
static intptr_t setjmp_function_offset() {
return OFFSET_OF(Thread, setjmp_function_);
}
static intptr_t setjmp_buffer_offset() {
return OFFSET_OF(Thread, setjmp_buffer_);
}
static intptr_t exception_pc_offset() {
return OFFSET_OF(Thread, exception_pc_);
}
static intptr_t exception_sp_offset() {
return OFFSET_OF(Thread, exception_sp_);
}
static intptr_t exception_fp_offset() {
return OFFSET_OF(Thread, exception_fp_);
}
#endif // defined(USING_THREAD_SANITIZER)
// The isolate that this thread is operating on, or nullptr if none.
Isolate* isolate() const { return isolate_; }
static intptr_t isolate_offset() { return OFFSET_OF(Thread, isolate_); }
@@ -1122,14 +1099,6 @@ class Thread : public ThreadState {
ApiLocalScope* api_top_scope_;
uint8_t double_truncate_round_supported_;
#if defined(USING_THREAD_SANITIZER)
void* setjmp_function_ = reinterpret_cast<void*>(&setjmp);
jmp_buf* setjmp_buffer_ = nullptr;
uword exception_pc_ = 0;
uword exception_sp_ = 0;
uword exception_fp_ = 0;
#endif // defined(USING_THREAD_SANITIZER)
// ---- End accessed from generated code. ----
// The layout of Thread object up to this point should not depend
@@ -1273,7 +1242,6 @@ class Thread : public ThreadState {
friend Isolate* CreateWithinExistingIsolateGroup(IsolateGroup*,
const char*,
char**);
friend class Exceptions; // for setjmp_buffer_/exception_*
DISALLOW_COPY_AND_ASSIGN(Thread);
};
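
The Thread offset accessors removed above (setjmp_buffer_offset() and friends) follow the usual OFFSET_OF pattern so generated code can address C++ fields directly. A sketch with the standard offsetof macro and a made-up struct, not the VM's real Thread layout:

#include <cstddef>
#include <cstdio>

// Stand-in for the Thread fields referenced above; layout is illustrative.
struct FakeThread {
  void* setjmp_function;
  void* setjmp_buffer;
  unsigned long exception_pc;
};

static std::size_t setjmp_buffer_offset() {
  return offsetof(FakeThread, setjmp_buffer);
}

int main() {
  std::printf("setjmp_buffer offset: %zu\n", setjmp_buffer_offset());
  return 0;
}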


@@ -29,7 +29,7 @@ head -n $(expr $LINE - 1) "$FILE" >"$TEMP_HEADER"
# Run offsets_extractor for every architecture and append the results.
run() {
tools/build.py --mode=$1 --arch=$2 $4 offsets_extractor offsets_extractor_precompiled_runtime
tools/build.py --mode=$1 --arch=$2 offsets_extractor offsets_extractor_precompiled_runtime
echo "" >>"$TEMP_JIT"
out/$3/offsets_extractor >>"$TEMP_JIT"
echo "" >>"$TEMP_AOT"
@@ -41,7 +41,6 @@ echo "#if !defined(PRODUCT)" >>"$TEMP_JIT"
echo "#if !defined(PRODUCT)" >>"$TEMP_AOT"
run release simarm ReleaseSIMARM
run release x64 ReleaseX64
run release x64 ReleaseTSANX64 --sanitizer=tsan
run release ia32 ReleaseIA32
run release simarm64 ReleaseSIMARM64
run release x64c ReleaseX64C
@@ -52,7 +51,6 @@ echo "#else  // !defined(PRODUCT)" >>"$TEMP_JIT"
echo "#else // !defined(PRODUCT)" >>"$TEMP_AOT"
run product simarm ProductSIMARM
run product x64 ProductX64
run product x64 ProductTSANX64 --sanitizer=tsan
run product ia32 ProductIA32
run product simarm64 ProductSIMARM64
run product x64c ProductX64C