[vm, compiler] Rename StoreInstanceFieldInstr to StoreFieldInstr to match LoadFieldInstr and GuardFieldXYZInstr.

TEST=ci
Change-Id: I3161cad413f2d7be2bd8269306a51b5ae6a7384d
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/252780
Commit-Queue: Ryan Macnak <rmacnak@google.com>
Reviewed-by: Alexander Markov <alexmarkov@google.com>
Reviewed-by: Martin Kustermann <kustermann@google.com>
Ryan Macnak authored 2022-07-27 20:00:49 +00:00; committed by Commit Bot
parent 70e2d3d6e7
commit fb84b13e55
28 changed files with 219 additions and 232 deletions


@ -292,8 +292,7 @@ void ConstantPropagator::VisitStoreIndexedUnsafe(
void ConstantPropagator::VisitStoreIndexed(StoreIndexedInstr* instr) {}
void ConstantPropagator::VisitStoreInstanceField(
StoreInstanceFieldInstr* instr) {}
void ConstantPropagator::VisitStoreField(StoreFieldInstr* instr) {}
void ConstantPropagator::VisitMemoryCopy(MemoryCopyInstr* instr) {}
@ -1710,8 +1709,8 @@ bool ConstantPropagator::TransformDefinition(Definition* defn) {
ASSERT((defn == nullptr) || !defn->IsPushArgument());
if ((defn != nullptr) && IsConstant(defn->constant_value()) &&
(defn->constant_value().IsSmi() || defn->constant_value().IsOld()) &&
!defn->IsConstant() && !defn->IsStoreIndexed() &&
!defn->IsStoreInstanceField() && !defn->IsStoreStaticField()) {
!defn->IsConstant() && !defn->IsStoreIndexed() && !defn->IsStoreField() &&
!defn->IsStoreStaticField()) {
if (FLAG_trace_constant_propagation && graph_->should_print()) {
THR_Print("Constant v%" Pd " = %s\n", defn->ssa_temp_index(),
defn->constant_value().ToCString());


@ -339,12 +339,12 @@ class FlowGraph : public ZoneAllocated {
// to
//
// v2 <- AssertAssignable:<id>(v1, ...)
// StoreInstanceField(v0, v2)
// StoreField(v0, v2)
//
// If the [AssertAssignable] causes a lazy-deopt on return, we'll have to
// *re-try* the implicit setter call in unoptimized mode, i.e. lazy deopt to
// before-call (otherwise - if we continued after-call - the
// StoreInstanceField would not be performed).
// StoreField would not be performed).
void InsertSpeculativeAfter(Instruction* prev,
Instruction* instr,
Environment* env,


@ -1008,7 +1008,7 @@ class FlowGraphCompiler : public ValueObject {
friend class NullErrorSlowPath; // For AddPcRelativeCallStubTarget().
friend class CheckStackOverflowInstr; // For AddPcRelativeCallStubTarget().
friend class StoreIndexedInstr; // For AddPcRelativeCallStubTarget().
friend class StoreInstanceFieldInstr; // For AddPcRelativeCallStubTarget().
friend class StoreFieldInstr; // For AddPcRelativeCallStubTarget().
friend class CheckStackOverflowSlowPath; // For pending_deoptimization_env_.
friend class GraphInstrinsicCodeGenScope; // For optimizing_.


@ -963,17 +963,17 @@ void AllocateTypedDataInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
locs(), deopt_id(), env());
}
bool StoreInstanceFieldInstr::IsUnboxedDartFieldStore() const {
bool StoreFieldInstr::IsUnboxedDartFieldStore() const {
return slot().representation() == kTagged && slot().IsDartField() &&
slot().IsUnboxed();
}
bool StoreInstanceFieldInstr::IsPotentialUnboxedDartFieldStore() const {
bool StoreFieldInstr::IsPotentialUnboxedDartFieldStore() const {
return slot().representation() == kTagged && slot().IsDartField() &&
slot().IsPotentialUnboxed();
}
Representation StoreInstanceFieldInstr::RequiredInputRepresentation(
Representation StoreFieldInstr::RequiredInputRepresentation(
intptr_t index) const {
ASSERT((index == 0) || (index == 1));
if (index == 0) {
@ -986,7 +986,7 @@ Representation StoreInstanceFieldInstr::RequiredInputRepresentation(
return slot().representation();
}
Instruction* StoreInstanceFieldInstr::Canonicalize(FlowGraph* flow_graph) {
Instruction* StoreFieldInstr::Canonicalize(FlowGraph* flow_graph) {
// Dart objects are allocated null-initialized, which means we can eliminate
// all initializing stores which store null value.
// Context objects can be allocated uninitialized as a performance
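
A note on the canonicalization rule mentioned above: because Dart heap objects come out of the allocator null-initialized, an initializing StoreField of the constant null can simply be dropped, while stores into Context objects that may be allocated uninitialized have to be kept. A minimal self-contained sketch of that decision, using hypothetical stand-in types rather than the real IL classes:

#include <cassert>

// Hypothetical stand-ins; the real compiler works on StoreFieldInstr,
// Slot and Value objects.
enum class StoreKind { kInitializing, kOther };

struct ModelStore {
  StoreKind kind;
  bool value_is_constant_null;      // the stored value is the constant null
  bool into_uninitialized_context;  // Context objects may skip null-init
};

// Returns true if canonicalization may drop the store entirely.
bool CanEliminate(const ModelStore& s) {
  return s.kind == StoreKind::kInitializing && s.value_is_constant_null &&
         !s.into_uninitialized_context;
}

int main() {
  assert(CanEliminate({StoreKind::kInitializing, true, false}));
  assert(!CanEliminate({StoreKind::kOther, true, false}));          // later store
  assert(!CanEliminate({StoreKind::kInitializing, false, false}));  // non-null value
  assert(!CanEliminate({StoreKind::kInitializing, true, true}));    // Context case
  return 0;
}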


@ -447,7 +447,7 @@ struct InstrAttrs {
M(LoadIndexed, kNoGC) \
M(LoadCodeUnits, kNoGC) \
M(StoreIndexed, kNoGC) \
M(StoreInstanceField, _) \
M(StoreField, _) \
M(LoadStaticField, _) \
M(StoreStaticField, kNoGC) \
M(BooleanNegate, kNoGC) \
@ -1173,7 +1173,7 @@ class Instruction : public ZoneAllocated {
virtual bool AllowsCSE() const { return false; }
// Returns true if this instruction has any side-effects besides storing.
// See StoreInstanceFieldInstr::HasUnknownSideEffects() for rationale.
// See StoreFieldInstr::HasUnknownSideEffects() for rationale.
virtual bool HasUnknownSideEffects() const = 0;
// Whether this instruction can call Dart code without going through
@ -5452,7 +5452,7 @@ class DebugStepCheckInstr : public TemplateInstruction<0, NoThrow> {
enum StoreBarrierType { kNoStoreBarrier, kEmitStoreBarrier };
// StoreInstanceField instruction represents a store of the given [value] into
// StoreField instruction represents a store of the given [value] into
// the specified [slot] on the [instance] object. [emit_store_barrier] allows to
// specify whether the store should omit the write barrier. [kind] specifies
// whether this store is an initializing store, i.e. the first store into a
@ -5476,12 +5476,12 @@ enum StoreBarrierType { kNoStoreBarrier, kEmitStoreBarrier };
// start of internal typed data array backing) then this instruction cannot be
// moved across instructions which can trigger GC, to ensure that
//
// LoadUntagged + Arithmetic + StoreInstanceField
// LoadUntagged + Arithmetic + StoreField
//
// are performed as an effectively atomic set of instructions.
//
// See kernel_to_il.cc:BuildTypedDataViewFactoryConstructor.
class StoreInstanceFieldInstr : public TemplateInstruction<2, NoThrow> {
class StoreFieldInstr : public TemplateInstruction<2, NoThrow> {
public:
enum class Kind {
// Store is known to be the first store into a slot of an object after
@ -5493,14 +5493,14 @@ class StoreInstanceFieldInstr : public TemplateInstruction<2, NoThrow> {
kOther,
};
StoreInstanceFieldInstr(const Slot& slot,
Value* instance,
Value* value,
StoreBarrierType emit_store_barrier,
const InstructionSource& source,
Kind kind = Kind::kOther,
compiler::Assembler::MemoryOrder memory_order =
compiler::Assembler::kRelaxedNonAtomic)
StoreFieldInstr(const Slot& slot,
Value* instance,
Value* value,
StoreBarrierType emit_store_barrier,
const InstructionSource& source,
Kind kind = Kind::kOther,
compiler::Assembler::MemoryOrder memory_order =
compiler::Assembler::kRelaxedNonAtomic)
: TemplateInstruction(source),
slot_(slot),
emit_store_barrier_(emit_store_barrier),
@ -5512,19 +5512,19 @@ class StoreInstanceFieldInstr : public TemplateInstruction<2, NoThrow> {
}
// Convenience constructor that looks up an IL Slot for the given [field].
StoreInstanceFieldInstr(const Field& field,
Value* instance,
Value* value,
StoreBarrierType emit_store_barrier,
const InstructionSource& source,
const ParsedFunction* parsed_function,
Kind kind = Kind::kOther)
: StoreInstanceFieldInstr(Slot::Get(field, parsed_function),
instance,
value,
emit_store_barrier,
source,
kind) {}
StoreFieldInstr(const Field& field,
Value* instance,
Value* value,
StoreBarrierType emit_store_barrier,
const InstructionSource& source,
const ParsedFunction* parsed_function,
Kind kind = Kind::kOther)
: StoreFieldInstr(Slot::Get(field, parsed_function),
instance,
value,
emit_store_barrier,
source,
kind) {}
virtual SpeculativeMode SpeculativeModeOfInput(intptr_t index) const {
// In AOT unbox is done based on TFA, therefore it was proven to be correct
@ -5535,7 +5535,7 @@ class StoreInstanceFieldInstr : public TemplateInstruction<2, NoThrow> {
: kGuardInputs;
}
DECLARE_INSTRUCTION(StoreInstanceField)
DECLARE_INSTRUCTION(StoreField)
enum { kInstancePos = 0, kValuePos = 1 };
@ -5616,7 +5616,7 @@ class StoreInstanceFieldInstr : public TemplateInstruction<2, NoThrow> {
// Marks initializing stores. E.g. in the constructor.
const bool is_initialization_;
DISALLOW_COPY_AND_ASSIGN(StoreInstanceFieldInstr);
DISALLOW_COPY_AND_ASSIGN(StoreFieldInstr);
};
class GuardFieldInstr : public TemplateInstruction<1, NoThrow, Pure> {


@ -2827,8 +2827,8 @@ void LoadCodeUnitsInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
}
LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Zone* zone,
bool opt) const {
LocationSummary* StoreFieldInstr::MakeLocationSummary(Zone* zone,
bool opt) const {
const intptr_t kNumInputs = 2;
const intptr_t kNumTemps =
((IsUnboxedDartFieldStore() && opt)
@ -2878,7 +2878,7 @@ LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Zone* zone,
}
static void EnsureMutableBox(FlowGraphCompiler* compiler,
StoreInstanceFieldInstr* instruction,
StoreFieldInstr* instruction,
Register box_reg,
const Class& cls,
Register instance_reg,
@ -2897,7 +2897,7 @@ static void EnsureMutableBox(FlowGraphCompiler* compiler,
__ Bind(&done);
}
void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
void StoreFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 16);
ASSERT(sizeof(UntaggedField::guarded_cid_) == 2);
ASSERT(sizeof(UntaggedField::is_nullable_) == 2);
@ -2913,7 +2913,7 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
auto const rep = slot().representation();
ASSERT(RepresentationUtils::IsUnboxedInteger(rep));
const size_t value_size = RepresentationUtils::ValueSize(rep);
__ Comment("NativeUnboxedStoreInstanceFieldInstr");
__ Comment("NativeUnboxedStoreFieldInstr");
if (value_size <= compiler::target::kWordSize) {
const Register value = locs()->in(kValuePos).reg();
__ StoreFieldToOffset(value, instance_reg, offset_in_bytes,
@ -2938,17 +2938,17 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
if (FLAG_precompiled_mode) {
switch (cid) {
case kDoubleCid:
__ Comment("UnboxedDoubleStoreInstanceFieldInstr");
__ Comment("UnboxedDoubleStoreFieldInstr");
__ StoreDToOffset(value, instance_reg,
offset_in_bytes - kHeapObjectTag);
return;
case kFloat32x4Cid:
__ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
__ Comment("UnboxedFloat32x4StoreFieldInstr");
__ StoreMultipleDToOffset(value, 2, instance_reg,
offset_in_bytes - kHeapObjectTag);
return;
case kFloat64x2Cid:
__ Comment("UnboxedFloat64x2StoreInstanceFieldInstr");
__ Comment("UnboxedFloat64x2StoreFieldInstr");
__ StoreMultipleDToOffset(value, 2, instance_reg,
offset_in_bytes - kHeapObjectTag);
return;
@ -2985,19 +2985,19 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
switch (cid) {
case kDoubleCid:
__ Comment("UnboxedDoubleStoreInstanceFieldInstr");
__ Comment("UnboxedDoubleStoreFieldInstr");
__ StoreDToOffset(
value, temp,
compiler::target::Double::value_offset() - kHeapObjectTag);
break;
case kFloat32x4Cid:
__ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
__ Comment("UnboxedFloat32x4StoreFieldInstr");
__ StoreMultipleDToOffset(
value, 2, temp,
compiler::target::Float32x4::value_offset() - kHeapObjectTag);
break;
case kFloat64x2Cid:
__ Comment("UnboxedFloat64x2StoreInstanceFieldInstr");
__ Comment("UnboxedFloat64x2StoreFieldInstr");
__ StoreMultipleDToOffset(
value, 2, temp,
compiler::target::Float64x2::value_offset() - kHeapObjectTag);


@ -2458,7 +2458,7 @@ void GuardFieldLengthInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
static void EnsureMutableBox(FlowGraphCompiler* compiler,
StoreInstanceFieldInstr* instruction,
StoreFieldInstr* instruction,
Register box_reg,
const Class& cls,
Register instance_reg,
@ -2475,8 +2475,8 @@ static void EnsureMutableBox(FlowGraphCompiler* compiler,
__ Bind(&done);
}
LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Zone* zone,
bool opt) const {
LocationSummary* StoreFieldInstr::MakeLocationSummary(Zone* zone,
bool opt) const {
const intptr_t kNumInputs = 2;
const intptr_t kNumTemps = (IsUnboxedDartFieldStore() && opt)
? (FLAG_precompiled_mode ? 0 : 2)
@ -2521,7 +2521,7 @@ LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Zone* zone,
return summary;
}
void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
void StoreFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 16);
ASSERT(sizeof(UntaggedField::guarded_cid_) == 2);
ASSERT(sizeof(UntaggedField::is_nullable_) == 2);
@ -2536,7 +2536,7 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
ASSERT(memory_order_ != compiler::AssemblerBase::kRelease);
ASSERT(RepresentationUtils::IsUnboxedInteger(slot().representation()));
const Register value = locs()->in(kValuePos).reg();
__ Comment("NativeUnboxedStoreInstanceFieldInstr");
__ Comment("NativeUnboxedStoreFieldInstr");
__ StoreFieldToOffset(
value, instance_reg, offset_in_bytes,
RepresentationUtils::OperandSize(slot().representation()));
@ -2550,7 +2550,7 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
if (FLAG_precompiled_mode) {
switch (cid) {
case kDoubleCid:
__ Comment("UnboxedDoubleStoreInstanceFieldInstr");
__ Comment("UnboxedDoubleStoreFieldInstr");
if (locs()->in(kValuePos).IsConstant()) {
ASSERT(locs()
->in(kValuePos)
@ -2564,12 +2564,12 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
return;
case kFloat32x4Cid:
__ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
__ Comment("UnboxedFloat32x4StoreFieldInstr");
__ StoreQFieldToOffset(locs()->in(kValuePos).fpu_reg(), instance_reg,
offset_in_bytes);
return;
case kFloat64x2Cid:
__ Comment("UnboxedFloat64x2StoreInstanceFieldInstr");
__ Comment("UnboxedFloat64x2StoreFieldInstr");
__ StoreQFieldToOffset(locs()->in(kValuePos).fpu_reg(), instance_reg,
offset_in_bytes);
return;
@ -2608,15 +2608,15 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
const VRegister value = locs()->in(kValuePos).fpu_reg();
switch (cid) {
case kDoubleCid:
__ Comment("UnboxedDoubleStoreInstanceFieldInstr");
__ Comment("UnboxedDoubleStoreFieldInstr");
__ StoreDFieldToOffset(value, temp, Double::value_offset());
break;
case kFloat32x4Cid:
__ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
__ Comment("UnboxedFloat32x4StoreFieldInstr");
__ StoreQFieldToOffset(value, temp, Float32x4::value_offset());
break;
case kFloat64x2Cid:
__ Comment("UnboxedFloat64x2StoreInstanceFieldInstr");
__ Comment("UnboxedFloat64x2StoreFieldInstr");
__ StoreQFieldToOffset(value, temp, Float64x2::value_offset());
break;
default:


@ -2120,8 +2120,8 @@ void GuardFieldLengthInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
}
LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Zone* zone,
bool opt) const {
LocationSummary* StoreFieldInstr::MakeLocationSummary(Zone* zone,
bool opt) const {
const intptr_t kNumInputs = 2;
const intptr_t kNumTemps =
(IsUnboxedDartFieldStore() && opt)
@ -2167,7 +2167,7 @@ LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Zone* zone,
}
static void EnsureMutableBox(FlowGraphCompiler* compiler,
StoreInstanceFieldInstr* instruction,
StoreFieldInstr* instruction,
Register box_reg,
const Class& cls,
Register instance_reg,
@ -2187,7 +2187,7 @@ static void EnsureMutableBox(FlowGraphCompiler* compiler,
__ Bind(&done);
}
void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
void StoreFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 16);
ASSERT(sizeof(UntaggedField::guarded_cid_) == 2);
ASSERT(sizeof(UntaggedField::is_nullable_) == 2);
@ -2203,7 +2203,7 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
auto const rep = slot().representation();
ASSERT(RepresentationUtils::IsUnboxedInteger(rep));
const size_t value_size = RepresentationUtils::ValueSize(rep);
__ Comment("NativeUnboxedStoreInstanceFieldInstr");
__ Comment("NativeUnboxedStoreFieldInstr");
if (value_size <= compiler::target::kWordSize) {
const Register value = locs()->in(kValuePos).reg();
__ StoreFieldToOffset(value, instance_reg, offset_in_bytes,
@ -2253,16 +2253,16 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
switch (cid) {
case kDoubleCid:
__ Comment("UnboxedDoubleStoreInstanceFieldInstr");
__ Comment("UnboxedDoubleStoreFieldInstr");
__ movsd(compiler::FieldAddress(temp, Double::value_offset()), value);
break;
case kFloat32x4Cid:
__ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
__ Comment("UnboxedFloat32x4StoreFieldInstr");
__ movups(compiler::FieldAddress(temp, Float32x4::value_offset()),
value);
break;
case kFloat64x2Cid:
__ Comment("UnboxedFloat64x2StoreInstanceFieldInstr");
__ Comment("UnboxedFloat64x2StoreFieldInstr");
__ movups(compiler::FieldAddress(temp, Float64x2::value_offset()),
value);
break;


@ -784,7 +784,7 @@ void GuardFieldInstr::PrintOperandsTo(BaseTextBuffer* f) const {
value()->PrintTo(f);
}
void StoreInstanceFieldInstr::PrintOperandsTo(BaseTextBuffer* f) const {
void StoreFieldInstr::PrintOperandsTo(BaseTextBuffer* f) const {
instance()->PrintTo(f);
f->Printf(" . %s = ", slot().Name());
value()->PrintTo(f);


@ -2784,7 +2784,7 @@ void GuardFieldLengthInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
static void EnsureMutableBox(FlowGraphCompiler* compiler,
StoreInstanceFieldInstr* instruction,
StoreFieldInstr* instruction,
Register box_reg,
const Class& cls,
Register instance_reg,
@ -2801,8 +2801,8 @@ static void EnsureMutableBox(FlowGraphCompiler* compiler,
__ Bind(&done);
}
LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Zone* zone,
bool opt) const {
LocationSummary* StoreFieldInstr::MakeLocationSummary(Zone* zone,
bool opt) const {
const intptr_t kNumInputs = 2;
const intptr_t kNumTemps = (IsUnboxedDartFieldStore() && opt)
? (FLAG_precompiled_mode ? 0 : 2)
@ -2859,7 +2859,7 @@ LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Zone* zone,
return summary;
}
void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
void StoreFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 16);
ASSERT(sizeof(UntaggedField::guarded_cid_) == 2);
ASSERT(sizeof(UntaggedField::is_nullable_) == 2);
@ -2875,7 +2875,7 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
auto const rep = slot().representation();
ASSERT(RepresentationUtils::IsUnboxedInteger(rep));
const size_t value_size = RepresentationUtils::ValueSize(rep);
__ Comment("NativeUnboxedStoreInstanceFieldInstr");
__ Comment("NativeUnboxedStoreFieldInstr");
if (value_size <= compiler::target::kWordSize) {
const Register value = locs()->in(kValuePos).reg();
__ StoreFieldToOffset(value, instance_reg, offset_in_bytes,
@ -2902,7 +2902,7 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
if (FLAG_precompiled_mode) {
switch (cid) {
case kDoubleCid:
__ Comment("UnboxedDoubleStoreInstanceFieldInstr");
__ Comment("UnboxedDoubleStoreFieldInstr");
#if XLEN >= 64
if (locs()->in(kValuePos).IsConstant()) {
ASSERT(locs()
@ -2918,11 +2918,11 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
offset_in_bytes);
return;
case kFloat32x4Cid:
__ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
__ Comment("UnboxedFloat32x4StoreFieldInstr");
UNIMPLEMENTED();
return;
case kFloat64x2Cid:
__ Comment("UnboxedFloat64x2StoreInstanceFieldInstr");
__ Comment("UnboxedFloat64x2StoreFieldInstr");
UNIMPLEMENTED();
return;
default:
@ -2960,15 +2960,15 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
const FRegister value = locs()->in(kValuePos).fpu_reg();
switch (cid) {
case kDoubleCid:
__ Comment("UnboxedDoubleStoreInstanceFieldInstr");
__ Comment("UnboxedDoubleStoreFieldInstr");
__ StoreDFieldToOffset(value, temp, Double::value_offset());
break;
case kFloat32x4Cid:
__ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
__ Comment("UnboxedFloat32x4StoreFieldInstr");
UNIMPLEMENTED();
break;
case kFloat64x2Cid:
__ Comment("UnboxedFloat64x2StoreInstanceFieldInstr");
__ Comment("UnboxedFloat64x2StoreFieldInstr");
UNIMPLEMENTED();
break;
default:


@ -104,7 +104,7 @@ static void ExpectStores(FlowGraph* flow_graph,
!block_it.Done(); block_it.Advance()) {
for (ForwardInstructionIterator it(block_it.Current()); !it.Done();
it.Advance()) {
if (auto store = it.Current()->AsStoreInstanceField()) {
if (auto store = it.Current()->AsStoreField()) {
EXPECT_LT(next_expected_store, expected_stores.size());
EXPECT_STREQ(expected_stores[next_expected_store],
store->slot().Name());


@ -2461,8 +2461,8 @@ void GuardFieldTypeInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ Bind(&ok);
}
LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Zone* zone,
bool opt) const {
LocationSummary* StoreFieldInstr::MakeLocationSummary(Zone* zone,
bool opt) const {
const intptr_t kNumInputs = 2;
const intptr_t kNumTemps = (IsUnboxedDartFieldStore() && opt)
? (FLAG_precompiled_mode ? 0 : 2)
@ -2505,7 +2505,7 @@ LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Zone* zone,
}
static void EnsureMutableBox(FlowGraphCompiler* compiler,
StoreInstanceFieldInstr* instruction,
StoreFieldInstr* instruction,
Register box_reg,
const Class& cls,
Register instance_reg,
@ -2524,7 +2524,7 @@ static void EnsureMutableBox(FlowGraphCompiler* compiler,
__ Bind(&done);
}
void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
void StoreFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 16);
ASSERT(sizeof(UntaggedField::guarded_cid_) == 2);
ASSERT(sizeof(UntaggedField::is_nullable_) == 2);
@ -2539,7 +2539,7 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
ASSERT(memory_order_ != compiler::AssemblerBase::kRelease);
ASSERT(RepresentationUtils::IsUnboxedInteger(slot().representation()));
const Register value = locs()->in(kValuePos).reg();
__ Comment("NativeUnboxedStoreInstanceFieldInstr");
__ Comment("NativeUnboxedStoreFieldInstr");
__ StoreFieldToOffset(
value, instance_reg, offset_in_bytes,
RepresentationUtils::OperandSize(slot().representation()));
@ -2555,17 +2555,17 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
if (FLAG_precompiled_mode) {
switch (cid) {
case kDoubleCid:
__ Comment("UnboxedDoubleStoreInstanceFieldInstr");
__ Comment("UnboxedDoubleStoreFieldInstr");
__ movsd(compiler::FieldAddress(instance_reg, offset_in_bytes),
value);
return;
case kFloat32x4Cid:
__ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
__ Comment("UnboxedFloat32x4StoreFieldInstr");
__ movups(compiler::FieldAddress(instance_reg, offset_in_bytes),
value);
return;
case kFloat64x2Cid:
__ Comment("UnboxedFloat64x2StoreInstanceFieldInstr");
__ Comment("UnboxedFloat64x2StoreFieldInstr");
__ movups(compiler::FieldAddress(instance_reg, offset_in_bytes),
value);
return;
@ -2604,16 +2604,16 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
switch (cid) {
case kDoubleCid:
__ Comment("UnboxedDoubleStoreInstanceFieldInstr");
__ Comment("UnboxedDoubleStoreFieldInstr");
__ movsd(compiler::FieldAddress(temp, Double::value_offset()), value);
break;
case kFloat32x4Cid:
__ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
__ Comment("UnboxedFloat32x4StoreFieldInstr");
__ movups(compiler::FieldAddress(temp, Float32x4::value_offset()),
value);
break;
case kFloat64x2Cid:
__ Comment("UnboxedFloat64x2StoreInstanceFieldInstr");
__ Comment("UnboxedFloat64x2StoreFieldInstr");
__ movups(compiler::FieldAddress(temp, Float64x2::value_offset()),
value);
break;


@ -2938,8 +2938,8 @@ static bool InlineGrowableArraySetter(FlowGraph* flow_graph,
(*entry)->InheritDeoptTarget(Z, call);
// This is an internal method, no need to check argument types.
StoreInstanceFieldInstr* store = new (Z)
StoreInstanceFieldInstr(field, new (Z) Value(array), new (Z) Value(value),
StoreFieldInstr* store =
new (Z) StoreFieldInstr(field, new (Z) Value(array), new (Z) Value(value),
store_barrier_type, call->source());
flow_graph->AppendTo(*entry, store, call->env(), FlowGraph::kEffect);
*last = store;


@ -3209,7 +3209,7 @@ void FlowGraphAllocator::RemoveFrameIfNotNeeded() {
// sense to make function frameless if it contains more than 1
// write barrier invocation.
#if defined(TARGET_ARCH_ARM64) || defined(TARGET_ARCH_ARM)
if (auto store_field = instruction->AsStoreInstanceField()) {
if (auto store_field = instruction->AsStoreField()) {
if (store_field->ShouldEmitStoreBarrier()) {
if (has_write_barrier_call) {
// We already have at least one write barrier call.


@ -229,10 +229,10 @@ class Place : public ValueObject {
break;
}
case Instruction::kStoreInstanceField: {
StoreInstanceFieldInstr* store = instr->AsStoreInstanceField();
set_representation(store->RequiredInputRepresentation(
StoreInstanceFieldInstr::kValuePos));
case Instruction::kStoreField: {
StoreFieldInstr* store = instr->AsStoreField();
set_representation(
store->RequiredInputRepresentation(StoreFieldInstr::kValuePos));
instance_ = store->instance()->definition()->OriginalDefinition();
set_kind(kInstanceField);
instance_field_ = &store->slot();
@ -1120,13 +1120,12 @@ class AliasedSet : public ZoneAllocated {
} else if (UseIsARedefinition(use) &&
AnyUseCreatesAlias(instr->Cast<Definition>())) {
return true;
} else if ((instr->IsStoreInstanceField() &&
(use->use_index() !=
StoreInstanceFieldInstr::kInstancePos))) {
ASSERT(use->use_index() == StoreInstanceFieldInstr::kValuePos);
} else if ((instr->IsStoreField() &&
(use->use_index() != StoreFieldInstr::kInstancePos))) {
ASSERT(use->use_index() == StoreFieldInstr::kValuePos);
// If we store this value into an object that is not aliased itself
// and we never load again then the store does not create an alias.
StoreInstanceFieldInstr* store = instr->AsStoreInstanceField();
StoreFieldInstr* store = instr->AsStoreField();
Definition* instance =
store->instance()->definition()->OriginalDefinition();
if (Place::IsAllocation(instance) &&
@ -1148,7 +1147,7 @@ class AliasedSet : public ZoneAllocated {
return true;
} else if (auto* const alloc = instr->AsAllocation()) {
// Treat inputs to an allocation instruction exactly as if they were
// manually stored using a StoreInstanceField instruction.
// manually stored using a StoreField instruction.
if (alloc->Identity().IsAliased()) {
return true;
}
@ -1194,10 +1193,9 @@ class AliasedSet : public ZoneAllocated {
MarkStoredValuesEscaping(instr->AsDefinition());
continue;
}
if ((use->use_index() == StoreInstanceFieldInstr::kInstancePos) &&
instr->IsStoreInstanceField()) {
MarkDefinitionAsAliased(
instr->AsStoreInstanceField()->value()->definition());
if ((use->use_index() == StoreFieldInstr::kInstancePos) &&
instr->IsStoreField()) {
MarkDefinitionAsAliased(instr->AsStoreField()->value()->definition());
}
}
}
@ -1274,7 +1272,7 @@ static Definition* GetStoredValue(Instruction* instr) {
return instr->AsStoreIndexed()->value()->definition();
}
StoreInstanceFieldInstr* store_instance_field = instr->AsStoreInstanceField();
StoreFieldInstr* store_instance_field = instr->AsStoreField();
if (store_instance_field != NULL) {
return store_instance_field->value()->definition();
}
@ -1538,7 +1536,7 @@ void LICM::OptimisticallySpecializeSmiPhis() {
// Returns true if instruction may have a "visible" effect,
static bool MayHaveVisibleEffect(Instruction* instr) {
switch (instr->tag()) {
case Instruction::kStoreInstanceField:
case Instruction::kStoreField:
case Instruction::kStoreStaticField:
case Instruction::kStoreIndexed:
case Instruction::kStoreIndexedUnsafe:
@ -1857,7 +1855,7 @@ class LoadOptimizer : public ValueObject {
// Such a store doesn't initialize corresponding field.
bool IsSentinelStore(Instruction* instr) {
Value* value = nullptr;
if (auto* store_field = instr->AsStoreInstanceField()) {
if (auto* store_field = instr->AsStoreField()) {
value = store_field->value();
} else if (auto* store_static = instr->AsStoreStaticField()) {
value = store_static->value();
@ -2200,8 +2198,7 @@ class LoadOptimizer : public ValueObject {
if (auto* const load = use->instruction()->AsLoadField()) {
place_id = GetPlaceId(load);
slot = &load->slot();
} else if (auto* const store =
use->instruction()->AsStoreInstanceField()) {
} else if (auto* const store = use->instruction()->AsStoreField()) {
ASSERT(!alloc->IsArrayAllocation());
place_id = GetPlaceId(store);
slot = &store->slot();
@ -3042,8 +3039,8 @@ class StoreOptimizer : public LivenessAnalysis {
bool CanEliminateStore(Instruction* instr) {
switch (instr->tag()) {
case Instruction::kStoreInstanceField: {
StoreInstanceFieldInstr* store_instance = instr->AsStoreInstanceField();
case Instruction::kStoreField: {
StoreFieldInstr* store_instance = instr->AsStoreField();
// Can't eliminate stores that initialize fields.
return !store_instance->is_initialization();
}
@ -3261,7 +3258,7 @@ static bool IsSafeUse(Value* use, SafeUseCheck check_type) {
alloc->Identity().IsAllocationSinkingCandidate());
}
if (auto* store = use->instruction()->AsStoreInstanceField()) {
if (auto* store = use->instruction()->AsStoreField()) {
if (use == store->value()) {
Definition* instance = store->instance()->definition();
return IsSupportedAllocation(instance) &&
@ -3307,7 +3304,7 @@ static bool IsSafeUse(Value* use, SafeUseCheck check_type) {
}
// Right now we are attempting to sink allocation only into
// deoptimization exit. So candidate should only be used in StoreInstanceField
// deoptimization exit. So candidate should only be used in StoreField
// instructions that write into fields of the allocated object.
static bool IsAllocationSinkingCandidate(Definition* alloc,
SafeUseCheck check_type) {
@ -3331,7 +3328,7 @@ static Definition* StoreDestination(Value* use) {
if (auto* const alloc = use->instruction()->AsAllocation()) {
return alloc;
}
if (auto* const store = use->instruction()->AsStoreInstanceField()) {
if (auto* const store = use->instruction()->AsStoreField()) {
return store->instance()->definition();
}
if (auto* const store = use->instruction()->AsStoreIndexed()) {
@ -3365,7 +3362,7 @@ void AllocationSinking::EliminateAllocation(Definition* alloc) {
// As an allocation sinking candidate, remove stores to this candidate.
// Do this in a two-step process, as this allocation may be used multiple
// times in a single instruction (e.g., as the instance and the value in
// a StoreInstanceField). This means multiple entries may be removed from the
// a StoreField). This means multiple entries may be removed from the
// use list when removing instructions, not just the current one, so
// Value::Iterator cannot be safely used.
GrowableArray<Instruction*> stores_to_remove;
@ -3564,7 +3561,7 @@ void AllocationSinking::DiscoverFailedCandidates() {
} else {
ASSERT(use->instruction()->IsMaterializeObject() ||
use->instruction()->IsPhi() ||
use->instruction()->IsStoreInstanceField() ||
use->instruction()->IsStoreField() ||
use->instruction()->IsStoreIndexed());
}
}
@ -3884,7 +3881,7 @@ void AllocationSinking::InsertMaterializations(Definition* alloc) {
if (StoreDestination(use) == alloc) {
// Allocation instructions cannot be used in as inputs to themselves.
ASSERT(!use->instruction()->AsAllocation());
if (auto store = use->instruction()->AsStoreInstanceField()) {
if (auto store = use->instruction()->AsStoreField()) {
AddSlot(slots, store->slot());
} else if (auto store = use->instruction()->AsStoreIndexed()) {
const intptr_t index = store->index()->BoundSmiConstant();
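
As the comments in this pass note, allocation sinking currently only tolerates candidates whose uses are StoreField instructions writing into the allocated object itself (the real check also accepts phis, materializations, StoreIndexed, and stores whose destination is another candidate). A simplified stand-alone sketch of that use test, with hypothetical model types:

#include <cassert>
#include <vector>

// Hypothetical model of a use of the allocation being considered.
enum class UsePos { kInstance, kValue, kOther };

struct ModelUse {
  bool is_store_field;  // the use sits on a StoreField instruction
  UsePos pos;           // which input of that instruction it occupies
};

// A candidate survives only if every use writes into one of its own
// fields, i.e. the candidate is the instance input of a StoreField.
bool OnlyStoredInto(const std::vector<ModelUse>& uses) {
  for (const ModelUse& use : uses) {
    if (!use.is_store_field || use.pos != UsePos::kInstance) return false;
  }
  return true;
}

int main() {
  assert(OnlyStoredInto({{true, UsePos::kInstance}}));
  // Escapes: the allocation itself is stored as a value into another object.
  assert(!OnlyStoredInto({{true, UsePos::kValue}}));
  // Escapes: used by something that is not a StoreField (e.g. a call).
  assert(!OnlyStoredInto({{false, UsePos::kOther}}));
  return 0;
}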


@ -446,8 +446,8 @@ static void TestAliasingViaStore(
new AllocateObjectInstr(InstructionSource(), cls, S.GetNextDeoptId()));
if (!make_host_escape) {
builder.AddInstruction(
new StoreInstanceFieldInstr(slot, new Value(v5), new Value(v0),
kEmitStoreBarrier, InstructionSource()));
new StoreFieldInstr(slot, new Value(v5), new Value(v0),
kEmitStoreBarrier, InstructionSource()));
}
v1 = builder.AddDefinition(
new LoadFieldInstr(new Value(v0), slot, InstructionSource()));
@ -460,8 +460,8 @@ static void TestAliasingViaStore(
args->Add(new Value(v6));
} else if (make_host_escape) {
builder.AddInstruction(
new StoreInstanceFieldInstr(slot, new Value(v2), new Value(v0),
kEmitStoreBarrier, InstructionSource()));
new StoreFieldInstr(slot, new Value(v2), new Value(v0),
kEmitStoreBarrier, InstructionSource()));
args->Add(new Value(v5));
}
call = builder.AddInstruction(new StaticCallInstr(
@ -857,7 +857,7 @@ static void CountLoadsStores(FlowGraph* flow_graph,
it.Advance()) {
if (it.Current()->IsLoadField()) {
(*loads)++;
} else if (it.Current()->IsStoreInstanceField()) {
} else if (it.Current()->IsStoreField()) {
(*stores)++;
}
}
@ -1275,8 +1275,8 @@ ISOLATE_UNIT_TEST_CASE(DelayAllocations_DelayAcrossCalls) {
StaticCallInstr* call1;
StaticCallInstr* call2;
AllocateObjectInstr* allocate;
StoreInstanceFieldInstr* store1;
StoreInstanceFieldInstr* store2;
StoreFieldInstr* store1;
StoreFieldInstr* store2;
ILMatcher cursor(flow_graph, entry, true, ParallelMovesHandling::kSkip);
RELEASE_ASSERT(cursor.TryMatch({
@ -1286,8 +1286,8 @@ ISOLATE_UNIT_TEST_CASE(DelayAllocations_DelayAcrossCalls) {
{kMatchAndMoveStaticCall, &call2},
kMoveGlob,
{kMatchAndMoveAllocateObject, &allocate},
{kMatchAndMoveStoreInstanceField, &store1},
{kMatchAndMoveStoreInstanceField, &store2},
{kMatchAndMoveStoreField, &store1},
{kMatchAndMoveStoreField, &store2},
}));
EXPECT(strcmp(call1->function().UserVisibleNameCString(), "foo") == 0);


@ -912,10 +912,10 @@ bool CallSpecializer::TryInlineInstanceSetter(InstanceCallInstr* instr) {
// Field guard was detached.
ASSERT(instr->FirstArgIndex() == 0);
StoreInstanceFieldInstr* store = new (Z) StoreInstanceFieldInstr(
field, new (Z) Value(instr->ArgumentAt(0)),
new (Z) Value(instr->ArgumentAt(1)), kEmitStoreBarrier, instr->source(),
&flow_graph()->parsed_function());
StoreFieldInstr* store = new (Z)
StoreFieldInstr(field, new (Z) Value(instr->ArgumentAt(0)),
new (Z) Value(instr->ArgumentAt(1)), kEmitStoreBarrier,
instr->source(), &flow_graph()->parsed_function());
// Discard the environment from the original instruction because the store
// can't deoptimize.


@ -517,34 +517,31 @@ const Field& BaseFlowGraphBuilder::MayCloneField(Zone* zone,
Fragment BaseFlowGraphBuilder::StoreNativeField(
TokenPosition position,
const Slot& slot,
StoreInstanceFieldInstr::Kind
kind /* = StoreInstanceFieldInstr::Kind::kOther */,
StoreFieldInstr::Kind kind /* = StoreFieldInstr::Kind::kOther */,
StoreBarrierType emit_store_barrier /* = kEmitStoreBarrier */,
compiler::Assembler::MemoryOrder memory_order /* = kRelaxed */) {
Value* value = Pop();
if (value->BindsToConstant()) {
emit_store_barrier = kNoStoreBarrier;
}
StoreInstanceFieldInstr* store =
new (Z) StoreInstanceFieldInstr(slot, Pop(), value, emit_store_barrier,
InstructionSource(position), kind);
StoreFieldInstr* store =
new (Z) StoreFieldInstr(slot, Pop(), value, emit_store_barrier,
InstructionSource(position), kind);
return Fragment(store);
}
Fragment BaseFlowGraphBuilder::StoreInstanceField(
Fragment BaseFlowGraphBuilder::StoreField(
const Field& field,
StoreInstanceFieldInstr::Kind
kind /* = StoreInstanceFieldInstr::Kind::kOther */,
StoreFieldInstr::Kind kind /* = StoreFieldInstr::Kind::kOther */,
StoreBarrierType emit_store_barrier) {
return StoreNativeField(TokenPosition::kNoSource,
Slot::Get(MayCloneField(Z, field), parsed_function_),
kind, emit_store_barrier);
}
Fragment BaseFlowGraphBuilder::StoreInstanceFieldGuarded(
Fragment BaseFlowGraphBuilder::StoreFieldGuarded(
const Field& field,
StoreInstanceFieldInstr::Kind
kind /* = StoreInstanceFieldInstr::Kind::kOther */) {
StoreFieldInstr::Kind kind /* = StoreFieldInstr::Kind::kOther */) {
Fragment instructions;
const Field& field_clone = MayCloneField(Z, field);
if (IG->use_field_guards()) {
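
StoreNativeField above drops the requested write barrier whenever the stored value binds to a constant: constants are Smis or old-space objects, so such a store can never create a pointer the GC needs to remember. A small stand-alone model of that narrowing (the StoreBarrierType enum mirrors il.h; the rest is a hypothetical stand-in, not the real builder API):

#include <cassert>

enum StoreBarrierType { kNoStoreBarrier, kEmitStoreBarrier };

// Hypothetical stand-in for a Value that may bind to a constant.
struct ModelValue {
  bool binds_to_constant;
};

// The barrier requested by the caller is dropped for constant values.
StoreBarrierType BarrierFor(const ModelValue& value,
                            StoreBarrierType requested) {
  return value.binds_to_constant ? kNoStoreBarrier : requested;
}

int main() {
  assert(BarrierFor({true}, kEmitStoreBarrier) == kNoStoreBarrier);
  assert(BarrierFor({false}, kEmitStoreBarrier) == kEmitStoreBarrier);
  assert(BarrierFor({false}, kNoStoreBarrier) == kNoStoreBarrier);
  return 0;
}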


@ -201,29 +201,26 @@ class BaseFlowGraphBuilder {
Fragment StoreNativeField(
TokenPosition position,
const Slot& slot,
StoreInstanceFieldInstr::Kind kind =
StoreInstanceFieldInstr::Kind::kOther,
StoreFieldInstr::Kind kind = StoreFieldInstr::Kind::kOther,
StoreBarrierType emit_store_barrier = kEmitStoreBarrier,
compiler::Assembler::MemoryOrder memory_order =
compiler::Assembler::kRelaxedNonAtomic);
Fragment StoreNativeField(
const Slot& slot,
StoreInstanceFieldInstr::Kind kind =
StoreInstanceFieldInstr::Kind::kOther,
StoreFieldInstr::Kind kind = StoreFieldInstr::Kind::kOther,
StoreBarrierType emit_store_barrier = kEmitStoreBarrier,
compiler::Assembler::MemoryOrder memory_order =
compiler::Assembler::kRelaxedNonAtomic) {
return StoreNativeField(TokenPosition::kNoSource, slot, kind,
emit_store_barrier, memory_order);
}
Fragment StoreInstanceField(
Fragment StoreField(
const Field& field,
StoreInstanceFieldInstr::Kind kind =
StoreInstanceFieldInstr::Kind::kOther,
StoreFieldInstr::Kind kind = StoreFieldInstr::Kind::kOther,
StoreBarrierType emit_store_barrier = kEmitStoreBarrier);
Fragment StoreInstanceFieldGuarded(const Field& field,
StoreInstanceFieldInstr::Kind kind =
StoreInstanceFieldInstr::Kind::kOther);
Fragment StoreFieldGuarded(
const Field& field,
StoreFieldInstr::Kind kind = StoreFieldInstr::Kind::kOther);
Fragment LoadStaticField(const Field& field, bool calls_initializer);
Fragment RedefinitionWithType(const AbstractType& type);
Fragment ReachabilityFence();


@ -173,8 +173,8 @@ Fragment StreamingFlowGraphBuilder::BuildFieldInitializer(
if (only_for_side_effects) {
instructions += Drop();
} else {
instructions += flow_graph_builder_->StoreInstanceFieldGuarded(
field, StoreInstanceFieldInstr::Kind::kInitializing);
instructions += flow_graph_builder_->StoreFieldGuarded(
field, StoreFieldInstr::Kind::kInitializing);
}
return instructions;
}
@ -196,8 +196,8 @@ Fragment StreamingFlowGraphBuilder::BuildLateFieldInitializer(
Fragment instructions;
instructions += LoadLocal(parsed_function()->receiver_var());
instructions += flow_graph_builder_->Constant(Object::sentinel());
instructions += flow_graph_builder_->StoreInstanceField(
field, StoreInstanceFieldInstr::Kind::kInitializing);
instructions += flow_graph_builder_->StoreField(
field, StoreFieldInstr::Kind::kInitializing);
return instructions;
}
@ -260,7 +260,7 @@ Fragment StreamingFlowGraphBuilder::BuildInitializers(
if (!is_redirecting_constructor) {
// Sort list of fields (represented as their kernel offsets) which will
// be initialized by the constructor initializer list. We will not emit
// StoreInstanceField instructions for those initializers though we will
// StoreField instructions for those initializers though we will
// still evaluate initialization expression for its side effects.
GrowableArray<intptr_t> constructor_initialized_field_offsets(
initializer_fields.length());
@ -548,7 +548,7 @@ Fragment StreamingFlowGraphBuilder::SetupCapturedParameters(
body += LoadLocal(&raw_parameter);
body += flow_graph_builder_->StoreNativeField(
Slot::GetContextVariableSlotFor(thread(), *variable),
StoreInstanceFieldInstr::Kind::kInitializing);
StoreFieldInstr::Kind::kInitializing);
}
}
body += Drop(); // The context.
@ -4196,7 +4196,7 @@ Fragment StreamingFlowGraphBuilder::BuildPartialTearoffInstantiation(
instructions += LoadLocal(type_args_vec);
instructions += flow_graph_builder_->StoreNativeField(
Slot::Closure_delayed_type_arguments(),
StoreInstanceFieldInstr::Kind::kInitializing);
StoreFieldInstr::Kind::kInitializing);
instructions += DropTemporary(&type_args_vec);
// Copy over the instantiator type arguments.
@ -4206,7 +4206,7 @@ Fragment StreamingFlowGraphBuilder::BuildPartialTearoffInstantiation(
Slot::Closure_instantiator_type_arguments());
instructions += flow_graph_builder_->StoreNativeField(
Slot::Closure_instantiator_type_arguments(),
StoreInstanceFieldInstr::Kind::kInitializing);
StoreFieldInstr::Kind::kInitializing);
// Copy over the function type arguments.
instructions += LoadLocal(new_closure);
@ -4215,7 +4215,7 @@ Fragment StreamingFlowGraphBuilder::BuildPartialTearoffInstantiation(
Slot::Closure_function_type_arguments());
instructions += flow_graph_builder_->StoreNativeField(
Slot::Closure_function_type_arguments(),
StoreInstanceFieldInstr::Kind::kInitializing);
StoreFieldInstr::Kind::kInitializing);
instructions += DropTempsPreserveTop(1); // Drop old closure.
@ -5258,7 +5258,7 @@ Fragment StreamingFlowGraphBuilder::BuildYieldStatement(
} else {
field = IG->object_store()->sync_star_iterator_current();
}
instructions += B->StoreInstanceFieldGuarded(field);
instructions += B->StoreFieldGuarded(field);
instructions += B->Constant(Bool::True());
instructions +=
B->Suspend(pos, SuspendInstr::StubId::kSuspendSyncStarAtYield);
@ -5506,7 +5506,7 @@ Fragment StreamingFlowGraphBuilder::BuildFunctionNode(
instructions += LoadInstantiatorTypeArguments();
instructions += flow_graph_builder_->StoreNativeField(
Slot::Closure_instantiator_type_arguments(),
StoreInstanceFieldInstr::Kind::kInitializing);
StoreFieldInstr::Kind::kInitializing);
}
// TODO(30455): We only need to save these if the closure uses any captured
@ -5515,7 +5515,7 @@ Fragment StreamingFlowGraphBuilder::BuildFunctionNode(
instructions += LoadFunctionTypeArguments();
instructions += flow_graph_builder_->StoreNativeField(
Slot::Closure_function_type_arguments(),
StoreInstanceFieldInstr::Kind::kInitializing);
StoreFieldInstr::Kind::kInitializing);
if (function.IsGeneric()) {
// Only generic functions need to have properly initialized
@ -5524,7 +5524,7 @@ Fragment StreamingFlowGraphBuilder::BuildFunctionNode(
instructions += Constant(Object::empty_type_arguments());
instructions += flow_graph_builder_->StoreNativeField(
Slot::Closure_delayed_type_arguments(),
StoreInstanceFieldInstr::Kind::kInitializing);
StoreFieldInstr::Kind::kInitializing);
}
return instructions;


@ -138,8 +138,8 @@ Fragment FlowGraphBuilder::PushContext(const LocalScope* scope) {
LocalVariable* context = MakeTemporary();
instructions += LoadLocal(context);
instructions += LoadLocal(parsed_function_->current_context_var());
instructions += StoreNativeField(
Slot::Context_parent(), StoreInstanceFieldInstr::Kind::kInitializing);
instructions += StoreNativeField(Slot::Context_parent(),
StoreFieldInstr::Kind::kInitializing);
instructions += StoreLocal(TokenPosition::kNoSource,
parsed_function_->current_context_var());
++context_depth_;
@ -553,7 +553,7 @@ Fragment FlowGraphBuilder::StoreLateField(const Field& field,
if (is_static) {
instructions += StoreStaticField(position, field);
} else {
instructions += StoreInstanceFieldGuarded(field);
instructions += StoreFieldGuarded(field);
}
return instructions;
@ -1212,12 +1212,12 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfRecognizedMethod(
body += LoadLocal(object);
body += LoadLocal(parsed_function_->RawParameterVariable(1));
body += StoreNativeField(Slot::GrowableObjectArray_data(),
StoreInstanceFieldInstr::Kind::kInitializing,
StoreFieldInstr::Kind::kInitializing,
kNoStoreBarrier);
body += LoadLocal(object);
body += IntConstant(0);
body += StoreNativeField(Slot::GrowableObjectArray_length(),
StoreInstanceFieldInstr::Kind::kInitializing,
StoreFieldInstr::Kind::kInitializing,
kNoStoreBarrier);
break;
}
@ -1312,10 +1312,9 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfRecognizedMethod(
body += LoadLocal(parsed_function_->RawParameterVariable(1));
// Uses a store-release barrier so that other isolates will see the
// contents of the index after seeing the index itself.
body +=
StoreNativeField(Slot::ImmutableLinkedHashBase_index(),
StoreInstanceFieldInstr::Kind::kOther,
kEmitStoreBarrier, compiler::Assembler::kRelease);
body += StoreNativeField(Slot::ImmutableLinkedHashBase_index(),
StoreFieldInstr::Kind::kOther, kEmitStoreBarrier,
compiler::Assembler::kRelease);
body += NullConstant();
break;
case MethodRecognizer::kUtf8DecoderScan:
@ -1580,7 +1579,7 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfRecognizedMethod(
body += LoadLocal(typed_data_object);
body += LoadLocal(arg_length);
body += StoreNativeField(Slot::TypedDataBase_length(),
StoreInstanceFieldInstr::Kind::kInitializing,
StoreFieldInstr::Kind::kInitializing,
kNoStoreBarrier);
// Initialize the result's data pointer field.
@ -1589,7 +1588,7 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfRecognizedMethod(
body += LoadUntagged(compiler::target::PointerBase::data_offset());
body += ConvertUntaggedToUnboxed(kUnboxedIntPtr);
body += StoreNativeField(Slot::PointerBase_data(),
StoreInstanceFieldInstr::Kind::kInitializing,
StoreFieldInstr::Kind::kInitializing,
kNoStoreBarrier);
} break;
case MethodRecognizer::kGetNativeField: {
@ -1738,8 +1737,8 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfRecognizedMethod(
ASSERT_EQUAL(function.NumParameters(), 2); \
body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
body += LoadLocal(parsed_function_->RawParameterVariable(1)); \
body += StoreNativeField( \
Slot::slot(), StoreInstanceFieldInstr::Kind::kOther, kNoStoreBarrier); \
body += StoreNativeField(Slot::slot(), StoreFieldInstr::Kind::kOther, \
kNoStoreBarrier); \
body += NullConstant(); \
break;
STORE_NATIVE_FIELD_NO_BARRIER(IL_BODY)
@ -1781,19 +1780,19 @@ Fragment FlowGraphBuilder::BuildTypedDataViewFactoryConstructor(
body += LoadLocal(view_object);
body += LoadLocal(typed_data);
body += StoreNativeField(token_pos, Slot::TypedDataView_typed_data(),
StoreInstanceFieldInstr::Kind::kInitializing);
StoreFieldInstr::Kind::kInitializing);
body += LoadLocal(view_object);
body += LoadLocal(offset_in_bytes);
body += StoreNativeField(token_pos, Slot::TypedDataView_offset_in_bytes(),
StoreInstanceFieldInstr::Kind::kInitializing,
kNoStoreBarrier);
body +=
StoreNativeField(token_pos, Slot::TypedDataView_offset_in_bytes(),
StoreFieldInstr::Kind::kInitializing, kNoStoreBarrier);
body += LoadLocal(view_object);
body += LoadLocal(length);
body += StoreNativeField(token_pos, Slot::TypedDataBase_length(),
StoreInstanceFieldInstr::Kind::kInitializing,
kNoStoreBarrier);
body +=
StoreNativeField(token_pos, Slot::TypedDataBase_length(),
StoreFieldInstr::Kind::kInitializing, kNoStoreBarrier);
// Update the inner pointer.
//
@ -1870,7 +1869,7 @@ Fragment FlowGraphBuilder::BuildImplicitClosureCreation(
fragment += StoreNativeField(
Slot::GetContextVariableSlotFor(
thread_, *implicit_closure_scope->context_variables()[0]),
StoreInstanceFieldInstr::Kind::kInitializing);
StoreFieldInstr::Kind::kInitializing);
fragment += AllocateClosure();
LocalVariable* closure = MakeTemporary();
@ -1880,7 +1879,7 @@ Fragment FlowGraphBuilder::BuildImplicitClosureCreation(
fragment += LoadLocal(closure);
fragment += LoadInstantiatorTypeArguments();
fragment += StoreNativeField(Slot::Closure_instantiator_type_arguments(),
StoreInstanceFieldInstr::Kind::kInitializing);
StoreFieldInstr::Kind::kInitializing);
}
if (target.IsGeneric()) {
@ -1889,7 +1888,7 @@ Fragment FlowGraphBuilder::BuildImplicitClosureCreation(
fragment += LoadLocal(closure);
fragment += Constant(Object::empty_type_arguments());
fragment += StoreNativeField(Slot::Closure_delayed_type_arguments(),
StoreInstanceFieldInstr::Kind::kInitializing);
StoreFieldInstr::Kind::kInitializing);
}
return fragment;
@ -3801,8 +3800,7 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfFieldAccessor(
setter_value);
} else {
if (is_method) {
body += StoreInstanceFieldGuarded(
field, StoreInstanceFieldInstr::Kind::kOther);
body += StoreFieldGuarded(field, StoreFieldInstr::Kind::kOther);
} else {
body += StoreStaticField(TokenPosition::kNoSource, field);
}
@ -4126,15 +4124,15 @@ Fragment FlowGraphBuilder::UnhandledException() {
body += LoadLocal(error_instance);
body += LoadLocal(CurrentException());
body += StoreNativeField(Slot::UnhandledException_exception(),
StoreInstanceFieldInstr::Kind::kInitializing,
kNoStoreBarrier);
body +=
StoreNativeField(Slot::UnhandledException_exception(),
StoreFieldInstr::Kind::kInitializing, kNoStoreBarrier);
body += LoadLocal(error_instance);
body += LoadLocal(CurrentStackTrace());
body += StoreNativeField(Slot::UnhandledException_stacktrace(),
StoreInstanceFieldInstr::Kind::kInitializing,
kNoStoreBarrier);
body +=
StoreNativeField(Slot::UnhandledException_stacktrace(),
StoreFieldInstr::Kind::kInitializing, kNoStoreBarrier);
return body;
}
@ -4289,8 +4287,8 @@ Fragment FlowGraphBuilder::WrapTypedDataBaseInCompound(
body += AllocateObject(TokenPosition::kNoSource, compound_sub_class, 0);
body += LoadLocal(MakeTemporary("compound")); // Duplicate Struct or Union.
body += LoadLocal(typed_data);
body += StoreInstanceField(compound_typed_data_base,
StoreInstanceFieldInstr::Kind::kInitializing);
body += StoreField(compound_typed_data_base,
StoreFieldInstr::Kind::kInitializing);
body += DropTempsPreserveTop(1); // Drop TypedData.
return body;
}


@ -872,7 +872,7 @@ bool GraphIntrinsifier::Build_GrowableArraySetData(FlowGraph* flow_graph) {
builder.AddInstruction(new CheckClassInstr(new Value(data), DeoptId::kNone,
*value_check, builder.Source()));
builder.AddInstruction(new StoreInstanceFieldInstr(
builder.AddInstruction(new StoreFieldInstr(
Slot::GrowableObjectArray_data(), new Value(growable_array),
new Value(data), kEmitStoreBarrier, builder.Source()));
// Return null.
@ -894,7 +894,7 @@ bool GraphIntrinsifier::Build_GrowableArraySetLength(FlowGraph* flow_graph) {
builder.AddInstruction(
new CheckSmiInstr(new Value(length), DeoptId::kNone, builder.Source()));
builder.AddInstruction(new StoreInstanceFieldInstr(
builder.AddInstruction(new StoreFieldInstr(
Slot::GrowableObjectArray_length(), new Value(growable_array),
new Value(length), kNoStoreBarrier, builder.Source()));
Definition* null_def = builder.AddNullDefinition();
@ -1105,7 +1105,7 @@ bool GraphIntrinsifier::Build_ImplicitSetter(FlowGraph* flow_graph) {
/*is_checked=*/true);
}
builder.AddInstruction(new (zone) StoreInstanceFieldInstr(
builder.AddInstruction(new (zone) StoreFieldInstr(
slot, new (zone) Value(receiver), new (zone) Value(value), barrier_mode,
builder.Source()));


@ -190,10 +190,9 @@ void JitCallSpecializer::LowerContextAllocation(
} else {
initial_value = new (Z) Value(flow_graph()->constant_null());
}
StoreInstanceFieldInstr* store = new (Z) StoreInstanceFieldInstr(
StoreFieldInstr* store = new (Z) StoreFieldInstr(
Slot::Context_parent(), new (Z) Value(replacement), initial_value,
kNoStoreBarrier, alloc->source(),
StoreInstanceFieldInstr::Kind::kInitializing);
kNoStoreBarrier, alloc->source(), StoreFieldInstr::Kind::kInitializing);
flow_graph()->InsertAfter(cursor, store, nullptr, FlowGraph::kEffect);
cursor = replacement;
@ -208,9 +207,9 @@ void JitCallSpecializer::LowerContextAllocation(
initial_value = new (Z) Value(flow_graph()->constant_null());
}
store = new (Z) StoreInstanceFieldInstr(
store = new (Z) StoreFieldInstr(
*slot, new (Z) Value(replacement), initial_value, kNoStoreBarrier,
alloc->source(), StoreInstanceFieldInstr::Kind::kInitializing);
alloc->source(), StoreFieldInstr::Kind::kInitializing);
flow_graph()->InsertAfter(cursor, store, nullptr, FlowGraph::kEffect);
cursor = store;
}


@ -44,7 +44,7 @@ typedef DirectChainedHashMap<DefinitionIndexPairTrait> DefinitionIndexMap;
// This optimization removes write barriers from some store instructions under
// certain assumptions which the runtime is responsible to sustain.
//
// We can skip a write barrier on a StoreInstanceField to a container object X
// We can skip a write barrier on a StoreField to a container object X
// if we know that either:
// - X is in new-space, or
// - X is in old-space, and:
@ -385,7 +385,7 @@ void WriteBarrierElimination::UpdateVectorForBlock(BlockEntryInstr* entry,
Instruction* const current = it.Current();
if (finalize) {
if (StoreInstanceFieldInstr* instr = current->AsStoreInstanceField()) {
if (StoreFieldInstr* instr = current->AsStoreField()) {
Definition* const container = instr->instance()->definition();
if (IsUsable(container) && vector_->Contains(Index(container))) {
DEBUG_ASSERT(SlotEligibleForWBE(instr->slot()));
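
The finalize step shown here clears the store barrier on StoreField instructions whose container is still tracked as usable (for example, freshly allocated in new space and not invalidated by an intervening call or GC-triggering instruction). A stand-alone sketch of that bookkeeping, with a plain set standing in for the pass's bit vector and hypothetical model types:

#include <cassert>
#include <unordered_set>
#include <vector>

// Hypothetical model of a StoreField: the id of the definition that owns
// the written field, plus the barrier flag the pass may clear.
struct ModelStore {
  int container_id;
  bool emit_store_barrier;
};

// Containers currently proven usable (e.g. freshly allocated in new space
// and not invalidated since).
using UsableSet = std::unordered_set<int>;

void Finalize(std::vector<ModelStore>& block, const UsableSet& usable) {
  for (ModelStore& store : block) {
    if (usable.count(store.container_id) != 0) {
      store.emit_store_barrier = false;  // barrier proven unnecessary
    }
  }
}

int main() {
  std::vector<ModelStore> block = {{1, true}, {2, true}};
  Finalize(block, UsableSet{1});
  assert(!block[0].emit_store_barrier);  // container 1 was usable
  assert(block[1].emit_store_barrier);   // container 2 keeps its barrier
  return 0;
}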


@ -63,8 +63,8 @@ ISOLATE_UNIT_TEST_CASE(IRTest_WriteBarrierElimination_JoinSuccessors) {
auto entry = flow_graph->graph_entry()->normal_entry();
EXPECT(entry != nullptr);
StoreInstanceFieldInstr* store1 = nullptr;
StoreInstanceFieldInstr* store2 = nullptr;
StoreFieldInstr* store1 = nullptr;
StoreFieldInstr* store2 = nullptr;
ILMatcher cursor(flow_graph, entry);
RELEASE_ASSERT(cursor.TryMatch({
@ -73,9 +73,9 @@ ISOLATE_UNIT_TEST_CASE(IRTest_WriteBarrierElimination_JoinSuccessors) {
kMoveGlob,
kMatchAndMoveBranchTrue,
kMoveGlob,
{kMatchAndMoveStoreInstanceField, &store1},
{kMatchAndMoveStoreField, &store1},
kMoveGlob,
{kMatchAndMoveStoreInstanceField, &store2},
{kMatchAndMoveStoreField, &store2},
}));
EXPECT(store1->ShouldEmitStoreBarrier() == false);
@ -123,7 +123,7 @@ ISOLATE_UNIT_TEST_CASE(IRTest_WriteBarrierElimination_AtLeastOnce) {
auto entry = flow_graph->graph_entry()->normal_entry();
EXPECT(entry != nullptr);
StoreInstanceFieldInstr* store = nullptr;
StoreFieldInstr* store = nullptr;
ILMatcher cursor(flow_graph, entry);
RELEASE_ASSERT(cursor.TryMatch({
@ -132,7 +132,7 @@ ISOLATE_UNIT_TEST_CASE(IRTest_WriteBarrierElimination_AtLeastOnce) {
kMoveGlob,
kMatchAndMoveGoto,
kMoveGlob,
{kMatchAndMoveStoreInstanceField, &store},
{kMatchAndMoveStoreField, &store},
}));
EXPECT(store->ShouldEmitStoreBarrier() == true);
@ -190,7 +190,7 @@ static void TestWBEForArrays(int length) {
auto entry = flow_graph->graph_entry()->normal_entry();
EXPECT(entry != nullptr);
StoreInstanceFieldInstr* store_into_c = nullptr;
StoreFieldInstr* store_into_c = nullptr;
StoreIndexedInstr* store_into_array_before_loop = nullptr;
StoreIndexedInstr* store_into_array_after_loop = nullptr;
@ -203,7 +203,7 @@ static void TestWBEForArrays(int length) {
kMoveGlob,
kMatchAndMoveBranchTrue,
kMoveGlob,
{kMatchAndMoveStoreInstanceField, &store_into_c},
{kMatchAndMoveStoreField, &store_into_c},
kMoveGlob,
kMatchAndMoveGoto,
kMoveGlob,
@ -302,16 +302,16 @@ ISOLATE_UNIT_TEST_CASE(IRTest_WriteBarrierElimination_LoadLateField) {
auto entry = flow_graph->graph_entry()->normal_entry();
EXPECT(entry != nullptr);
StoreInstanceFieldInstr* store1 = nullptr;
StoreInstanceFieldInstr* store2 = nullptr;
StoreFieldInstr* store1 = nullptr;
StoreFieldInstr* store2 = nullptr;
ILMatcher cursor(flow_graph, entry);
RELEASE_ASSERT(cursor.TryMatch(
{
kMatchAndMoveAllocateObject,
kMatchAndMoveLoadField,
{kMatchAndMoveStoreInstanceField, &store1},
{kMatchAndMoveStoreInstanceField, &store2},
{kMatchAndMoveStoreField, &store1},
{kMatchAndMoveStoreField, &store2},
},
kMoveGlob));
@ -353,16 +353,16 @@ ISOLATE_UNIT_TEST_CASE(IRTest_WriteBarrierElimination_LoadLateStaticField) {
auto entry = flow_graph->graph_entry()->normal_entry();
EXPECT(entry != nullptr);
StoreInstanceFieldInstr* store1 = nullptr;
StoreInstanceFieldInstr* store2 = nullptr;
StoreFieldInstr* store1 = nullptr;
StoreFieldInstr* store2 = nullptr;
ILMatcher cursor(flow_graph, entry);
RELEASE_ASSERT(cursor.TryMatch(
{
kMatchAndMoveAllocateObject,
kMatchAndMoveLoadStaticField,
{kMatchAndMoveStoreInstanceField, &store1},
{kMatchAndMoveStoreInstanceField, &store2},
{kMatchAndMoveStoreField, &store1},
{kMatchAndMoveStoreField, &store2},
},
kMoveGlob));


@ -2194,7 +2194,7 @@ ObjectPtr KernelLoader::GenerateFieldAccessors(const Class& klass,
return converter.SimpleValue().ptr();
} else {
// Note: optimizer relies on DoubleInitialized bit in its field-unboxing
// heuristics. See JitCallSpecializer::VisitStoreInstanceField for more
// heuristics. See JitCallSpecializer::VisitStoreField for more
// details.
field.RecordStore(converter.SimpleValue());
if (!converter.SimpleValue().IsNull() &&


@ -4482,7 +4482,7 @@ class Field : public Object {
const Object& owner,
TokenPosition token_pos,
TokenPosition end_token_pos);
friend class StoreInstanceFieldInstr; // Generated code access to bit field.
friend class StoreFieldInstr; // Generated code access to bit field.
enum {
kConstBit = 0,


@ -1501,7 +1501,7 @@ class UntaggedField : public UntaggedObject {
friend class CidRewriteVisitor;
friend class GuardFieldClassInstr; // For sizeof(guarded_cid_/...)
friend class LoadFieldInstr; // For sizeof(guarded_cid_/...)
friend class StoreInstanceFieldInstr; // For sizeof(guarded_cid_/...)
friend class StoreFieldInstr; // For sizeof(guarded_cid_/...)
};
class alignas(8) UntaggedScript : public UntaggedObject {