arm: Better document memory barriers for Unsafe/VarHandle.
Defer to the codegen for emitting memory barriers instead of
explicitly calling the assembler to emit the DMB instruction.
Using MemBarrierKind lets us better express the required
ordering, even though the codegen then generates the same
code anyway.
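
For reference, the ordering-to-barrier mapping used around the
VarHandle compare-and-exchange and get-and-update paths below can be
sketched in standalone form roughly as follows. This is an
illustration only, not part of the change: BarrierKind, PreBarrier
and PostBarrier are made-up names standing in for ART's MemBarrierKind
and the inline checks in the hunks; on ARM the codegen currently
lowers every kind to the same DMB ISH.

  #include <atomic>
  #include <optional>

  // Mirrors MemBarrierKind values used in this change.
  enum class BarrierKind { kAnyStore, kLoadAny, kAnyAny };

  // Barrier emitted before the access (the release side).
  std::optional<BarrierKind> PreBarrier(std::memory_order order) {
    if (order == std::memory_order_seq_cst) return BarrierKind::kAnyAny;
    if (order == std::memory_order_release) return BarrierKind::kAnyStore;
    return std::nullopt;  // Relaxed/acquire: nothing before the access.
  }

  // Barrier emitted after the access (the acquire side).
  std::optional<BarrierKind> PostBarrier(std::memory_order order) {
    if (order == std::memory_order_seq_cst) return BarrierKind::kAnyAny;
    if (order == std::memory_order_acquire) return BarrierKind::kLoadAny;
    return std::nullopt;  // Relaxed/release: nothing after the access.
  }

As the hunks below show, Unsafe CAS always uses the seq_cst (kAnyAny)
pair, and plain sets use kAnyStore before the store plus kAnyAny after
it for seq_cst.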
Test: testrunner.py --target --32 --optimizing
Bug: 71781600
Change-Id: I80cd8a6040b9f58ade4aa2adc9f98cfdfbdc758e
diff --git a/compiler/optimizing/intrinsics_arm_vixl.cc b/compiler/optimizing/intrinsics_arm_vixl.cc
index 3806fa2..2acb9fa 100644
--- a/compiler/optimizing/intrinsics_arm_vixl.cc
+++ b/compiler/optimizing/intrinsics_arm_vixl.cc
@@ -2595,8 +2595,9 @@
Location maybe_temp,
Location maybe_temp2,
Location maybe_temp3) {
- bool emit_barrier = (order == std::memory_order_acquire) || (order == std::memory_order_seq_cst);
- DCHECK(emit_barrier || order == std::memory_order_relaxed);
+ bool seq_cst_barrier = (order == std::memory_order_seq_cst);
+ bool acquire_barrier = seq_cst_barrier || (order == std::memory_order_acquire);
+ DCHECK(acquire_barrier || order == std::memory_order_relaxed);
DCHECK(atomic || order == std::memory_order_relaxed);
ArmVIXLAssembler* assembler = codegen->GetAssembler();
@@ -2675,8 +2676,9 @@
LOG(FATAL) << "Unexpected type " << type;
UNREACHABLE();
}
- if (emit_barrier) {
- __ Dmb(vixl32::ISH);
+ if (acquire_barrier) {
+ codegen->GenerateMemoryBarrier(
+ seq_cst_barrier ? MemBarrierKind::kAnyAny : MemBarrierKind::kLoadAny);
}
if (type == DataType::Type::kReference && !(kEmitCompilerReadBarrier && kUseBakerReadBarrier)) {
Location base_loc = LocationFrom(base);
@@ -2814,7 +2816,7 @@
ArmVIXLAssembler* assembler = codegen->GetAssembler();
if (release_barrier) {
- __ Dmb(vixl32::ISH);
+ codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
}
MemOperand address(base, offset);
UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
@@ -2883,7 +2885,7 @@
UNREACHABLE();
}
if (seq_cst_barrier) {
- __ Dmb(vixl32::ISH);
+ codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
}
}
@@ -3436,7 +3438,8 @@
cmp_failure = slow_path->GetEntryLabel();
}
- __ Dmb(vixl32::ISH);
+ // Unsafe CAS operations have std::memory_order_seq_cst semantics.
+ codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
__ Add(tmp_ptr, base, offset);
GenerateCompareAndSet(codegen,
type,
@@ -3453,7 +3456,7 @@
expected,
/*expected2=*/ vixl32::Register());
__ Bind(exit_loop);
- __ Dmb(vixl32::ISH);
+ codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
if (type == DataType::Type::kReference) {
codegen->MaybeGenerateMarkingRegisterCheck(/*code=*/ 128, /*temp_loc=*/ LocationFrom(tmp_ptr));
@@ -4173,6 +4176,7 @@
if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
// We need callee-save registers for both the class object and offset instead of
// the temporaries reserved in CreateVarHandleFieldLocations().
+ static_assert(POPCOUNT(kArmCalleeSaveRefSpills) >= 2u);
constexpr int first_callee_save = CTZ(kArmCalleeSaveRefSpills);
constexpr int second_callee_save = CTZ(kArmCalleeSaveRefSpills ^ (1u << first_callee_save));
if (GetExpectedVarHandleCoordinatesCount(invoke) == 0u) { // For static fields.
@@ -4232,14 +4236,14 @@
VarHandleTarget target = GenerateVarHandleTarget(invoke, codegen);
- bool release_barrier =
- (order == std::memory_order_release) || (order == std::memory_order_seq_cst);
- bool acquire_barrier =
- (order == std::memory_order_acquire) || (order == std::memory_order_seq_cst);
+ bool seq_cst_barrier = (order == std::memory_order_seq_cst);
+ bool release_barrier = seq_cst_barrier || (order == std::memory_order_release);
+ bool acquire_barrier = seq_cst_barrier || (order == std::memory_order_acquire);
DCHECK(release_barrier || acquire_barrier || order == std::memory_order_relaxed);
if (release_barrier) {
- __ Dmb(vixl32::ISH);
+ codegen->GenerateMemoryBarrier(
+ seq_cst_barrier ? MemBarrierKind::kAnyAny : MemBarrierKind::kAnyStore);
}
// Calculate the pointer to the value.
@@ -4343,7 +4347,8 @@
__ Bind(exit_loop);
if (acquire_barrier) {
- __ Dmb(vixl32::ISH);
+ codegen->GenerateMemoryBarrier(
+ seq_cst_barrier ? MemBarrierKind::kAnyAny : MemBarrierKind::kLoadAny);
}
if (!return_success && value_type == DataType::Type::kFloat64) {
@@ -4504,14 +4509,14 @@
VarHandleTarget target = GenerateVarHandleTarget(invoke, codegen);
- bool release_barrier =
- (order == std::memory_order_release) || (order == std::memory_order_seq_cst);
- bool acquire_barrier =
- (order == std::memory_order_acquire) || (order == std::memory_order_seq_cst);
+ bool seq_cst_barrier = (order == std::memory_order_seq_cst);
+ bool release_barrier = seq_cst_barrier || (order == std::memory_order_release);
+ bool acquire_barrier = seq_cst_barrier || (order == std::memory_order_acquire);
DCHECK(release_barrier || acquire_barrier || order == std::memory_order_relaxed);
if (release_barrier) {
- __ Dmb(vixl32::ISH);
+ codegen->GenerateMemoryBarrier(
+ seq_cst_barrier ? MemBarrierKind::kAnyAny : MemBarrierKind::kAnyStore);
}
// Use the scratch register for the pointer to the field.
@@ -4591,7 +4596,8 @@
maybe_vreg_temp);
if (acquire_barrier) {
- __ Dmb(vixl32::ISH);
+ codegen->GenerateMemoryBarrier(
+ seq_cst_barrier ? MemBarrierKind::kAnyAny : MemBarrierKind::kLoadAny);
}
if (get_and_update_op == GetAndUpdateOp::kSet && DataType::IsFloatingPointType(value_type)) {