Allow devirtualized method to be intrinsified.

For an invocation that is devirtualized to a different method, give
intrinsic recognition a chance to match it before trying to inline it.
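
For example, a sketch of the pattern the new run-test exercises (the
method below is illustrative, not copied from the test):

  // If the inline cache shows the receiver is always a String, the
  // Object.equals() call is devirtualized to String.equals(), and with
  // this change it is matched as the kStringEquals intrinsic instead of
  // only going through the regular inlining path.
  public static boolean stringEquals(Object obj) {
    return obj.equals("foo");
  }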

Test: run-test 638-checker-inline-cache-intrinsic on host.
Change-Id: I51f70835db4c07575c58872a64a603a38dbcb89c
diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc
index 2444e43..560372e 100644
--- a/compiler/optimizing/inliner.cc
+++ b/compiler/optimizing/inliner.cc
@@ -1211,11 +1211,49 @@
ReferenceTypeInfo receiver_type,
bool do_rtp,
bool cha_devirtualize) {
+ DCHECK(!invoke_instruction->IsIntrinsic());
HInstruction* return_replacement = nullptr;
uint32_t dex_pc = invoke_instruction->GetDexPc();
HInstruction* cursor = invoke_instruction->GetPrevious();
HBasicBlock* bb_cursor = invoke_instruction->GetBlock();
- if (!TryBuildAndInline(invoke_instruction, method, receiver_type, &return_replacement)) {
+ bool should_remove_invoke_instruction = false;
+
+ // If invoke_instruction is devirtualized to a different method, give intrinsic
+ // recognition another chance before we try to inline it.
+ bool wrong_invoke_type = false;
+ if (invoke_instruction->GetResolvedMethod() != method &&
+ IntrinsicsRecognizer::Recognize(invoke_instruction, method, &wrong_invoke_type)) {
+ MaybeRecordStat(stats_, MethodCompilationStat::kIntrinsicRecognized);
+ if (invoke_instruction->IsInvokeInterface()) {
+ // We don't intrinsify an invoke-interface directly.
+ // Replace the invoke-interface with an invoke-virtual.
+ HInvokeVirtual* new_invoke = new (graph_->GetAllocator()) HInvokeVirtual(
+ graph_->GetAllocator(),
+ invoke_instruction->GetNumberOfArguments(),
+ invoke_instruction->GetType(),
+ invoke_instruction->GetDexPc(),
+ invoke_instruction->GetDexMethodIndex(), // Use interface method's dex method index.
+ method,
+ method->GetMethodIndex());
+ HInputsRef inputs = invoke_instruction->GetInputs();
+ for (size_t index = 0; index != inputs.size(); ++index) {
+ new_invoke->SetArgumentAt(index, inputs[index]);
+ }
+ invoke_instruction->GetBlock()->InsertInstructionBefore(new_invoke, invoke_instruction);
+ new_invoke->CopyEnvironmentFrom(invoke_instruction->GetEnvironment());
+ if (invoke_instruction->GetType() == DataType::Type::kReference) {
+ new_invoke->SetReferenceTypeInfo(invoke_instruction->GetReferenceTypeInfo());
+ }
+ // Run intrinsic recognizer again to set new_invoke's intrinsic.
+ IntrinsicsRecognizer::Recognize(new_invoke, method, &wrong_invoke_type);
+ DCHECK_NE(new_invoke->GetIntrinsic(), Intrinsics::kNone);
+ return_replacement = new_invoke;
+ // invoke_instruction is replaced with new_invoke.
+ should_remove_invoke_instruction = true;
+ } else {
+ // invoke_instruction is intrinsified and stays.
+ }
+ } else if (!TryBuildAndInline(invoke_instruction, method, receiver_type, &return_replacement)) {
if (invoke_instruction->IsInvokeInterface()) {
DCHECK(!method->IsProxyMethod());
// Turn an invoke-interface into an invoke-virtual. An invoke-virtual is always
@@ -1258,26 +1296,27 @@
new_invoke->SetReferenceTypeInfo(invoke_instruction->GetReferenceTypeInfo());
}
return_replacement = new_invoke;
- // Directly check if the new virtual can be recognized as an intrinsic.
- // This way, we avoid running a full recognition pass just to detect
- // these relative rare cases.
- bool wrong_invoke_type = false;
- if (IntrinsicsRecognizer::Recognize(new_invoke, &wrong_invoke_type)) {
- MaybeRecordStat(stats_, MethodCompilationStat::kIntrinsicRecognized);
- }
+ // invoke_instruction is replaced with new_invoke.
+ should_remove_invoke_instruction = true;
} else {
// TODO: Consider sharpening an invoke virtual once it is not dependent on the
// compiler driver.
return false;
}
+ } else {
+ // invoke_instruction is inlined.
+ should_remove_invoke_instruction = true;
}
+
if (cha_devirtualize) {
AddCHAGuard(invoke_instruction, dex_pc, cursor, bb_cursor);
}
if (return_replacement != nullptr) {
invoke_instruction->ReplaceWith(return_replacement);
}
- invoke_instruction->GetBlock()->RemoveInstruction(invoke_instruction);
+ if (should_remove_invoke_instruction) {
+ invoke_instruction->GetBlock()->RemoveInstruction(invoke_instruction);
+ }
FixUpReturnReferenceType(method, return_replacement);
if (do_rtp && ReturnTypeMoreSpecific(invoke_instruction, return_replacement)) {
// Actual return value has a more specific type than the method's declared
diff --git a/compiler/optimizing/instruction_simplifier.cc b/compiler/optimizing/instruction_simplifier.cc
index 7fa0c2b..089e41b 100644
--- a/compiler/optimizing/instruction_simplifier.cc
+++ b/compiler/optimizing/instruction_simplifier.cc
@@ -2035,7 +2035,9 @@
optimizations.SetArgumentIsString();
} else if (kUseReadBarrier) {
DCHECK(instruction->GetResolvedMethod() != nullptr);
- DCHECK(instruction->GetResolvedMethod()->GetDeclaringClass()->IsStringClass());
+ DCHECK(instruction->GetResolvedMethod()->GetDeclaringClass()->IsStringClass() ||
+ // Object.equals() can be devirtualized to String.equals().
+ instruction->GetResolvedMethod()->GetDeclaringClass()->IsObjectClass());
Runtime* runtime = Runtime::Current();
// For AOT, we always assume that the boot image shall contain the String.class and
// we do not need a read barrier for boot image classes as they are non-moveable.
diff --git a/compiler/optimizing/intrinsics.cc b/compiler/optimizing/intrinsics.cc
index 7719924..6928b70 100644
--- a/compiler/optimizing/intrinsics.cc
+++ b/compiler/optimizing/intrinsics.cc
@@ -137,7 +137,7 @@

case kVirtual:
// Call might be devirtualized.
- return (invoke_type == kVirtual || invoke_type == kDirect);
+ return (invoke_type == kVirtual || invoke_type == kDirect || invoke_type == kInterface);

case kSuper:
case kInterface:
@@ -148,8 +148,12 @@
UNREACHABLE();
}

-bool IntrinsicsRecognizer::Recognize(HInvoke* invoke, /*out*/ bool* wrong_invoke_type) {
- ArtMethod* art_method = invoke->GetResolvedMethod();
+bool IntrinsicsRecognizer::Recognize(HInvoke* invoke,
+ ArtMethod* art_method,
+ /*out*/ bool* wrong_invoke_type) {
+ if (art_method == nullptr) {
+ art_method = invoke->GetResolvedMethod();
+ }
*wrong_invoke_type = false;
if (art_method == nullptr || !art_method->IsIntrinsic()) {
return false;
@@ -182,7 +186,7 @@
HInstruction* inst = inst_it.Current();
if (inst->IsInvoke()) {
bool wrong_invoke_type = false;
- if (Recognize(inst->AsInvoke(), &wrong_invoke_type)) {
+ if (Recognize(inst->AsInvoke(), /* art_method */ nullptr, &wrong_invoke_type)) {
MaybeRecordStat(stats_, MethodCompilationStat::kIntrinsicRecognized);
} else if (wrong_invoke_type) {
LOG(WARNING)
diff --git a/compiler/optimizing/intrinsics.h b/compiler/optimizing/intrinsics.h
index c07a990..6299143 100644
--- a/compiler/optimizing/intrinsics.h
+++ b/compiler/optimizing/intrinsics.h
@@ -47,7 +47,7 @@
// Static helper that recognizes intrinsic call. Returns true on success.
// If it fails due to invoke type mismatch, wrong_invoke_type is set.
// Useful to recognize intrinsics on individual calls outside this full pass.
- static bool Recognize(HInvoke* invoke, /*out*/ bool* wrong_invoke_type)
+ static bool Recognize(HInvoke* invoke, ArtMethod* method, /*out*/ bool* wrong_invoke_type)
REQUIRES_SHARED(Locks::mutator_lock_);

static constexpr const char* kIntrinsicsRecognizerPassName = "intrinsics_recognition";