/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_INTRINSICS_UTILS_H_
#define ART_COMPILER_OPTIMIZING_INTRINSICS_UTILS_H_

#include "base/casts.h"
#include "base/macros.h"
#include "code_generator.h"
#include "data_type-inl.h"
#include "dex/dex_file-inl.h"
#include "locations.h"
#include "mirror/var_handle.h"
#include "nodes.h"
#include "utils/assembler.h"
#include "utils/label.h"

namespace art {

// Default slow-path for fallback (calling the managed code to handle the intrinsic) in an
// intrinsified call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slow-path call, they must be
//       restored!
//
// Note: If an invoke wasn't sharpened, we will put down an invoke-virtual here. That's potentially
//       sub-optimal (compared to a direct pointer call), but this is a slow path.

template <typename TDexCallingConvention,
          typename TSlowPathCode = SlowPathCode,
          typename TAssembler = Assembler>
class IntrinsicSlowPath : public TSlowPathCode {
 public:
  explicit IntrinsicSlowPath(HInvoke* invoke) : TSlowPathCode(invoke), invoke_(invoke) { }

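  // Moves the intrinsic call's arguments into the positions expected by the dex calling
  // convention and returns the method location to be used for the call.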
  Location MoveArguments(CodeGenerator* codegen) {
    TDexCallingConvention calling_convention_visitor;
    IntrinsicVisitor::MoveArguments(invoke_, codegen, &calling_convention_visitor);
    return calling_convention_visitor.GetMethodLocation();
  }

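  // Emits the fallback call: saves live registers, moves the arguments into calling-convention
  // positions, performs the call, copies any return value back to the expected output location,
  // and restores live registers before jumping back to the fast path.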
  void EmitNativeCode(CodeGenerator* codegen) override {
    TAssembler* assembler = down_cast<TAssembler*>(codegen->GetAssembler());
    assembler->Bind(this->GetEntryLabel());

    this->SaveLiveRegisters(codegen, invoke_->GetLocations());

    Location method_loc = MoveArguments(codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      HInvokeStaticOrDirect* invoke_static_or_direct = invoke_->AsInvokeStaticOrDirect();
      DCHECK_NE(invoke_static_or_direct->GetMethodLoadKind(), MethodLoadKind::kRecursive);
      DCHECK_NE(invoke_static_or_direct->GetCodePtrLocation(),
                CodePtrLocation::kCallCriticalNative);
      codegen->GenerateStaticOrDirectCall(invoke_static_or_direct, method_loc, this);
    } else if (invoke_->IsInvokeVirtual()) {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), method_loc, this);
    } else {
      DCHECK(invoke_->IsInvokePolymorphic());
      codegen->GenerateInvokePolymorphicCall(invoke_->AsInvokePolymorphic(), this);
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegisterKind());  // TODO: Replace this when we support output in memory.
      // We want to double-check that we don't overwrite a live register with the return
      // value.
      // Note: For the possible kNoOutputOverlap case we can't simply remove the OUT register
      // from the GetLiveRegisters() - theoretically it might be needed after the return from
      // the slow path.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->OverlapsRegisters(out));
      codegen->MoveFromReturnRegister(out, invoke_->GetType());
    }

    this->RestoreLiveRegisters(codegen, invoke_->GetLocations());
    assembler->Jump(this->GetExitLabel());
  }

  const char* GetDescription() const override { return "IntrinsicSlowPath"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPath);
};

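// Returns the number of coordinate arguments (e.g. the receiver object or an array index)
// expected by the VarHandle accessor: the accessor's argument count excluding the VarHandle
// itself, minus the number of var-type (value) parameters implied by the access mode template.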
static inline size_t GetExpectedVarHandleCoordinatesCount(HInvoke* invoke) {
  mirror::VarHandle::AccessModeTemplate access_mode_template =
      mirror::VarHandle::GetAccessModeTemplateByIntrinsic(invoke->GetIntrinsic());
  size_t var_type_count = mirror::VarHandle::GetNumberOfVarTypeParameters(access_mode_template);
  size_t accessor_argument_count = invoke->GetNumberOfArguments() - 1;

  return accessor_argument_count - var_type_count;
}

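// Returns the HIR data type corresponding to the character at `index` in the shorty of the
// invoke-polymorphic's proto (index 0 is the return type, subsequent indices are parameters).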
static inline DataType::Type GetDataTypeFromShorty(HInvoke* invoke, uint32_t index) {
  DCHECK(invoke->IsInvokePolymorphic());
  const DexFile* dex_file = invoke->GetMethodReference().dex_file;
  const char* shorty = dex_file->GetShorty(invoke->AsInvokePolymorphic()->GetProtoIndex());
  DCHECK_LT(index, strlen(shorty));

  return DataType::FromShorty(shorty[index]);
}

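// Returns true if the intrinsic is a VarHandle getAndBitwise{Or,Xor,And} operation in any of
// its memory-order variants (plain, acquire, release).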
static inline bool IsVarHandleGetAndBitwiseOp(HInvoke* invoke) {
  switch (invoke->GetIntrinsic()) {
    case Intrinsics::kVarHandleGetAndBitwiseOr:
    case Intrinsics::kVarHandleGetAndBitwiseOrAcquire:
    case Intrinsics::kVarHandleGetAndBitwiseOrRelease:
    case Intrinsics::kVarHandleGetAndBitwiseXor:
    case Intrinsics::kVarHandleGetAndBitwiseXorAcquire:
    case Intrinsics::kVarHandleGetAndBitwiseXorRelease:
    case Intrinsics::kVarHandleGetAndBitwiseAnd:
    case Intrinsics::kVarHandleGetAndBitwiseAndAcquire:
    case Intrinsics::kVarHandleGetAndBitwiseAndRelease:
      return true;
    default:
      return false;
  }
}

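// Returns true if the intrinsic is a VarHandle getAndAdd operation in any of its memory-order
// variants (plain, acquire, release).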
static inline bool IsVarHandleGetAndAdd(HInvoke* invoke) {
  switch (invoke->GetIntrinsic()) {
    case Intrinsics::kVarHandleGetAndAdd:
    case Intrinsics::kVarHandleGetAndAddAcquire:
    case Intrinsics::kVarHandleGetAndAddRelease:
      return true;
    default:
      return false;
  }
}

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_INTRINSICS_UTILS_H_