blob: 5843886727591f77436ae2b679dcd90ce806c4a6 [file] [log] [blame]
Dave Allison65fcc2c2014-04-28 13:45:27 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "assembler_thumb2.h"
18
Vladimir Marko80afd022015-05-19 18:08:00 +010019#include "base/bit_utils.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070020#include "base/logging.h"
21#include "entrypoints/quick/quick_entrypoints.h"
22#include "offsets.h"
23#include "thread.h"
Dave Allison65fcc2c2014-04-28 13:45:27 -070024
25namespace art {
26namespace arm {
27
// Binds `label` to `bound_pc` and resolves every Fixup linked to it.
// Unresolved Fixups form a chain: the label holds the id of the first one and
// each Fixup's 16-bit placeholder in the instruction buffer holds the next id.
// Walking the chain resolves each Fixup and clears its placeholder to 0
// (the cleared placeholders are later reused as markers by AdjustFixups()).
void Thumb2Assembler::BindLabel(Label* label, uint32_t bound_pc) {
  CHECK(!label->IsBound());

  while (label->IsLinked()) {
    FixupId fixup_id = label->Position();                     // The id for linked Fixup.
    Fixup* fixup = GetFixup(fixup_id);                        // Get the Fixup at this id.
    fixup->Resolve(bound_pc);                                 // Fixup can be resolved now.
    // Add this fixup as a dependency of all later fixups: if any later fixup
    // grows, the distance to this fixup's (earlier) target changes too.
    for (FixupId id = fixup_id + 1u, end = fixups_.size(); id != end; ++id) {
      GetFixup(id)->AddDependent(fixup_id);
    }
    uint32_t fixup_location = fixup->GetLocation();
    uint16_t next = buffer_.Load<uint16_t>(fixup_location);   // Get next in chain.
    buffer_.Store<int16_t>(fixup_location, 0);                // Clear the placeholder.
    label->position_ = next;                                  // Move to next.
  }
  label->BindTo(bound_pc);
}
46
47void Thumb2Assembler::BindLiterals() {
48 // We don't add the padding here, that's done only after adjusting the Fixup sizes.
49 uint32_t code_size = buffer_.Size();
50 for (Literal& lit : literals_) {
51 Label* label = lit.GetLabel();
52 BindLabel(label, code_size);
53 code_size += lit.GetSize();
54 }
55}
56
// Re-evaluates the size of `fixup` against the current code size. If the fixup
// grows, bumps `*current_code_size` and queues all dependent fixups (whose
// target distances just changed) for recalculation. The 16-bit placeholder in
// the buffer doubles as an "already queued" flag so a fixup is never pushed
// onto the deque twice.
void Thumb2Assembler::AdjustFixupIfNeeded(Fixup* fixup, uint32_t* current_code_size,
                                          std::deque<FixupId>* fixups_to_recalculate) {
  uint32_t adjustment = fixup->AdjustSizeIfNeeded(*current_code_size);
  if (adjustment != 0u) {
    *current_code_size += adjustment;
    for (FixupId dependent_id : fixup->Dependents()) {
      Fixup* dependent = GetFixup(dependent_id);
      dependent->IncreaseAdjustment(adjustment);
      if (buffer_.Load<int16_t>(dependent->GetLocation()) == 0) {
        buffer_.Store<int16_t>(dependent->GetLocation(), 1);  // Mark as queued.
        fixups_to_recalculate->push_back(dependent_id);
      }
    }
  }
}
72
// Iterates fixup size adjustment to a fixed point: every fixup is sized once,
// then any fixup whose dependencies grew is re-examined via a worklist until
// no more changes occur. Returns the final code size (excluding literals).
// Also shifts the already-bound literal labels by the growth plus the padding
// needed to 4-align the literal pool.
uint32_t Thumb2Assembler::AdjustFixups() {
  uint32_t current_code_size = buffer_.Size();
  std::deque<FixupId> fixups_to_recalculate;
  if (kIsDebugBuild) {
    // We will use the placeholders in the buffer_ to mark whether the fixup has
    // been added to the fixups_to_recalculate. Make sure we start with zeros.
    for (Fixup& fixup : fixups_) {
      CHECK_EQ(buffer_.Load<int16_t>(fixup.GetLocation()), 0);
    }
  }
  // First pass: size every fixup once; growth enqueues dependents.
  for (Fixup& fixup : fixups_) {
    AdjustFixupIfNeeded(&fixup, &current_code_size, &fixups_to_recalculate);
  }
  // Drain the worklist until sizes stabilize.
  while (!fixups_to_recalculate.empty()) {
    // Pop the fixup.
    FixupId fixup_id = fixups_to_recalculate.front();
    fixups_to_recalculate.pop_front();
    Fixup* fixup = GetFixup(fixup_id);
    DCHECK_NE(buffer_.Load<int16_t>(fixup->GetLocation()), 0);
    buffer_.Store<int16_t>(fixup->GetLocation(), 0);  // Clear the "queued" mark.
    // See if it needs adjustment.
    AdjustFixupIfNeeded(fixup, &current_code_size, &fixups_to_recalculate);
  }
  if (kIsDebugBuild) {
    // Check that no fixup is marked as being in fixups_to_recalculate anymore.
    for (Fixup& fixup : fixups_) {
      CHECK_EQ(buffer_.Load<int16_t>(fixup.GetLocation()), 0);
    }
  }

  // Adjust literal pool labels for padding.
  // `(current_code_size & 2)` is the half-word of padding needed to bring the
  // (2-aligned) code size up to the 4-byte alignment the literal loads require.
  DCHECK_ALIGNED(current_code_size, 2);
  uint32_t literals_adjustment = current_code_size + (current_code_size & 2) - buffer_.Size();
  if (literals_adjustment != 0u) {
    for (Literal& literal : literals_) {
      Label* label = literal.GetLabel();
      DCHECK(label->IsBound());
      int old_position = label->Position();
      label->Reinitialize();
      label->BindTo(old_position + literals_adjustment);
    }
  }

  return current_code_size;
}
118
// Moves code to its final location (the buffer may have grown because some
// fixups widened) and emits the final fixup instructions. Walking the fixups
// back-to-front means each chunk of intervening code is moved exactly once,
// and a destination range never overwrites source bytes not yet moved.
void Thumb2Assembler::EmitFixups(uint32_t adjusted_code_size) {
  // Move non-fixup code to its final place and emit fixups.
  // Process fixups in reverse order so that we don't repeatedly move the same data.
  size_t src_end = buffer_.Size();
  size_t dest_end = adjusted_code_size;
  buffer_.Resize(dest_end);
  DCHECK_GE(dest_end, src_end);
  for (auto i = fixups_.rbegin(), end = fixups_.rend(); i != end; ++i) {
    Fixup* fixup = &*i;
    if (fixup->GetOriginalSize() == fixup->GetSize()) {
      // The size of this Fixup didn't change. To avoid moving the data
      // in small chunks, emit the code to its original position.
      fixup->Emit(&buffer_, adjusted_code_size);
      fixup->Finalize(dest_end - src_end);
    } else {
      // Move the data between the end of the fixup and src_end to its final location.
      size_t old_fixup_location = fixup->GetLocation();
      size_t src_begin = old_fixup_location + fixup->GetOriginalSizeInBytes();
      size_t data_size = src_end - src_begin;
      size_t dest_begin = dest_end - data_size;
      buffer_.Move(dest_begin, src_begin, data_size);
      src_end = old_fixup_location;
      dest_end = dest_begin - fixup->GetSizeInBytes();
      // Finalize the Fixup and emit the data to the new location.
      fixup->Finalize(dest_end - src_end);
      fixup->Emit(&buffer_, adjusted_code_size);
    }
  }
  // Everything before the first fixup was never moved; the cursors must meet.
  CHECK_EQ(src_end, dest_end);
}
149
// Appends the literal pool after the code, padding with one zero half-word if
// needed so the pool starts 4-aligned. Literal label positions were already
// set to these final offsets by BindLiterals()/AdjustFixups().
void Thumb2Assembler::EmitLiterals() {
  if (!literals_.empty()) {
    // Load literal instructions (LDR, LDRD, VLDR) require 4-byte alignment.
    // We don't support byte and half-word literals.
    uint32_t code_size = buffer_.Size();
    DCHECK_ALIGNED(code_size, 2);
    if ((code_size & 2u) != 0u) {
      Emit16(0);  // Alignment padding.
    }
    for (Literal& literal : literals_) {
      // RAII guard: grows the buffer if needed for the bytes emitted below.
      AssemblerBuffer::EnsureCapacity ensured(&buffer_);
      DCHECK_EQ(static_cast<size_t>(literal.GetLabel()->Position()), buffer_.Size());
      DCHECK(literal.GetSize() == 4u || literal.GetSize() == 8u);
      for (size_t i = 0, size = literal.GetSize(); i != size; ++i) {
        buffer_.Emit<uint8_t>(literal.GetData()[i]);
      }
    }
  }
}
169
// Encodes a 16-bit branch: conditional B<c> (imm8) when cond != AL,
// unconditional B (imm11) otherwise. `offset` is a byte offset and must be
// even; it is stored halved in the immediate field.
inline int16_t Thumb2Assembler::BEncoding16(int32_t offset, Condition cond) {
  DCHECK_ALIGNED(offset, 2);
  int16_t encoding = B15 | B14;
  if (cond != AL) {
    DCHECK(IsInt<9>(offset));  // imm8 holds offset/2, sign-extended.
    encoding |= B12 | (static_cast<int32_t>(cond) << 8) | ((offset >> 1) & 0xff);
  } else {
    DCHECK(IsInt<12>(offset));  // imm11 holds offset/2, sign-extended.
    encoding |= B13 | ((offset >> 1) & 0x7ff);
  }
  return encoding;
}
182
// Encodes a 32-bit branch: conditional B<c> (21-bit range) when cond != AL,
// unconditional B (25-bit range) otherwise. `offset` is an even byte offset;
// the immediate is split across S, J1, J2, imm6/imm10 and imm11 fields.
inline int32_t Thumb2Assembler::BEncoding32(int32_t offset, Condition cond) {
  DCHECK_ALIGNED(offset, 2);
  int32_t s = (offset >> 31) & 1;   // Sign bit.
  int32_t encoding = B31 | B30 | B29 | B28 | B15 |
      (s << 26) |                   // Sign bit goes to bit 26.
      ((offset >> 1) & 0x7ff);      // imm11 goes to bits 0-10.
  if (cond != AL) {
    DCHECK(IsInt<21>(offset));
    // Encode cond, move imm6 from bits 12-17 to bits 16-21 and move J1 and J2.
    encoding |= (static_cast<int32_t>(cond) << 22) | ((offset & 0x3f000) << (16 - 12)) |
        ((offset & (1 << 19)) >> (19 - 13)) |  // Extract J1 from bit 19 to bit 13.
        ((offset & (1 << 18)) >> (18 - 11));   // Extract J2 from bit 18 to bit 11.
  } else {
    DCHECK(IsInt<25>(offset));
    // For the unconditional form, J1/J2 are I1/I2 XORed with the inverted sign.
    int32_t j1 = ((offset >> 23) ^ s ^ 1) & 1;  // Calculate J1 from I1 extracted from bit 23.
    int32_t j2 = ((offset >> 22) ^ s ^ 1) & 1;  // Calculate J2 from I2 extracted from bit 22.
    // Move imm10 from bits 12-21 to bits 16-25 and add J1 and J2.
    encoding |= B12 | ((offset & 0x3ff000) << (16 - 12)) |
        (j1 << 13) | (j2 << 11);
  }
  return encoding;
}
205
// Encodes CBZ (cond == EQ) or CBNZ (cond == NE): 16-bit compare-with-zero and
// branch. Only low registers and small positive even offsets are encodable.
inline int16_t Thumb2Assembler::CbxzEncoding16(Register rn, int32_t offset, Condition cond) {
  DCHECK(!IsHighRegister(rn));
  DCHECK_ALIGNED(offset, 2);
  DCHECK(IsUint<7>(offset));
  DCHECK(cond == EQ || cond == NE);
  return B15 | B13 | B12 | B8 | (cond == NE ? B11 : 0) | static_cast<int32_t>(rn) |
      ((offset & 0x3e) << (3 - 1)) |  // Move imm5 from bits 1-5 to bits 3-7.
      ((offset & 0x40) << (9 - 6));   // Move i from bit 6 to bit 9.
}
215
216inline int16_t Thumb2Assembler::CmpRnImm8Encoding16(Register rn, int32_t value) {
217 DCHECK(!IsHighRegister(rn));
218 DCHECK(IsUint<8>(value));
219 return B13 | B11 | (rn << 8) | value;
220}
221
// Encodes the 16-bit two-register ADD (rdn += rm) that accepts high registers.
inline int16_t Thumb2Assembler::AddRdnRmEncoding16(Register rdn, Register rm) {
  // The high bit of rn is moved across 4-bit rm.
  return B14 | B10 | (static_cast<int32_t>(rm) << 3) |
      (static_cast<int32_t>(rdn) & 7) | ((static_cast<int32_t>(rdn) & 8) << 4);
}
227
// Encodes MOVW: load a 16-bit immediate into rd, zero-extended. The immediate
// is split across the imm4:i:imm3:imm8 fields of the 32-bit encoding.
inline int32_t Thumb2Assembler::MovwEncoding32(Register rd, int32_t value) {
  DCHECK(IsUint<16>(value));
  return B31 | B30 | B29 | B28 | B25 | B22 |
      (static_cast<int32_t>(rd) << 8) |
      ((value & 0xf000) << (16 - 12)) |   // Move imm4 from bits 12-15 to bits 16-19.
      ((value & 0x0800) << (26 - 11)) |   // Move i from bit 11 to bit 26.
      ((value & 0x0700) << (12 - 8)) |    // Move imm3 from bits 8-10 to bits 12-14.
      (value & 0xff);                     // Keep imm8 in bits 0-7.
}
237
// Encodes MOVT: write the high half-word of `value` into the top 16 bits of
// rd. `value` must have a zero low half-word; the field layout is the same as
// MOVW with two extra opcode bits.
inline int32_t Thumb2Assembler::MovtEncoding32(Register rd, int32_t value) {
  DCHECK_EQ(value & 0xffff, 0);
  int32_t movw_encoding = MovwEncoding32(rd, (value >> 16) & 0xffff);
  return movw_encoding | B25 | B23;
}
243
// Encodes MOV rd, #imm using a Thumb2 modified immediate. The caller must have
// verified the value is representable (the DCHECK re-validates).
inline int32_t Thumb2Assembler::MovModImmEncoding32(Register rd, int32_t value) {
  uint32_t mod_imm = ModifiedImmediate(value);
  DCHECK_NE(mod_imm, kInvalidModifiedImmediate);
  return B31 | B30 | B29 | B28 | B22 | B19 | B18 | B17 | B16 |
      (static_cast<int32_t>(rd) << 8) | static_cast<int32_t>(mod_imm);
}
250
// Encodes the 16-bit PC-relative literal load (LDR rt, [pc, #offset]).
// Low registers only; offset is a positive multiple of 4, stored as offset/4.
inline int16_t Thumb2Assembler::LdrLitEncoding16(Register rt, int32_t offset) {
  DCHECK(!IsHighRegister(rt));
  DCHECK_ALIGNED(offset, 4);
  DCHECK(IsUint<10>(offset));
  return B14 | B11 | (static_cast<int32_t>(rt) << 8) | (offset >> 2);
}
257
// Encodes the 32-bit PC-relative literal load as an LDR with imm12 and PC base.
inline int32_t Thumb2Assembler::LdrLitEncoding32(Register rt, int32_t offset) {
  // NOTE: We don't support negative offset, i.e. U=0 (B23).
  return LdrRtRnImm12Encoding(rt, PC, offset);
}
262
// Encodes LDRD rt, rt2, [rn, #offset]: offset-addressing (P=1, W=0), positive
// offset only (U=1), stored as offset/4 in imm8.
inline int32_t Thumb2Assembler::LdrdEncoding32(Register rt, Register rt2, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 |
      B24 /* P = 1 */ | B23 /* U = 1 */ | B22 | 0 /* W = 0 */ | B20 |
      (static_cast<int32_t>(rn) << 16) | (static_cast<int32_t>(rt) << 12) |
      (static_cast<int32_t>(rt2) << 8) | (offset >> 2);
}
271
// Encodes VLDR sd, [rn, #offset] (single precision). Positive offset only
// (U=1), stored as offset/4; the S register number splits into Vd:D fields.
inline int32_t Thumb2Assembler::VldrsEncoding32(SRegister sd, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 | B26 | B24 |
      B23 /* U = 1 */ | B20 | B11 | B9 |
      (static_cast<int32_t>(rn) << 16) |
      ((static_cast<int32_t>(sd) & 0x01) << (22 - 0)) |   // Move D from bit 0 to bit 22.
      ((static_cast<int32_t>(sd) & 0x1e) << (12 - 1)) |   // Move Vd from bits 1-4 to bits 12-15.
      (offset >> 2);
}
282
// Encodes VLDR dd, [rn, #offset] (double precision). Positive offset only
// (U=1), stored as offset/4; the D register number splits into D:Vd fields.
inline int32_t Thumb2Assembler::VldrdEncoding32(DRegister dd, Register rn, int32_t offset) {
  DCHECK_ALIGNED(offset, 4);
  CHECK(IsUint<10>(offset));
  return B31 | B30 | B29 | B27 | B26 | B24 |
      B23 /* U = 1 */ | B20 | B11 | B9 | B8 |
      (rn << 16) |
      ((static_cast<int32_t>(dd) & 0x10) << (22 - 4)) |   // Move D from bit 4 to bit 22.
      ((static_cast<int32_t>(dd) & 0x0f) << (12 - 0)) |   // Move Vd from bits 0-3 to bits 12-15.
      (offset >> 2);
}
293
// Encodes the 16-bit LDR rt, [rn, #offset] with a 5-bit scaled immediate.
// Low registers only; offset is a multiple of 4 in [0, 124].
inline int16_t Thumb2Assembler::LdrRtRnImm5Encoding16(Register rt, Register rn, int32_t offset) {
  DCHECK(!IsHighRegister(rt));
  DCHECK(!IsHighRegister(rn));
  DCHECK_ALIGNED(offset, 4);
  DCHECK(IsUint<7>(offset));
  return B14 | B13 | B11 |
      (static_cast<int32_t>(rn) << 3) | static_cast<int32_t>(rt) |
      (offset << (6 - 2));  // Move imm5 from bits 2-6 to bits 6-10.
}
303
// Selects the wide/FP literal-load encoding matching this Fixup's type,
// using `rbase` as the base register and `offset` as the immediate.
// Aborts on any other Fixup type.
int32_t Thumb2Assembler::Fixup::LoadWideOrFpEncoding(Register rbase, int32_t offset) const {
  switch (type_) {
    case kLoadLiteralWide:
      return LdrdEncoding32(rn_, rt2_, rbase, offset);
    case kLoadFPLiteralSingle:
      return VldrsEncoding32(sd_, rbase, offset);
    case kLoadFPLiteralDouble:
      return VldrdEncoding32(dd_, rbase, offset);
    default:
      LOG(FATAL) << "Unexpected type: " << static_cast<int>(type_);
      UNREACHABLE();
  }
}
317
// Encodes the 32-bit LDR rt, [rn, #offset] with an unscaled 12-bit immediate.
inline int32_t Thumb2Assembler::LdrRtRnImm12Encoding(Register rt, Register rn, int32_t offset) {
  DCHECK(IsUint<12>(offset));
  return B31 | B30 | B29 | B28 | B27 | B23 | B22 | B20 | (rn << 16) | (rt << 12) | offset;
}
322
// Finalization pipeline: bind literal labels past the current code, grow any
// fixups that need wider encodings, rewrite the code with the final fixup
// instructions, then append the (4-aligned) literal pool.
void Thumb2Assembler::FinalizeCode() {
  ArmAssembler::FinalizeCode();
  BindLiterals();
  uint32_t adjusted_code_size = AdjustFixups();
  EmitFixups(adjusted_code_size);
  EmitLiterals();
}
330
Nicolas Geoffray3d1e7882015-02-03 13:59:52 +0000331bool Thumb2Assembler::ShifterOperandCanHold(Register rd ATTRIBUTE_UNUSED,
332 Register rn ATTRIBUTE_UNUSED,
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +0000333 Opcode opcode,
334 uint32_t immediate,
335 ShifterOperand* shifter_op) {
336 shifter_op->type_ = ShifterOperand::kImmediate;
337 shifter_op->immed_ = immediate;
338 shifter_op->is_shift_ = false;
339 shifter_op->is_rotate_ = false;
340 switch (opcode) {
341 case ADD:
342 case SUB:
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +0000343 if (immediate < (1 << 12)) { // Less than (or equal to) 12 bits can always be done.
344 return true;
345 }
346 return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
347
348 case MOV:
349 // TODO: Support less than or equal to 12bits.
350 return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
351 case MVN:
352 default:
353 return ArmAssembler::ModifiedImmediate(immediate) != kInvalidModifiedImmediate;
354 }
355}
356
// Data-processing wrappers. Each delegates to
// EmitDataProcessing(cond, opcode, set_cc, rn, rd, so); the 0/1 third argument
// selects the flag-setting "s" variant (compare add/adds, rsb/rsbs below).
void Thumb2Assembler::and_(Register rd, Register rn, const ShifterOperand& so,
                           Condition cond) {
  EmitDataProcessing(cond, AND, 0, rn, rd, so);
}


void Thumb2Assembler::eor(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond) {
  EmitDataProcessing(cond, EOR, 0, rn, rd, so);
}


void Thumb2Assembler::sub(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond) {
  EmitDataProcessing(cond, SUB, 0, rn, rd, so);
}


void Thumb2Assembler::rsb(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond) {
  EmitDataProcessing(cond, RSB, 0, rn, rd, so);
}


void Thumb2Assembler::rsbs(Register rd, Register rn, const ShifterOperand& so,
                           Condition cond) {
  EmitDataProcessing(cond, RSB, 1, rn, rd, so);
}


void Thumb2Assembler::add(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond) {
  EmitDataProcessing(cond, ADD, 0, rn, rd, so);
}


void Thumb2Assembler::adds(Register rd, Register rn, const ShifterOperand& so,
                           Condition cond) {
  EmitDataProcessing(cond, ADD, 1, rn, rd, so);
}


void Thumb2Assembler::subs(Register rd, Register rn, const ShifterOperand& so,
                           Condition cond) {
  EmitDataProcessing(cond, SUB, 1, rn, rd, so);
}


void Thumb2Assembler::adc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond) {
  EmitDataProcessing(cond, ADC, 0, rn, rd, so);
}


void Thumb2Assembler::sbc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond) {
  EmitDataProcessing(cond, SBC, 0, rn, rd, so);
}


void Thumb2Assembler::rsc(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond) {
  EmitDataProcessing(cond, RSC, 0, rn, rd, so);
}
421
422
// Compare/test wrappers: these opcodes write no result register, so flags are
// always set (set_cc = 1) and R0 is passed as a dummy destination.
void Thumb2Assembler::tst(Register rn, const ShifterOperand& so, Condition cond) {
  CHECK_NE(rn, PC);  // Reserve tst pc instruction for exception handler marker.
  EmitDataProcessing(cond, TST, 1, rn, R0, so);
}


void Thumb2Assembler::teq(Register rn, const ShifterOperand& so, Condition cond) {
  CHECK_NE(rn, PC);  // Reserve teq pc instruction for exception handler marker.
  EmitDataProcessing(cond, TEQ, 1, rn, R0, so);
}


void Thumb2Assembler::cmp(Register rn, const ShifterOperand& so, Condition cond) {
  EmitDataProcessing(cond, CMP, 1, rn, R0, so);
}


void Thumb2Assembler::cmn(Register rn, const ShifterOperand& so, Condition cond) {
  EmitDataProcessing(cond, CMN, 1, rn, R0, so);
}
443
444
// Logical/move wrappers. MOV and MVN take no first operand, so R0 is passed
// as a dummy rn; the 0/1 argument again selects the flag-setting "s" variant.
void Thumb2Assembler::orr(Register rd, Register rn,
                          const ShifterOperand& so, Condition cond) {
  EmitDataProcessing(cond, ORR, 0, rn, rd, so);
}


void Thumb2Assembler::orrs(Register rd, Register rn,
                           const ShifterOperand& so, Condition cond) {
  EmitDataProcessing(cond, ORR, 1, rn, rd, so);
}


void Thumb2Assembler::mov(Register rd, const ShifterOperand& so, Condition cond) {
  EmitDataProcessing(cond, MOV, 0, R0, rd, so);
}


void Thumb2Assembler::movs(Register rd, const ShifterOperand& so, Condition cond) {
  EmitDataProcessing(cond, MOV, 1, R0, rd, so);
}


void Thumb2Assembler::bic(Register rd, Register rn, const ShifterOperand& so,
                          Condition cond) {
  EmitDataProcessing(cond, BIC, 0, rn, rd, so);
}


void Thumb2Assembler::mvn(Register rd, const ShifterOperand& so, Condition cond) {
  EmitDataProcessing(cond, MVN, 0, R0, rd, so);
}


void Thumb2Assembler::mvns(Register rd, const ShifterOperand& so, Condition cond) {
  EmitDataProcessing(cond, MVN, 1, R0, rd, so);
}
481
482
// MUL: uses the 16-bit encoding when possible (requires rd == rm, both
// operands in low registers, and 32-bit mode not forced), otherwise the
// 32-bit encoding.
void Thumb2Assembler::mul(Register rd, Register rn, Register rm, Condition cond) {
  CheckCondition(cond);

  if (rd == rm && !IsHighRegister(rd) && !IsHighRegister(rn) && !force_32bit_) {
    // 16 bit.
    int16_t encoding = B14 | B9 | B8 | B6 |
        rn << 3 | rd;
    Emit16(encoding);
  } else {
    // 32 bit.
    uint32_t op1 = 0U /* 0b000 */;
    uint32_t op2 = 0U /* 0b00 */;
    int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
        op1 << 20 |
        B15 | B14 | B13 | B12 |
        op2 << 4 |
        static_cast<uint32_t>(rd) << 8 |
        static_cast<uint32_t>(rn) << 16 |
        static_cast<uint32_t>(rm);

    Emit32(encoding);
  }
}
506
507
508void Thumb2Assembler::mla(Register rd, Register rn, Register rm, Register ra,
509 Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700510 CheckCondition(cond);
511
Andreas Gampec8ccf682014-09-29 20:07:43 -0700512 uint32_t op1 = 0U /* 0b000 */;
513 uint32_t op2 = 0U /* 0b00 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700514 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
515 op1 << 20 |
516 op2 << 4 |
517 static_cast<uint32_t>(rd) << 8 |
518 static_cast<uint32_t>(ra) << 12 |
519 static_cast<uint32_t>(rn) << 16 |
520 static_cast<uint32_t>(rm);
521
522 Emit32(encoding);
523}
524
525
526void Thumb2Assembler::mls(Register rd, Register rn, Register rm, Register ra,
527 Condition cond) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700528 CheckCondition(cond);
529
Andreas Gampec8ccf682014-09-29 20:07:43 -0700530 uint32_t op1 = 0U /* 0b000 */;
531 uint32_t op2 = 01 /* 0b01 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -0700532 int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 |
533 op1 << 20 |
534 op2 << 4 |
535 static_cast<uint32_t>(rd) << 8 |
536 static_cast<uint32_t>(ra) << 12 |
537 static_cast<uint32_t>(rn) << 16 |
538 static_cast<uint32_t>(rm);
539
540 Emit32(encoding);
541}
542
543
// SMULL: signed 32x32 -> 64-bit multiply; result in rd_lo (low) and rd_hi (high).
void Thumb2Assembler::smull(Register rd_lo, Register rd_hi, Register rn,
                            Register rm, Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 0U /* 0b000; */;
  uint32_t op2 = 0U /* 0b0000 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 |
      op1 << 20 |
      op2 << 4 |
      static_cast<uint32_t>(rd_lo) << 12 |
      static_cast<uint32_t>(rd_hi) << 8 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
560
561
// UMULL: unsigned 32x32 -> 64-bit multiply; result in rd_lo (low) and rd_hi
// (high). Same layout as SMULL, distinguished by op1.
void Thumb2Assembler::umull(Register rd_lo, Register rd_hi, Register rn,
                            Register rm, Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 2U /* 0b010; */;
  uint32_t op2 = 0U /* 0b0000 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 |
      op1 << 20 |
      op2 << 4 |
      static_cast<uint32_t>(rd_lo) << 12 |
      static_cast<uint32_t>(rd_hi) << 8 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
578
579
// SDIV: signed divide, rd = rn / rm. Bits 12-15 are required to be all ones.
void Thumb2Assembler::sdiv(Register rd, Register rn, Register rm, Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 1U /* 0b001 */;
  uint32_t op2 = 15U /* 0b1111 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 | B20 |
      op1 << 20 |
      op2 << 4 |
      0xf << 12 |
      static_cast<uint32_t>(rd) << 8 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
595
596
// UDIV: unsigned divide, rd = rn / rm. Distinguished from SDIV by B21.
void Thumb2Assembler::udiv(Register rd, Register rn, Register rm, Condition cond) {
  CheckCondition(cond);

  uint32_t op1 = 1U /* 0b001 */;
  uint32_t op2 = 15U /* 0b1111 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 | B24 | B23 | B21 | B20 |
      op1 << 20 |
      op2 << 4 |
      0xf << 12 |
      static_cast<uint32_t>(rd) << 8 |
      static_cast<uint32_t>(rn) << 16 |
      static_cast<uint32_t>(rm);

  Emit32(encoding);
}
612
613
// SBFX: signed bit-field extract of `width` bits starting at `lsb` from rn
// into rd. The lsb is split across the imm3:imm2 fields; the encoding stores
// width - 1.
void Thumb2Assembler::sbfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
  CheckCondition(cond);
  CHECK_LE(lsb, 31U);
  CHECK(1U <= width && width <= 32U) << width;
  uint32_t widthminus1 = width - 1;
  uint32_t imm2 = lsb & (B1 | B0);              // Bits 0-1 of `lsb`.
  uint32_t imm3 = (lsb & (B4 | B3 | B2)) >> 2;  // Bits 2-4 of `lsb`.

  uint32_t op = 20U /* 0b10100 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B25 |
      op << 20 |
      static_cast<uint32_t>(rn) << 16 |
      imm3 << 12 |
      static_cast<uint32_t>(rd) << 8 |
      imm2 << 6 |
      widthminus1;

  Emit32(encoding);
}
633
634
// UBFX: unsigned bit-field extract of `width` bits starting at `lsb` from rn
// into rd. Identical layout to SBFX with a different op value.
void Thumb2Assembler::ubfx(Register rd, Register rn, uint32_t lsb, uint32_t width, Condition cond) {
  CheckCondition(cond);
  CHECK_LE(lsb, 31U);
  CHECK(1U <= width && width <= 32U) << width;
  uint32_t widthminus1 = width - 1;
  uint32_t imm2 = lsb & (B1 | B0);              // Bits 0-1 of `lsb`.
  uint32_t imm3 = (lsb & (B4 | B3 | B2)) >> 2;  // Bits 2-4 of `lsb`.

  uint32_t op = 28U /* 0b11100 */;
  int32_t encoding = B31 | B30 | B29 | B28 | B25 |
      op << 20 |
      static_cast<uint32_t>(rn) << 16 |
      imm3 << 12 |
      static_cast<uint32_t>(rd) << 8 |
      imm2 << 6 |
      widthminus1;

  Emit32(encoding);
}
654
655
// Load/store wrappers. Each delegates to
// EmitLoadStore(cond, load, byte, half, is_signed, rd, ad); the four booleans
// select load vs. store and the access size/signedness (compare the variants
// below: ldrb sets `byte`, ldrh sets `half`, ldrsb/ldrsh also set `is_signed`).
void Thumb2Assembler::ldr(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, false, false, false, rd, ad);
}


void Thumb2Assembler::str(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, false, false, false, false, rd, ad);
}


void Thumb2Assembler::ldrb(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, true, false, false, rd, ad);
}


void Thumb2Assembler::strb(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, false, true, false, false, rd, ad);
}


void Thumb2Assembler::ldrh(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, false, true, false, rd, ad);
}


void Thumb2Assembler::strh(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, false, false, true, false, rd, ad);
}


void Thumb2Assembler::ldrsb(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, true, false, true, rd, ad);
}


void Thumb2Assembler::ldrsh(Register rd, const Address& ad, Condition cond) {
  EmitLoadStore(cond, true, false, true, true, rd, ad);
}
694
695
// LDRD with an implicit second register: rd2 = rd + 1.
void Thumb2Assembler::ldrd(Register rd, const Address& ad, Condition cond) {
  ldrd(rd, Register(rd + 1), ad, cond);
}


// LDRD rd, rd2, [ad]: doubleword load.
void Thumb2Assembler::ldrd(Register rd, Register rd2, const Address& ad, Condition cond) {
  CheckCondition(cond);
  // Encoding T1.
  // This is different from other loads. The encoding is like ARM.
  int32_t encoding = B31 | B30 | B29 | B27 | B22 | B20 |
      static_cast<int32_t>(rd) << 12 |
      static_cast<int32_t>(rd2) << 8 |
      ad.encodingThumbLdrdStrd();
  Emit32(encoding);
}
711
712
// STRD with an implicit second register: rd2 = rd + 1.
void Thumb2Assembler::strd(Register rd, const Address& ad, Condition cond) {
  strd(rd, Register(rd + 1), ad, cond);
}


// STRD rd, rd2, [ad]: doubleword store. Same layout as ldrd without the
// load bit (B20).
void Thumb2Assembler::strd(Register rd, Register rd2, const Address& ad, Condition cond) {
  CheckCondition(cond);
  // Encoding T1.
  // This is different from other loads. The encoding is like ARM.
  int32_t encoding = B31 | B30 | B29 | B27 | B22 |
      static_cast<int32_t>(rd) << 12 |
      static_cast<int32_t>(rd2) << 8 |
      ad.encodingThumbLdrdStrd();
  Emit32(encoding);
}
728
729
// Load-multiple. Thumb LDM cannot encode a single-register list, so that case
// is lowered to an equivalent single LDR.
void Thumb2Assembler::ldm(BlockAddressMode am,
                          Register base,
                          RegList regs,
                          Condition cond) {
  CHECK_NE(regs, 0u);  // Do not use ldm if there's nothing to load.
  if (IsPowerOfTwo(regs)) {  // Exactly one register in the list.
    // Thumb doesn't support one reg in the list.
    // Find the register number.
    int reg = CTZ(static_cast<uint32_t>(regs));
    CHECK_LT(reg, 16);
    CHECK(am == DB_W);  // Only writeback is supported.
    // NOTE(review): the fallback emits a post-indexed LDR (load at [base],
    // then base += 4) — confirm this matches the semantics callers expect
    // from the checked DB_W block mode.
    ldr(static_cast<Register>(reg), Address(base, kRegisterSize, Address::PostIndex), cond);
  } else {
    EmitMultiMemOp(cond, am, true, base, regs);
  }
}
746
747
// Store-multiple. Thumb STM cannot encode a single-register list, so that
// case is lowered to an equivalent single STR.
void Thumb2Assembler::stm(BlockAddressMode am,
                          Register base,
                          RegList regs,
                          Condition cond) {
  CHECK_NE(regs, 0u);  // Do not use stm if there's nothing to store.
  if (IsPowerOfTwo(regs)) {  // Exactly one register in the list.
    // Thumb doesn't support one reg in the list.
    // Find the register number.
    int reg = CTZ(static_cast<uint32_t>(regs));
    CHECK_LT(reg, 16);
    CHECK(am == IA || am == IA_W);
    // NOTE(review): both modes store at [base - 4] (pre-indexed with writeback
    // for IA, plain offset for IA_W) — confirm this matches the semantics
    // callers expect from the checked IA/IA_W block modes.
    Address::Mode strmode = am == IA ? Address::PreIndex : Address::Offset;
    str(static_cast<Register>(reg), Address(base, -kRegisterSize, strmode), cond);
  } else {
    EmitMultiMemOp(cond, am, false, base, regs);
  }
}
765
766
// Tries to emit a single-precision VMOV of an immediate. Only values whose bit
// pattern fits the VFP 8-bit immediate form (low 19 mantissa bits zero, bits
// 25-30 equal to 0b100000 or 0b011111) are encodable; returns false and emits
// nothing otherwise.
bool Thumb2Assembler::vmovs(SRegister sd, float s_imm, Condition cond) {
  uint32_t imm32 = bit_cast<uint32_t, float>(s_imm);
  if (((imm32 & ((1 << 19) - 1)) == 0) &&
      ((((imm32 >> 25) & ((1 << 6) - 1)) == (1 << 5)) ||
       (((imm32 >> 25) & ((1 << 6) - 1)) == ((1 << 5) -1)))) {
    // Pack sign (bit 31), bit 29, and bits 19-24 into the 8-bit immediate.
    uint8_t imm8 = ((imm32 >> 31) << 7) | (((imm32 >> 29) & 1) << 6) |
        ((imm32 >> 19) & ((1 << 6) -1));
    EmitVFPsss(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | (imm8 & 0xf),
               sd, S0, S0);
    return true;
  }
  return false;
}
780
781
// Tries to emit a double-precision VMOV of an immediate. Only values whose bit
// pattern fits the VFP 8-bit immediate form (low 48 mantissa bits zero, bits
// 54-62 equal to 0b100000000 or 0b011111111) are encodable; returns false and
// emits nothing otherwise.
bool Thumb2Assembler::vmovd(DRegister dd, double d_imm, Condition cond) {
  uint64_t imm64 = bit_cast<uint64_t, double>(d_imm);
  if (((imm64 & ((1LL << 48) - 1)) == 0) &&
      ((((imm64 >> 54) & ((1 << 9) - 1)) == (1 << 8)) ||
       (((imm64 >> 54) & ((1 << 9) - 1)) == ((1 << 8) -1)))) {
    // Pack sign (bit 63), bit 61, and bits 48-53 into the 8-bit immediate.
    uint8_t imm8 = ((imm64 >> 63) << 7) | (((imm64 >> 61) & 1) << 6) |
        ((imm64 >> 48) & ((1 << 6) -1));
    EmitVFPddd(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | B8 | (imm8 & 0xf),
               dd, D0, D0);
    return true;
  }
  return false;
}
795
796
797void Thumb2Assembler::vmovs(SRegister sd, SRegister sm, Condition cond) {
798 EmitVFPsss(cond, B23 | B21 | B20 | B6, sd, S0, sm);
799}
800
801
802void Thumb2Assembler::vmovd(DRegister dd, DRegister dm, Condition cond) {
803 EmitVFPddd(cond, B23 | B21 | B20 | B6, dd, D0, dm);
804}
805
806
807void Thumb2Assembler::vadds(SRegister sd, SRegister sn, SRegister sm,
808 Condition cond) {
809 EmitVFPsss(cond, B21 | B20, sd, sn, sm);
810}
811
812
813void Thumb2Assembler::vaddd(DRegister dd, DRegister dn, DRegister dm,
814 Condition cond) {
815 EmitVFPddd(cond, B21 | B20, dd, dn, dm);
816}
817
818
819void Thumb2Assembler::vsubs(SRegister sd, SRegister sn, SRegister sm,
820 Condition cond) {
821 EmitVFPsss(cond, B21 | B20 | B6, sd, sn, sm);
822}
823
824
825void Thumb2Assembler::vsubd(DRegister dd, DRegister dn, DRegister dm,
826 Condition cond) {
827 EmitVFPddd(cond, B21 | B20 | B6, dd, dn, dm);
828}
829
830
831void Thumb2Assembler::vmuls(SRegister sd, SRegister sn, SRegister sm,
832 Condition cond) {
833 EmitVFPsss(cond, B21, sd, sn, sm);
834}
835
836
837void Thumb2Assembler::vmuld(DRegister dd, DRegister dn, DRegister dm,
838 Condition cond) {
839 EmitVFPddd(cond, B21, dd, dn, dm);
840}
841
842
843void Thumb2Assembler::vmlas(SRegister sd, SRegister sn, SRegister sm,
844 Condition cond) {
845 EmitVFPsss(cond, 0, sd, sn, sm);
846}
847
848
// Emits VMLA.F64 dd, dn, dm (double-precision multiply-accumulate: dd += dn * dm).
void Thumb2Assembler::vmlad(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, 0, dd, dn, dm);
}
853
854
// Emits VMLS.F32 sd, sn, sm (single-precision multiply-subtract: sd -= sn * sm).
void Thumb2Assembler::vmlss(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B6, sd, sn, sm);
}
859
860
// Emits VMLS.F64 dd, dn, dm (double-precision multiply-subtract: dd -= dn * dm).
void Thumb2Assembler::vmlsd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B6, dd, dn, dm);
}
865
866
// Emits VDIV.F32 sd, sn, sm (single-precision divide).
void Thumb2Assembler::vdivs(SRegister sd, SRegister sn, SRegister sm,
                            Condition cond) {
  EmitVFPsss(cond, B23, sd, sn, sm);
}
871
872
// Emits VDIV.F64 dd, dn, dm (double-precision divide).
void Thumb2Assembler::vdivd(DRegister dd, DRegister dn, DRegister dm,
                            Condition cond) {
  EmitVFPddd(cond, B23, dd, dn, dm);
}
877
878
// Emits VABS.F32 sd, sm (single-precision absolute value).
void Thumb2Assembler::vabss(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B7 | B6, sd, S0, sm);
}
882
883
// Emits VABS.F64 dd, dm (double-precision absolute value).
void Thumb2Assembler::vabsd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B7 | B6, dd, D0, dm);
}
887
888
// Emits VNEG.F32 sd, sm (single-precision negate).
void Thumb2Assembler::vnegs(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B16 | B6, sd, S0, sm);
}
892
893
// Emits VNEG.F64 dd, dm (double-precision negate).
void Thumb2Assembler::vnegd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B16 | B6, dd, D0, dm);
}
897
898
// Emits VSQRT.F32 sd, sm (single-precision square root).
void Thumb2Assembler::vsqrts(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B16 | B7 | B6, sd, S0, sm);
}
902
// Emits VSQRT.F64 dd, dm (double-precision square root).
void Thumb2Assembler::vsqrtd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B16 | B7 | B6, dd, D0, dm);
}
906
907
// Emits VCVT.F32.F64 sd, dm (double to single precision).
void Thumb2Assembler::vcvtsd(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B18 | B17 | B16 | B8 | B7 | B6, sd, dm);
}
911
912
// Emits VCVT.F64.F32 dd, sm (single to double precision).
void Thumb2Assembler::vcvtds(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B18 | B17 | B16 | B7 | B6, dd, sm);
}
916
917
// Emits VCVT.S32.F32 sd, sm (single precision to signed 32-bit integer).
void Thumb2Assembler::vcvtis(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B16 | B7 | B6, sd, S0, sm);
}
921
922
// Emits VCVT.S32.F64 sd, dm (double precision to signed 32-bit integer).
void Thumb2Assembler::vcvtid(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B16 | B8 | B7 | B6, sd, dm);
}
926
927
// Emits VCVT.F32.S32 sd, sm (signed 32-bit integer to single precision).
void Thumb2Assembler::vcvtsi(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B7 | B6, sd, S0, sm);
}
931
932
// Emits VCVT.F64.S32 dd, sm (signed 32-bit integer to double precision).
void Thumb2Assembler::vcvtdi(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B7 | B6, dd, sm);
}
936
937
// Emits VCVT.U32.F32 sd, sm (single precision to unsigned 32-bit integer).
void Thumb2Assembler::vcvtus(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B7 | B6, sd, S0, sm);
}
941
942
// Emits VCVT.U32.F64 sd, dm (double precision to unsigned 32-bit integer).
void Thumb2Assembler::vcvtud(SRegister sd, DRegister dm, Condition cond) {
  EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B8 | B7 | B6, sd, dm);
}
946
947
// Emits VCVT.F32.U32 sd, sm (unsigned 32-bit integer to single precision).
void Thumb2Assembler::vcvtsu(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B19 | B6, sd, S0, sm);
}
951
952
// Emits VCVT.F64.U32 dd, sm (unsigned 32-bit integer to double precision).
void Thumb2Assembler::vcvtdu(DRegister dd, SRegister sm, Condition cond) {
  EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B6, dd, sm);
}
956
957
// Emits VCMP.F32 sd, sm (single-precision compare, sets FPSCR flags).
void Thumb2Assembler::vcmps(SRegister sd, SRegister sm, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B18 | B6, sd, S0, sm);
}
961
962
// Emits VCMP.F64 dd, dm (double-precision compare, sets FPSCR flags).
void Thumb2Assembler::vcmpd(DRegister dd, DRegister dm, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B18 | B6, dd, D0, dm);
}
966
967
// Emits VCMP.F32 sd, #0.0 (single-precision compare against zero).
void Thumb2Assembler::vcmpsz(SRegister sd, Condition cond) {
  EmitVFPsss(cond, B23 | B21 | B20 | B18 | B16 | B6, sd, S0, S0);
}
971
972
// Emits VCMP.F64 dd, #0.0 (double-precision compare against zero).
void Thumb2Assembler::vcmpdz(DRegister dd, Condition cond) {
  EmitVFPddd(cond, B23 | B21 | B20 | B18 | B16 | B6, dd, D0, D0);
}
976
// Emits a (possibly conditional) branch to |label|.
void Thumb2Assembler::b(Label* label, Condition cond) {
  // NOTE(review): presumably guards against emitting a conditional branch
  // while an IT block condition is pending - confirm next_condition_ usage.
  DCHECK_EQ(next_condition_, AL);
  EmitBranch(cond, label, false, false);
}
981
982
// Emits a branch-with-link (call) to |label|.
void Thumb2Assembler::bl(Label* label, Condition cond) {
  CheckCondition(cond);
  EmitBranch(cond, label, true, false);
}
987
988
// Emits a branch-with-link-and-exchange (interworking call) to |label|.
void Thumb2Assembler::blx(Label* label) {
  EmitBranch(AL, label, true, true);
}
992
993
// Marks |label| as an exception handler entry point: emits a TST pc, #0
// marker instruction, then branches over an (unexecuted) branch to |label|
// that records the handler's address in the instruction stream.
// NOTE(review): the marker's meaning is defined by the runtime that scans
// the generated code - confirm against its consumers before changing.
void Thumb2Assembler::MarkExceptionHandler(Label* label) {
  EmitDataProcessing(AL, TST, 1, PC, R0, ShifterOperand(0));
  Label l;
  b(&l);
  EmitBranch(AL, label, false, false);
  Bind(&l);
}
1001
1002
1003void Thumb2Assembler::Emit32(int32_t value) {
1004 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1005 buffer_.Emit<int16_t>(value >> 16);
1006 buffer_.Emit<int16_t>(value & 0xffff);
1007}
1008
1009
// Appends a single 16-bit instruction halfword to the buffer.
void Thumb2Assembler::Emit16(int16_t value) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  buffer_.Emit<int16_t>(value);
}
1014
1015
// Returns true if the data-processing instruction described by the operands
// requires a 32-bit Thumb2 encoding; returns false when a 16-bit Thumb1
// encoding is possible. Emit16BitDataProcessing/Emit16BitAddSub rely on the
// constraints checked here.
bool Thumb2Assembler::Is32BitDataProcessing(Condition cond ATTRIBUTE_UNUSED,
                                            Opcode opcode,
                                            bool set_cc,
                                            Register rn,
                                            Register rd,
                                            const ShifterOperand& so) {
  // force_32bit_ makes every instruction use the wide encoding.
  if (force_32bit_) {
    return true;
  }

  // Check special case for SP relative ADD and SUB immediate.
  if ((opcode == ADD || opcode == SUB) && rn == SP && so.IsImmediate()) {
    // If the immediate is in range, use 16 bit.
    if (rd == SP) {
      if (so.GetImmediate() < (1 << 9)) {  // 9 bit immediate.
        return false;
      }
    } else if (!IsHighRegister(rd) && opcode == ADD) {
      if (so.GetImmediate() < (1 << 10)) {  // 10 bit immediate.
        return false;
      }
    }
  }

  // Only MOV and non-flag-setting ADD with Rn == Rd have 16-bit forms that
  // accept high registers (R8-R15).
  bool can_contain_high_register = (opcode == MOV)
      || ((opcode == ADD) && (rn == rd) && !set_cc);

  if (IsHighRegister(rd) || IsHighRegister(rn)) {
    if (!can_contain_high_register) {
      return true;
    }

    // There are high register instructions available for this opcode.
    // However, there is no actual shift available, neither for ADD nor for MOV (ASR/LSR/LSL/ROR).
    if (so.IsShift() && (so.GetShift() == RRX || so.GetImmediate() != 0u)) {
      return true;
    }

    // The ADD and MOV instructions that work with high registers don't have 16-bit
    // immediate variants.
    if (so.IsImmediate()) {
      return true;
    }
  }

  if (so.IsRegister() && IsHighRegister(so.GetRegister()) && !can_contain_high_register) {
    return true;
  }

  bool rn_is_valid = true;

  // Check for single operand instructions and ADD/SUB.
  switch (opcode) {
    case CMP:
    case MOV:
    case TST:
    case MVN:
      rn_is_valid = false;  // There is no Rn for these instructions.
      break;
    case TEQ:
      // TEQ has no 16-bit encoding at all.
      return true;
    case ADD:
    case SUB:
      break;
    default:
      // Other two-register data-processing opcodes only have 16-bit forms
      // when they act in-place (Rd == Rn).
      if (so.IsRegister() && rd != rn) {
        return true;
      }
  }

  if (so.IsImmediate()) {
    if (rn_is_valid && rn != rd) {
      // The only thumb1 instruction with a register and an immediate are ADD and SUB.  The
      // immediate must be 3 bits.
      if (opcode != ADD && opcode != SUB) {
        return true;
      } else {
        // Check that the immediate is 3 bits for ADD and SUB.
        if (so.GetImmediate() >= 8) {
          return true;
        }
      }
    } else {
      // ADD, SUB, CMP and MOV may be thumb1 only if the immediate is 8 bits.
      if (!(opcode == ADD || opcode == SUB || opcode == MOV || opcode == CMP)) {
        return true;
      } else {
        if (so.GetImmediate() > 255) {
          return true;
        }
      }
    }
  }

  // Check for register shift operand.
  if (so.IsRegister() && so.IsShift()) {
    if (opcode != MOV) {
      return true;
    }
    // Check for MOV with an ROR.
    if (so.GetShift() == ROR) {
      if (so.GetImmediate() != 0) {
        return true;
      }
    }
  }

  // The instruction can be encoded in 16 bits.
  return false;
}
1126
1127
// Emits a data-processing instruction in its 32-bit Thumb2 encoding.
// The ARM opcode is first mapped to the 4-bit Thumb2 data-processing opcode.
// TST/TEQ/CMP/CMN are encoded as AND/EOR/SUB/ADD with Rd = PC and the S bit
// forced; MOV/MVN use Rn = PC.
void Thumb2Assembler::Emit32BitDataProcessing(Condition cond ATTRIBUTE_UNUSED,
                                              Opcode opcode,
                                              bool set_cc,
                                              Register rn,
                                              Register rd,
                                              const ShifterOperand& so) {
  uint8_t thumb_opcode = 255U /* 0b11111111 */;  // Sentinel: "not mapped".
  switch (opcode) {
    case AND: thumb_opcode = 0U /* 0b0000 */; break;
    case EOR: thumb_opcode = 4U /* 0b0100 */; break;
    case SUB: thumb_opcode = 13U /* 0b1101 */; break;
    case RSB: thumb_opcode = 14U /* 0b1110 */; break;
    case ADD: thumb_opcode = 8U /* 0b1000 */; break;
    case ADC: thumb_opcode = 10U /* 0b1010 */; break;
    case SBC: thumb_opcode = 11U /* 0b1011 */; break;
    case RSC: break;  // No Thumb2 encoding; falls through to the fatal check.
    case TST: thumb_opcode = 0U /* 0b0000 */; set_cc = true; rd = PC; break;
    case TEQ: thumb_opcode = 4U /* 0b0100 */; set_cc = true; rd = PC; break;
    case CMP: thumb_opcode = 13U /* 0b1101 */; set_cc = true; rd = PC; break;
    case CMN: thumb_opcode = 8U /* 0b1000 */; set_cc = true; rd = PC; break;
    case ORR: thumb_opcode = 2U /* 0b0010 */; break;
    case MOV: thumb_opcode = 2U /* 0b0010 */; rn = PC; break;
    case BIC: thumb_opcode = 1U /* 0b0001 */; break;
    case MVN: thumb_opcode = 3U /* 0b0011 */; rn = PC; break;
    default:
      break;
  }

  if (thumb_opcode == 255U /* 0b11111111 */) {
    LOG(FATAL) << "Invalid thumb2 opcode " << opcode;
    UNREACHABLE();
  }

  int32_t encoding = 0;
  if (so.IsImmediate()) {
    // Check special cases.
    // Non-flag-setting ADD/SUB with a 12-bit immediate use the plain-binary
    // ADDW/SUBW encodings instead of the modified immediate form.
    if ((opcode == SUB || opcode == ADD) && (so.GetImmediate() < (1u << 12))) {
      if (!set_cc) {
        if (opcode == SUB) {
          thumb_opcode = 5U;
        } else if (opcode == ADD) {
          thumb_opcode = 0U;
        }
      }
      uint32_t imm = so.GetImmediate();

      // Split the immediate into the i:imm3:imm8 fields of the encoding.
      uint32_t i = (imm >> 11) & 1;
      uint32_t imm3 = (imm >> 8) & 7U /* 0b111 */;
      uint32_t imm8 = imm & 0xff;

      encoding = B31 | B30 | B29 | B28 |
          (set_cc ? B20 : B25) |
          thumb_opcode << 21 |
          rn << 16 |
          rd << 8 |
          i << 26 |
          imm3 << 12 |
          imm8;
    } else {
      // Modified immediate.
      uint32_t imm = ModifiedImmediate(so.encodingThumb());
      if (imm == kInvalidModifiedImmediate) {
        LOG(FATAL) << "Immediate value cannot fit in thumb2 modified immediate";
        UNREACHABLE();
      }
      encoding = B31 | B30 | B29 | B28 |
          thumb_opcode << 21 |
          (set_cc ? B20 : 0) |
          rn << 16 |
          rd << 8 |
          imm;
    }
  } else if (so.IsRegister()) {
    // Register (possibly shifted)
    encoding = B31 | B30 | B29 | B27 | B25 |
        thumb_opcode << 21 |
        (set_cc ? B20 : 0) |
        rn << 16 |
        rd << 8 |
        so.encodingThumb();
  }
  Emit32(encoding);
}
1211
1212
// Emits a data-processing instruction in a 16-bit Thumb1 encoding.
// The caller (EmitDataProcessing) must have established via
// Is32BitDataProcessing that the narrow encoding is possible.
// ADD and SUB are delegated to Emit16BitAddSub.
void Thumb2Assembler::Emit16BitDataProcessing(Condition cond,
                                              Opcode opcode,
                                              bool set_cc,
                                              Register rn,
                                              Register rd,
                                              const ShifterOperand& so) {
  if (opcode == ADD || opcode == SUB) {
    Emit16BitAddSub(cond, opcode, set_cc, rn, rd, so);
    return;
  }
  uint8_t thumb_opcode = 255U /* 0b11111111 */;  // Sentinel: "not mapped".
  // Thumb1.
  uint8_t dp_opcode = 1U /* 0b01 */;
  uint8_t opcode_shift = 6;
  uint8_t rd_shift = 0;
  uint8_t rn_shift = 3;
  uint8_t immediate_shift = 0;
  bool use_immediate = false;
  uint8_t immediate = 0;

  if (opcode == MOV && so.IsRegister() && so.IsShift()) {
    // Convert shifted mov operand2 into 16 bit opcodes.
    dp_opcode = 0;
    opcode_shift = 11;

    use_immediate = true;
    immediate = so.GetImmediate();
    immediate_shift = 6;

    rn = so.GetRegister();

    switch (so.GetShift()) {
      case LSL: thumb_opcode = 0U /* 0b00 */; break;
      case LSR: thumb_opcode = 1U /* 0b01 */; break;
      case ASR: thumb_opcode = 2U /* 0b10 */; break;
      case ROR:
        // ROR doesn't allow immediates.
        thumb_opcode = 7U /* 0b111 */;
        dp_opcode = 1U /* 0b01 */;
        opcode_shift = 6;
        use_immediate = false;
        break;
      case RRX: break;
      default:
        break;
    }
  } else {
    if (so.IsImmediate()) {
      use_immediate = true;
      immediate = so.GetImmediate();
    } else {
      CHECK(!(so.IsRegister() && so.IsShift() && so.GetSecondRegister() != kNoRegister))
          << "No register-shifted register instruction available in thumb";
      // Adjust rn and rd: only two registers will be emitted.
      switch (opcode) {
        case AND:
        case ORR:
        case EOR:
        case RSB:
        case ADC:
        case SBC:
        case BIC: {
          // In-place ops: one of rd/rn must match; the operand register
          // becomes the second encoded register.
          if (rn == rd) {
            rn = so.GetRegister();
          } else {
            CHECK_EQ(rd, so.GetRegister());
          }
          break;
        }
        case CMP:
        case CMN: {
          CHECK_EQ(rd, 0);
          rd = rn;
          rn = so.GetRegister();
          break;
        }
        case TST:
        case TEQ:
        case MVN: {
          CHECK_EQ(rn, 0);
          rn = so.GetRegister();
          break;
        }
        default:
          break;
      }
    }

    switch (opcode) {
      case AND: thumb_opcode = 0U /* 0b0000 */; break;
      case ORR: thumb_opcode = 12U /* 0b1100 */; break;
      case EOR: thumb_opcode = 1U /* 0b0001 */; break;
      case RSB: thumb_opcode = 9U /* 0b1001 */; break;
      case ADC: thumb_opcode = 5U /* 0b0101 */; break;
      case SBC: thumb_opcode = 6U /* 0b0110 */; break;
      case BIC: thumb_opcode = 14U /* 0b1110 */; break;
      case TST: thumb_opcode = 8U /* 0b1000 */; CHECK(!use_immediate); break;
      case MVN: thumb_opcode = 15U /* 0b1111 */; CHECK(!use_immediate); break;
      case CMP: {
        if (use_immediate) {
          // T2 encoding.
          dp_opcode = 0;
          opcode_shift = 11;
          thumb_opcode = 5U /* 0b101 */;
          rd_shift = 8;
          rn_shift = 8;
        } else {
          thumb_opcode = 10U /* 0b1010 */;
        }

        break;
      }
      case CMN: {
        CHECK(!use_immediate);
        thumb_opcode = 11U /* 0b1011 */;
        break;
      }
      case MOV:
        dp_opcode = 0;
        if (use_immediate) {
          // T2 encoding.
          opcode_shift = 11;
          thumb_opcode = 4U /* 0b100 */;
          rd_shift = 8;
          rn_shift = 8;
        } else {
          rn = so.GetRegister();
          if (IsHighRegister(rn) || IsHighRegister(rd)) {
            // Special mov for high registers.
            dp_opcode = 1U /* 0b01 */;
            opcode_shift = 7;
            // Put the top bit of rd into the bottom bit of the opcode.
            thumb_opcode = 12U /* 0b0001100 */ | static_cast<uint32_t>(rd) >> 3;
            rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
          } else {
            thumb_opcode = 0;
          }
        }
        break;

      case TEQ:
      case RSC:
      default:
        LOG(FATAL) << "Invalid thumb1 opcode " << opcode;
        break;
    }
  }

  if (thumb_opcode == 255U /* 0b11111111 */) {
    LOG(FATAL) << "Invalid thumb1 opcode " << opcode;
    UNREACHABLE();
  }

  // Assemble the halfword from the fields computed above.
  int16_t encoding = dp_opcode << 14 |
      (thumb_opcode << opcode_shift) |
      rd << rd_shift |
      rn << rn_shift |
      (use_immediate ? (immediate << immediate_shift) : 0);

  Emit16(encoding);
}
1374
1375
1376// ADD and SUB are complex enough to warrant their own emitter.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001377void Thumb2Assembler::Emit16BitAddSub(Condition cond ATTRIBUTE_UNUSED,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001378 Opcode opcode,
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001379 bool set_cc,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001380 Register rn,
1381 Register rd,
1382 const ShifterOperand& so) {
1383 uint8_t dp_opcode = 0;
1384 uint8_t opcode_shift = 6;
1385 uint8_t rd_shift = 0;
1386 uint8_t rn_shift = 3;
1387 uint8_t immediate_shift = 0;
1388 bool use_immediate = false;
Vladimir Markoac0341e2014-12-18 19:56:49 +00001389 uint32_t immediate = 0; // Should be at most 9 bits but keep the full immediate for CHECKs.
Dave Allison65fcc2c2014-04-28 13:45:27 -07001390 uint8_t thumb_opcode;;
1391
1392 if (so.IsImmediate()) {
1393 use_immediate = true;
1394 immediate = so.GetImmediate();
1395 }
1396
1397 switch (opcode) {
1398 case ADD:
1399 if (so.IsRegister()) {
1400 Register rm = so.GetRegister();
Andreas Gampe513ea0c2015-02-02 13:17:52 -08001401 if (rn == rd && !set_cc) {
Dave Allison65fcc2c2014-04-28 13:45:27 -07001402 // Can use T2 encoding (allows 4 bit registers)
Andreas Gampec8ccf682014-09-29 20:07:43 -07001403 dp_opcode = 1U /* 0b01 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001404 opcode_shift = 10;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001405 thumb_opcode = 1U /* 0b0001 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001406 // Make Rn also contain the top bit of rd.
1407 rn = static_cast<Register>(static_cast<uint32_t>(rm) |
Andreas Gampec8ccf682014-09-29 20:07:43 -07001408 (static_cast<uint32_t>(rd) & 8U /* 0b1000 */) << 1);
1409 rd = static_cast<Register>(static_cast<uint32_t>(rd) & 7U /* 0b111 */);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001410 } else {
1411 // T1.
1412 opcode_shift = 9;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001413 thumb_opcode = 12U /* 0b01100 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001414 immediate = static_cast<uint32_t>(so.GetRegister());
1415 use_immediate = true;
1416 immediate_shift = 6;
1417 }
1418 } else {
1419 // Immediate.
1420 if (rd == SP && rn == SP) {
1421 // ADD sp, sp, #imm
Andreas Gampec8ccf682014-09-29 20:07:43 -07001422 dp_opcode = 2U /* 0b10 */;
1423 thumb_opcode = 3U /* 0b11 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001424 opcode_shift = 12;
Vladimir Markoac0341e2014-12-18 19:56:49 +00001425 CHECK_LT(immediate, (1u << 9));
Roland Levillain14d90572015-07-16 10:52:26 +01001426 CHECK_ALIGNED(immediate, 4);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001427
1428 // Remove rd and rn from instruction by orring it with immed and clearing bits.
1429 rn = R0;
1430 rd = R0;
1431 rd_shift = 0;
1432 rn_shift = 0;
1433 immediate >>= 2;
1434 } else if (rd != SP && rn == SP) {
1435 // ADD rd, SP, #imm
Andreas Gampec8ccf682014-09-29 20:07:43 -07001436 dp_opcode = 2U /* 0b10 */;
1437 thumb_opcode = 5U /* 0b101 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001438 opcode_shift = 11;
Vladimir Markoac0341e2014-12-18 19:56:49 +00001439 CHECK_LT(immediate, (1u << 10));
Roland Levillain14d90572015-07-16 10:52:26 +01001440 CHECK_ALIGNED(immediate, 4);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001441
1442 // Remove rn from instruction.
1443 rn = R0;
1444 rn_shift = 0;
1445 rd_shift = 8;
1446 immediate >>= 2;
1447 } else if (rn != rd) {
1448 // Must use T1.
1449 opcode_shift = 9;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001450 thumb_opcode = 14U /* 0b01110 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001451 immediate_shift = 6;
1452 } else {
1453 // T2 encoding.
1454 opcode_shift = 11;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001455 thumb_opcode = 6U /* 0b110 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001456 rd_shift = 8;
1457 rn_shift = 8;
1458 }
1459 }
1460 break;
1461
1462 case SUB:
1463 if (so.IsRegister()) {
1464 // T1.
1465 opcode_shift = 9;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001466 thumb_opcode = 13U /* 0b01101 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001467 immediate = static_cast<uint32_t>(so.GetRegister());
1468 use_immediate = true;
1469 immediate_shift = 6;
1470 } else {
1471 if (rd == SP && rn == SP) {
1472 // SUB sp, sp, #imm
Andreas Gampec8ccf682014-09-29 20:07:43 -07001473 dp_opcode = 2U /* 0b10 */;
1474 thumb_opcode = 0x61 /* 0b1100001 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001475 opcode_shift = 7;
Vladimir Markoac0341e2014-12-18 19:56:49 +00001476 CHECK_LT(immediate, (1u << 9));
Roland Levillain14d90572015-07-16 10:52:26 +01001477 CHECK_ALIGNED(immediate, 4);
Dave Allison65fcc2c2014-04-28 13:45:27 -07001478
1479 // Remove rd and rn from instruction by orring it with immed and clearing bits.
1480 rn = R0;
1481 rd = R0;
1482 rd_shift = 0;
1483 rn_shift = 0;
1484 immediate >>= 2;
1485 } else if (rn != rd) {
1486 // Must use T1.
1487 opcode_shift = 9;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001488 thumb_opcode = 15U /* 0b01111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001489 immediate_shift = 6;
1490 } else {
1491 // T2 encoding.
1492 opcode_shift = 11;
Andreas Gampec8ccf682014-09-29 20:07:43 -07001493 thumb_opcode = 7U /* 0b111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001494 rd_shift = 8;
1495 rn_shift = 8;
1496 }
1497 }
1498 break;
1499 default:
1500 LOG(FATAL) << "This opcode is not an ADD or SUB: " << opcode;
Vladimir Markoe8469c12014-11-26 18:09:30 +00001501 UNREACHABLE();
Dave Allison65fcc2c2014-04-28 13:45:27 -07001502 }
1503
1504 int16_t encoding = dp_opcode << 14 |
1505 (thumb_opcode << opcode_shift) |
1506 rd << rd_shift |
1507 rn << rn_shift |
1508 (use_immediate ? (immediate << immediate_shift) : 0);
1509
1510 Emit16(encoding);
1511}
1512
1513
1514void Thumb2Assembler::EmitDataProcessing(Condition cond,
1515 Opcode opcode,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001516 bool set_cc,
Dave Allison65fcc2c2014-04-28 13:45:27 -07001517 Register rn,
1518 Register rd,
1519 const ShifterOperand& so) {
1520 CHECK_NE(rd, kNoRegister);
1521 CheckCondition(cond);
1522
1523 if (Is32BitDataProcessing(cond, opcode, set_cc, rn, rd, so)) {
1524 Emit32BitDataProcessing(cond, opcode, set_cc, rn, rd, so);
1525 } else {
1526 Emit16BitDataProcessing(cond, opcode, set_cc, rn, rd, so);
1527 }
1528}
1529
// Emits a shift of |rm| by immediate |amount| into |rd|. Uses the 16-bit
// encoding for LSL/LSR/ASR on low registers; high registers, ROR and RRX
// require the 32-bit encoding.
void Thumb2Assembler::EmitShift(Register rd, Register rm, Shift shift, uint8_t amount, bool setcc) {
  CHECK_LT(amount, (1 << 5));  // Shift amount is a 5-bit field.
  if (IsHighRegister(rd) || IsHighRegister(rm) || shift == ROR || shift == RRX) {
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 0U /* 0b00 */; break;
      case LSR: opcode = 1U /* 0b01 */; break;
      case ASR: opcode = 2U /* 0b10 */; break;
      case ROR: opcode = 3U /* 0b11 */; break;
      case RRX: opcode = 3U /* 0b11 */; amount = 0; break;  // RRX is ROR with amount 0.
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    // 32 bit.
    int32_t encoding = B31 | B30 | B29 | B27 | B25 | B22 |
        0xf << 16 | (setcc ? B20 : 0);
    // The shift amount is split into imm3:imm2 fields.
    uint32_t imm3 = amount >> 2;
    uint32_t imm2 = amount & 3U /* 0b11 */;
    encoding |= imm3 << 12 | imm2 << 6 | static_cast<int16_t>(rm) |
        static_cast<int16_t>(rd) << 8 | opcode << 4;
    Emit32(encoding);
  } else {
    // 16 bit shift
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 0U /* 0b00 */; break;
      case LSR: opcode = 1U /* 0b01 */; break;
      case ASR: opcode = 2U /* 0b10 */; break;
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    int16_t encoding = opcode << 11 | amount << 6 | static_cast<int16_t>(rm) << 3 |
        static_cast<int16_t>(rd);
    Emit16(encoding);
  }
}
1568
// Emits a shift of |rn| by register |rm| into |rd|. The 16-bit encoding is
// only available for in-place (rd == rn) shifts of low registers; RRX has
// no register-shift form at all.
void Thumb2Assembler::EmitShift(Register rd, Register rn, Shift shift, Register rm, bool setcc) {
  CHECK_NE(shift, RRX);
  bool must_be_32bit = false;
  if (IsHighRegister(rd) || IsHighRegister(rm) || IsHighRegister(rn) || rd != rn) {
    must_be_32bit = true;
  }

  if (must_be_32bit) {
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 0U /* 0b00 */; break;
      case LSR: opcode = 1U /* 0b01 */; break;
      case ASR: opcode = 2U /* 0b10 */; break;
      case ROR: opcode = 3U /* 0b11 */; break;
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    // 32 bit.
    int32_t encoding = B31 | B30 | B29 | B28 | B27 | B25 |
        0xf << 12 | (setcc ? B20 : 0);
    encoding |= static_cast<int16_t>(rn) << 16 | static_cast<int16_t>(rm) |
        static_cast<int16_t>(rd) << 8 | opcode << 21;
    Emit32(encoding);
  } else {
    uint16_t opcode = 0;
    switch (shift) {
      case LSL: opcode = 2U /* 0b0010 */; break;
      case LSR: opcode = 3U /* 0b0011 */; break;
      case ASR: opcode = 4U /* 0b0100 */; break;
      default:
        LOG(FATAL) << "Unsupported thumb2 shift opcode";
        UNREACHABLE();
    }
    int16_t encoding = B14 | opcode << 6 | static_cast<int16_t>(rm) << 3 |
        static_cast<int16_t>(rd);
    Emit16(encoding);
  }
}
1608
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001609inline size_t Thumb2Assembler::Fixup::SizeInBytes(Size size) {
1610 switch (size) {
1611 case kBranch16Bit:
1612 return 2u;
1613 case kBranch32Bit:
1614 return 4u;
Dave Allison45fdb932014-06-25 12:37:10 -07001615
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001616 case kCbxz16Bit:
1617 return 2u;
1618 case kCbxz32Bit:
1619 return 4u;
1620 case kCbxz48Bit:
1621 return 6u;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001622
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001623 case kLiteral1KiB:
1624 return 2u;
1625 case kLiteral4KiB:
1626 return 4u;
1627 case kLiteral64KiB:
1628 return 8u;
1629 case kLiteral1MiB:
1630 return 10u;
1631 case kLiteralFar:
1632 return 14u;
Dave Allison65fcc2c2014-04-28 13:45:27 -07001633
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001634 case kLongOrFPLiteral1KiB:
1635 return 4u;
1636 case kLongOrFPLiteral256KiB:
1637 return 10u;
1638 case kLongOrFPLiteralFar:
1639 return 14u;
1640 }
1641 LOG(FATAL) << "Unexpected size: " << static_cast<int>(size);
1642 UNREACHABLE();
1643}
1644
// Returns the byte size this fixup had when it was first emitted.
inline uint32_t Thumb2Assembler::Fixup::GetOriginalSizeInBytes() const {
  return SizeInBytes(original_size_);
}
1648
// Returns the current byte size of this fixup (may have grown via IncreaseSize()).
inline uint32_t Thumb2Assembler::Fixup::GetSizeInBytes() const {
  return SizeInBytes(size_);
}
1652
1653inline size_t Thumb2Assembler::Fixup::LiteralPoolPaddingSize(uint32_t current_code_size) {
1654 // The code size must be a multiple of 2.
Roland Levillain14d90572015-07-16 10:52:26 +01001655 DCHECK_ALIGNED(current_code_size, 2);
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001656 // If it isn't a multiple of 4, we need to add a 2-byte padding before the literal pool.
1657 return current_code_size & 2;
1658}
1659
// Computes the PC-relative offset to encode for this fixup, given the total
// code size: target - location, adjusted by adjustment_ (bytes inserted by
// other fixups growing), minus the default Thumb2 PC bias of 4, and then
// corrected per size variant for extra preceding instructions, literal pool
// padding and the PC rounding done by literal loads.
inline int32_t Thumb2Assembler::Fixup::GetOffset(uint32_t current_code_size) const {
  static constexpr int32_t int32_min = std::numeric_limits<int32_t>::min();
  static constexpr int32_t int32_max = std::numeric_limits<int32_t>::max();
  DCHECK_LE(target_, static_cast<uint32_t>(int32_max));
  DCHECK_LE(location_, static_cast<uint32_t>(int32_max));
  DCHECK_LE(adjustment_, static_cast<uint32_t>(int32_max));
  int32_t diff = static_cast<int32_t>(target_) - static_cast<int32_t>(location_);
  // adjustment_ moves a forward target further away and a backward target
  // further behind, so it widens |diff| in either direction.
  if (target_ > location_) {
    DCHECK_LE(adjustment_, static_cast<uint32_t>(int32_max - diff));
    diff += static_cast<int32_t>(adjustment_);
  } else {
    DCHECK_LE(int32_min + static_cast<int32_t>(adjustment_), diff);
    diff -= static_cast<int32_t>(adjustment_);
  }
  // The default PC adjustment for Thumb2 is 4 bytes.
  DCHECK_GE(diff, int32_min + 4);
  diff -= 4;
  // Add additional adjustment for instructions preceding the PC usage, padding
  // before the literal pool and rounding down the PC for literal loads.
  switch (GetSize()) {
    case kBranch16Bit:
    case kBranch32Bit:
      break;

    case kCbxz16Bit:
      break;
    case kCbxz32Bit:
    case kCbxz48Bit:
      DCHECK_GE(diff, int32_min + 2);
      diff -= 2;        // Extra CMP Rn, #0, 16-bit.
      break;

    case kLiteral1KiB:
    case kLiteral4KiB:
    case kLongOrFPLiteral1KiB:
      DCHECK(diff >= 0 || (GetSize() == kLiteral1KiB && diff == -2));
      diff += LiteralPoolPaddingSize(current_code_size);
      // Load literal instructions round down the PC+4 to a multiple of 4, so if the PC
      // isn't a multiple of 2, we need to adjust. Since we already adjusted for the target
      // being aligned, current PC alignment can be inferred from diff.
      DCHECK_ALIGNED(diff, 2);
      diff = diff + (diff & 2);
      DCHECK_GE(diff, 0);
      break;
    case kLiteral1MiB:
    case kLiteral64KiB:
    case kLongOrFPLiteral256KiB:
      DCHECK_GE(diff, 4);  // The target must be at least 4 bytes after the ADD rX, PC.
      diff -= 4;        // One extra 32-bit MOV.
      diff += LiteralPoolPaddingSize(current_code_size);
      break;
    case kLiteralFar:
    case kLongOrFPLiteralFar:
      DCHECK_GE(diff, 8);  // The target must be at least 4 bytes after the ADD rX, PC.
      diff -= 8;        // Extra MOVW+MOVT; both 32-bit.
      diff += LiteralPoolPaddingSize(current_code_size);
      break;
  }
  return diff;
}
1720
1721inline size_t Thumb2Assembler::Fixup::IncreaseSize(Size new_size) {
1722 DCHECK_NE(target_, kUnresolved);
1723 Size old_size = size_;
1724 size_ = new_size;
1725 DCHECK_GT(SizeInBytes(new_size), SizeInBytes(old_size));
1726 size_t adjustment = SizeInBytes(new_size) - SizeInBytes(old_size);
1727 if (target_ > location_) {
1728 adjustment_ += adjustment;
1729 }
1730 return adjustment;
1731}
1732
// Checks whether the current encoding can still reach the target with the
// given total code size and, if not, promotes the fixup through progressively
// larger encodings until it can. Returns the number of bytes by which the
// total code size grew as a result.
uint32_t Thumb2Assembler::Fixup::AdjustSizeIfNeeded(uint32_t current_code_size) {
  uint32_t old_code_size = current_code_size;
  switch (GetSize()) {
    case kBranch16Bit:
      // 16-bit branch: 9-bit signed offset when conditional, 12-bit otherwise.
      if (IsInt(cond_ != AL ? 9 : 12, GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kBranch32Bit);
      FALLTHROUGH_INTENDED;
    case kBranch32Bit:
      // We don't support conditional branches beyond +-1MiB
      // or unconditional branches beyond +-16MiB.
      break;

    case kCbxz16Bit:
      // CBZ/CBNZ takes a 7-bit unsigned (forward-only) offset.
      if (IsUint<7>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kCbxz32Bit);
      FALLTHROUGH_INTENDED;
    case kCbxz32Bit:
      // CMP + 16-bit B<cond>: 9-bit signed offset.
      if (IsInt<9>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kCbxz48Bit);
      FALLTHROUGH_INTENDED;
    case kCbxz48Bit:
      // We don't support conditional branches beyond +-1MiB.
      break;

    case kLiteral1KiB:
      // 16-bit LDR literal needs a low register and a 10-bit unsigned offset.
      DCHECK(!IsHighRegister(rn_));
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral4KiB);
      FALLTHROUGH_INTENDED;
    case kLiteral4KiB:
      if (IsUint<12>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral64KiB);
      FALLTHROUGH_INTENDED;
    case kLiteral64KiB:
      // Can't handle high register which we can encounter by fall-through from kLiteral4KiB.
      if (!IsHighRegister(rn_) && IsUint<16>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteral1MiB);
      FALLTHROUGH_INTENDED;
    case kLiteral1MiB:
      if (IsUint<20>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLiteralFar);
      FALLTHROUGH_INTENDED;
    case kLiteralFar:
      // This encoding can reach any target.
      break;

    case kLongOrFPLiteral1KiB:
      if (IsUint<10>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLongOrFPLiteral256KiB);
      FALLTHROUGH_INTENDED;
    case kLongOrFPLiteral256KiB:
      if (IsUint<18>(GetOffset(current_code_size))) {
        break;
      }
      current_code_size += IncreaseSize(kLongOrFPLiteralFar);
      FALLTHROUGH_INTENDED;
    case kLongOrFPLiteralFar:
      // This encoding can reach any target.
      break;
  }
  return current_code_size - old_code_size;
}
1811
// Writes the final instruction bytes for this fixup into the buffer once all
// fixup sizes are settled. `code_size` is the final total code size used to
// compute the encoded offset. The emitted sequence depends on the (possibly
// promoted) size of the fixup.
void Thumb2Assembler::Fixup::Emit(AssemblerBuffer* buffer, uint32_t code_size) const {
  switch (GetSize()) {
    case kBranch16Bit: {
      DCHECK(type_ == kUnconditional || type_ == kConditional);
      DCHECK_EQ(type_ == kConditional, cond_ != AL);
      int16_t encoding = BEncoding16(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kBranch32Bit: {
      DCHECK(type_ == kConditional || type_ == kUnconditional ||
             type_ == kUnconditionalLink || type_ == kUnconditionalLinkX);
      DCHECK_EQ(type_ == kConditional, cond_ != AL);
      int32_t encoding = BEncoding32(GetOffset(code_size), cond_);
      if (type_ == kUnconditionalLink) {
        DCHECK_NE(encoding & B12, 0);
        encoding |= B14;  // Set the link bit: B -> BL.
      } else if (type_ == kUnconditionalLinkX) {
        DCHECK_NE(encoding & B12, 0);
        encoding ^= B14 | B12;  // B -> BLX.
      }
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }

    case kCbxz16Bit: {
      DCHECK(type_ == kCompareAndBranchXZero);
      int16_t encoding = CbxzEncoding16(rn_, GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kCbxz32Bit: {
      // Expanded as CMP rn, #0 followed by a 16-bit B<cond>.
      DCHECK(type_ == kCompareAndBranchXZero);
      DCHECK(cond_ == EQ || cond_ == NE);
      int16_t cmp_encoding = CmpRnImm8Encoding16(rn_, 0);
      int16_t b_encoding = BEncoding16(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, cmp_encoding);
      buffer->Store<int16_t>(location_ + 2, b_encoding);
      break;
    }
    case kCbxz48Bit: {
      // Expanded as CMP rn, #0 followed by a 32-bit B<cond>.
      DCHECK(type_ == kCompareAndBranchXZero);
      DCHECK(cond_ == EQ || cond_ == NE);
      int16_t cmp_encoding = CmpRnImm8Encoding16(rn_, 0);
      int32_t b_encoding = BEncoding32(GetOffset(code_size), cond_);
      buffer->Store<int16_t>(location_, cmp_encoding);
      buffer->Store<int16_t>(location_ + 2u, b_encoding >> 16);
      buffer->Store<int16_t>(location_ + 4u, static_cast<int16_t>(b_encoding & 0xffff));
      break;
    }

    case kLiteral1KiB: {
      DCHECK(type_ == kLoadLiteralNarrow);
      int16_t encoding = LdrLitEncoding16(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding);
      break;
    }
    case kLiteral4KiB: {
      DCHECK(type_ == kLoadLiteralNarrow);
      // GetOffset() uses PC+4 but load literal uses AlignDown(PC+4, 4). Adjust offset accordingly.
      int32_t encoding = LdrLitEncoding32(rn_, GetOffset(code_size));
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLiteral64KiB: {
      // Expanded as MOVW rn, #offset; ADD rn, PC; LDR rn, [rn].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t mov_encoding = MovwEncoding32(rn_, GetOffset(code_size));
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int16_t ldr_encoding = LdrRtRnImm5Encoding16(rn_, rn_, 0);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding);
      break;
    }
    case kLiteral1MiB: {
      // Expanded as MOV rn, #(offset & ~0xfff); ADD rn, PC; LDR rn, [rn, #(offset & 0xfff)].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t offset = GetOffset(code_size);
      int32_t mov_encoding = MovModImmEncoding32(rn_, offset & ~0xfff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int32_t ldr_encoding = LdrRtRnImm12Encoding(rn_, rn_, offset & 0xfff);
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 8u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
    case kLiteralFar: {
      // Expanded as MOVW + MOVT (full 32-bit offset); ADD rn, PC; LDR rn, [rn].
      DCHECK(type_ == kLoadLiteralNarrow);
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(rn_, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(rn_, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(rn_, PC);
      int32_t ldr_encoding = LdrRtRnImm12Encoding(rn_, rn_, 0);
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 10u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 12u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }

    case kLongOrFPLiteral1KiB: {
      int32_t encoding = LoadWideOrFpEncoding(PC, GetOffset(code_size));  // DCHECKs type_.
      buffer->Store<int16_t>(location_, encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(encoding & 0xffff));
      break;
    }
    case kLongOrFPLiteral256KiB: {
      // Expanded as MOV ip, #(offset & ~0x3ff); ADD ip, PC; load from [ip, #(offset & 0x3ff)].
      int32_t offset = GetOffset(code_size);
      int32_t mov_encoding = MovModImmEncoding32(IP, offset & ~0x3ff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(IP, PC);
      int32_t ldr_encoding = LoadWideOrFpEncoding(IP, offset & 0x3ff);    // DCHECKs type_.
      buffer->Store<int16_t>(location_, mov_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(mov_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 6u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 8u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
    case kLongOrFPLiteralFar: {
      // Expanded as MOVW + MOVT; ADD ip, PC; load from [ip].
      int32_t offset = GetOffset(code_size);
      int32_t movw_encoding = MovwEncoding32(IP, offset & 0xffff);
      int32_t movt_encoding = MovtEncoding32(IP, offset & ~0xffff);
      int16_t add_pc_encoding = AddRdnRmEncoding16(IP, PC);
      int32_t ldr_encoding = LoadWideOrFpEncoding(IP, 0);                 // DCHECKs type_.
      buffer->Store<int16_t>(location_, movw_encoding >> 16);
      buffer->Store<int16_t>(location_ + 2u, static_cast<int16_t>(movw_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 4u, movt_encoding >> 16);
      buffer->Store<int16_t>(location_ + 6u, static_cast<int16_t>(movt_encoding & 0xffff));
      buffer->Store<int16_t>(location_ + 8u, add_pc_encoding);
      buffer->Store<int16_t>(location_ + 10u, ldr_encoding >> 16);
      buffer->Store<int16_t>(location_ + 12u, static_cast<int16_t>(ldr_encoding & 0xffff));
      break;
    }
  }
}
1954
Dave Allison65fcc2c2014-04-28 13:45:27 -07001955uint16_t Thumb2Assembler::EmitCompareAndBranch(Register rn, uint16_t prev, bool n) {
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00001956 CHECK(IsLowRegister(rn));
Dave Allison65fcc2c2014-04-28 13:45:27 -07001957 uint32_t location = buffer_.Size();
1958
1959 // This is always unresolved as it must be a forward branch.
1960 Emit16(prev); // Previous link.
Vladimir Markocf93a5c2015-06-16 11:33:24 +00001961 return AddFixup(Fixup::CompareAndBranch(location, rn, n ? NE : EQ));
Dave Allison65fcc2c2014-04-28 13:45:27 -07001962}
1963
1964
// NOTE: this only supports immediate offsets, not [rx,ry].
// TODO: support [rx,ry] instructions.
//
// Emits a load or store of `rd` at address `ad`, selecting the smallest
// encoding (16-bit Thumb1 where possible, otherwise 32-bit Thumb2).
// `byte`/`half` select the access size (word if neither), `is_signed`
// selects sign-extending loads.
void Thumb2Assembler::EmitLoadStore(Condition cond,
                                    bool load,
                                    bool byte,
                                    bool half,
                                    bool is_signed,
                                    Register rd,
                                    const Address& ad) {
  CHECK_NE(rd, kNoRegister);
  CheckCondition(cond);
  bool must_be_32bit = force_32bit_;
  // 16-bit encodings can only address low registers.
  if (IsHighRegister(rd)) {
    must_be_32bit = true;
  }

  Register rn = ad.GetRegister();
  // SP and PC based addressing have dedicated 16-bit forms; other high
  // base registers force the 32-bit encoding.
  if (IsHighRegister(rn) && rn != SP && rn != PC) {
    must_be_32bit = true;
  }

  // Signed loads, negative offsets and pre/post-indexing have no 16-bit form.
  if (is_signed || ad.GetOffset() < 0 || ad.GetMode() != Address::Offset) {
    must_be_32bit = true;
  }

  if (ad.IsImmediate()) {
    // Immediate offset
    int32_t offset = ad.GetOffset();

    // The 16 bit SP relative instruction can only have a 10 bit offset.
    if (rn == SP && offset >= (1 << 10)) {
      must_be_32bit = true;
    }

    if (byte) {
      // 5 bit offset, no shift.
      if (offset >= (1 << 5)) {
        must_be_32bit = true;
      }
    } else if (half) {
      // 6 bit offset, shifted by 1.
      if (offset >= (1 << 6)) {
        must_be_32bit = true;
      }
    } else {
      // 7 bit offset, shifted by 2.
      if (offset >= (1 << 7)) {
        must_be_32bit = true;
      }
    }

    if (must_be_32bit) {
      int32_t encoding = B31 | B30 | B29 | B28 | B27 |
          (load ? B20 : 0) |
          (is_signed ? B24 : 0) |
          static_cast<uint32_t>(rd) << 12 |
          ad.encodingThumb(true) |
          (byte ? 0 : half ? B21 : B22);
      Emit32(encoding);
    } else {
      // 16 bit thumb1.
      uint8_t opA = 0;
      bool sp_relative = false;

      if (byte) {
        opA = 7U /* 0b0111 */;
      } else if (half) {
        opA = 8U /* 0b1000 */;
      } else {
        if (rn == SP) {
          opA = 9U /* 0b1001 */;
          sp_relative = true;
        } else {
          opA = 6U /* 0b0110 */;
        }
      }
      int16_t encoding = opA << 12 |
          (load ? B11 : 0);

      CHECK_GE(offset, 0);
      if (sp_relative) {
        // SP relative, 10 bit offset.
        CHECK_LT(offset, (1 << 10));
        CHECK_ALIGNED(offset, 4);
        encoding |= rd << 8 | offset >> 2;
      } else {
        // No SP relative. The offset is shifted right depending on
        // the size of the load/store.
        encoding |= static_cast<uint32_t>(rd);

        if (byte) {
          // 5 bit offset, no shift.
          CHECK_LT(offset, (1 << 5));
        } else if (half) {
          // 6 bit offset, shifted by 1.
          CHECK_LT(offset, (1 << 6));
          CHECK_ALIGNED(offset, 2);
          offset >>= 1;
        } else {
          // 7 bit offset, shifted by 2.
          CHECK_LT(offset, (1 << 7));
          CHECK_ALIGNED(offset, 4);
          offset >>= 2;
        }
        encoding |= rn << 3 | offset << 6;
      }

      Emit16(encoding);
    }
  } else {
    // Register shift.
    if (ad.GetRegister() == PC) {
      // PC relative literal encoding.
      int32_t offset = ad.GetOffset();
      if (must_be_32bit || offset < 0 || offset >= (1 << 10) || !load) {
        // 32-bit literal form; offset is encoded as a magnitude with an
        // up/down bit (B23).
        int32_t up = B23;
        if (offset < 0) {
          offset = -offset;
          up = 0;
        }
        CHECK_LT(offset, (1 << 12));
        int32_t encoding = 0x1f << 27 | 0xf << 16 | B22 | (load ? B20 : 0) |
            offset | up |
            static_cast<uint32_t>(rd) << 12;
        Emit32(encoding);
      } else {
        // 16 bit literal load.
        CHECK_GE(offset, 0);
        CHECK_LT(offset, (1 << 10));
        int32_t encoding = B14 | (load ? B11 : 0) | static_cast<uint32_t>(rd) << 8 | offset >> 2;
        Emit16(encoding);
      }
    } else {
      if (ad.GetShiftCount() != 0) {
        // If there is a shift count this must be 32 bit.
        must_be_32bit = true;
      } else if (IsHighRegister(ad.GetRegisterOffset())) {
        must_be_32bit = true;
      }

      if (must_be_32bit) {
        int32_t encoding = 0x1f << 27 | (load ? B20 : 0) | static_cast<uint32_t>(rd) << 12 |
            ad.encodingThumb(true);
        if (half) {
          encoding |= B21;
        } else if (!byte) {
          encoding |= B22;
        }
        Emit32(encoding);
      } else {
        // 16 bit register offset.
        int32_t encoding = B14 | B12 | (load ? B11 : 0) | static_cast<uint32_t>(rd) |
            ad.encodingThumb(false);
        if (byte) {
          encoding |= B10;
        } else if (half) {
          encoding |= B9;
        }
        Emit16(encoding);
      }
    }
  }
}
2128
2129
// Emits LDM/STM (or the 16-bit PUSH/POP forms when applicable) for the
// register set `regs` based at `base` with block-address-mode `bam`.
// Only IA/IA_W and DB/DB_W modes have Thumb encodings.
void Thumb2Assembler::EmitMultiMemOp(Condition cond,
                                     BlockAddressMode bam,
                                     bool load,
                                     Register base,
                                     RegList regs) {
  CHECK_NE(base, kNoRegister);
  CheckCondition(cond);
  bool must_be_32bit = force_32bit_;

  // SP-based LDMIA!/STMDB! touching only low registers (plus PC for pop /
  // LR for push) can use the 16-bit PUSH/POP encodings.
  if (!must_be_32bit && base == SP && bam == (load ? IA_W : DB_W) &&
      (regs & 0xff00 & ~(1 << (load ? PC : LR))) == 0) {
    // Use 16-bit PUSH/POP.
    int16_t encoding = B15 | B13 | B12 | (load ? B11 : 0) | B10 |
        ((regs & (1 << (load ? PC : LR))) != 0 ? B8 : 0) | (regs & 0x00ff);
    Emit16(encoding);
    return;
  }

  // High registers in the list have no 16-bit LDM/STM encoding.
  if ((regs & 0xff00) != 0) {
    must_be_32bit = true;
  }

  bool w_bit = bam == IA_W || bam == DB_W || bam == DA_W || bam == IB_W;
  // 16 bit always uses writeback.
  if (!w_bit) {
    must_be_32bit = true;
  }

  if (must_be_32bit) {
    uint32_t op = 0;
    switch (bam) {
      case IA:
      case IA_W:
        op = 1U /* 0b01 */;
        break;
      case DB:
      case DB_W:
        op = 2U /* 0b10 */;
        break;
      case DA:
      case IB:
      case DA_W:
      case IB_W:
        LOG(FATAL) << "LDM/STM mode not supported on thumb: " << bam;
        UNREACHABLE();
    }
    if (load) {
      // Cannot have SP in the list.
      CHECK_EQ((regs & (1 << SP)), 0);
    } else {
      // Cannot have PC or SP in the list.
      CHECK_EQ((regs & (1 << PC | 1 << SP)), 0);
    }
    int32_t encoding = B31 | B30 | B29 | B27 |
        (op << 23) |
        (load ? B20 : 0) |
        base << 16 |
        regs |
        (w_bit << 21);
    Emit32(encoding);
  } else {
    int16_t encoding = B15 | B14 |
        (load ? B11 : 0) |
        base << 8 |
        regs;
    Emit16(encoding);
  }
}
2198
// Emits a branch (B, B<cond>, BL or BLX) to `label`, creating a Fixup that
// is resolved (and possibly enlarged) later. A bound label yields a resolved
// backward branch; an unbound label is linked into the label's chain through
// the emitted placeholder halfword.
void Thumb2Assembler::EmitBranch(Condition cond, Label* label, bool link, bool x) {
  // If branches cannot be relocated later, conservatively emit 32-bit now.
  bool use32bit = IsForced32Bit() || !CanRelocateBranches();
  uint32_t pc = buffer_.Size();
  Fixup::Type branch_type;
  if (cond == AL) {
    if (link) {
      use32bit = true;  // BL/BLX have no 16-bit encoding here.
      if (x) {
        branch_type = Fixup::kUnconditionalLinkX;      // BLX.
      } else {
        branch_type = Fixup::kUnconditionalLink;       // BL.
      }
    } else {
      branch_type = Fixup::kUnconditional;             // B.
    }
  } else {
    branch_type = Fixup::kConditional;                 // B<cond>.
  }

  Fixup::Size size = use32bit ? Fixup::kBranch32Bit : Fixup::kBranch16Bit;
  FixupId branch_id = AddFixup(Fixup::Branch(pc, branch_type, size, cond));

  if (label->IsBound()) {
    // The branch is to a bound label which means that it's a backwards branch.
    // Record this branch as a dependency of all Fixups between the label and the branch.
    GetFixup(branch_id)->Resolve(label->Position());
    for (FixupId fixup_id = branch_id; fixup_id != 0u; ) {
      --fixup_id;
      Fixup* fixup = GetFixup(fixup_id);
      DCHECK_GE(label->Position(), 0);
      if (fixup->GetLocation() < static_cast<uint32_t>(label->Position())) {
        break;
      }
      fixup->AddDependent(branch_id);
    }
    Emit16(0);
  } else {
    // Branch target is an unbound label. Add it to a singly-linked list maintained within
    // the code with the label serving as the head.
    Emit16(static_cast<uint16_t>(label->position_));
    label->LinkTo(branch_id);
  }

  if (use32bit) {
    Emit16(0);  // Second halfword placeholder for 32-bit branches.
  }
  DCHECK_EQ(buffer_.Size() - pc, GetFixup(branch_id)->GetSizeInBytes());
}
2247
2248
2249void Thumb2Assembler::clz(Register rd, Register rm, Condition cond) {
2250 CHECK_NE(rd, kNoRegister);
2251 CHECK_NE(rm, kNoRegister);
2252 CheckCondition(cond);
2253 CHECK_NE(rd, PC);
2254 CHECK_NE(rm, PC);
2255 int32_t encoding = B31 | B30 | B29 | B28 | B27 |
2256 B25 | B23 | B21 | B20 |
2257 static_cast<uint32_t>(rm) << 16 |
2258 0xf << 12 |
2259 static_cast<uint32_t>(rd) << 8 |
2260 B7 |
2261 static_cast<uint32_t>(rm);
2262 Emit32(encoding);
2263}
2264
2265
2266void Thumb2Assembler::movw(Register rd, uint16_t imm16, Condition cond) {
2267 CheckCondition(cond);
2268 bool must_be_32bit = force_32bit_;
2269 if (IsHighRegister(rd)|| imm16 >= 256u) {
2270 must_be_32bit = true;
2271 }
2272
2273 if (must_be_32bit) {
2274 // Use encoding T3.
Andreas Gampec8ccf682014-09-29 20:07:43 -07002275 uint32_t imm4 = (imm16 >> 12) & 15U /* 0b1111 */;
2276 uint32_t i = (imm16 >> 11) & 1U /* 0b1 */;
2277 uint32_t imm3 = (imm16 >> 8) & 7U /* 0b111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002278 uint32_t imm8 = imm16 & 0xff;
2279 int32_t encoding = B31 | B30 | B29 | B28 |
2280 B25 | B22 |
2281 static_cast<uint32_t>(rd) << 8 |
2282 i << 26 |
2283 imm4 << 16 |
2284 imm3 << 12 |
2285 imm8;
2286 Emit32(encoding);
2287 } else {
2288 int16_t encoding = B13 | static_cast<uint16_t>(rd) << 8 |
2289 imm16;
2290 Emit16(encoding);
2291 }
2292}
2293
2294
2295void Thumb2Assembler::movt(Register rd, uint16_t imm16, Condition cond) {
2296 CheckCondition(cond);
2297 // Always 32 bits.
Andreas Gampec8ccf682014-09-29 20:07:43 -07002298 uint32_t imm4 = (imm16 >> 12) & 15U /* 0b1111 */;
2299 uint32_t i = (imm16 >> 11) & 1U /* 0b1 */;
2300 uint32_t imm3 = (imm16 >> 8) & 7U /* 0b111 */;
Dave Allison65fcc2c2014-04-28 13:45:27 -07002301 uint32_t imm8 = imm16 & 0xff;
2302 int32_t encoding = B31 | B30 | B29 | B28 |
2303 B25 | B23 | B22 |
2304 static_cast<uint32_t>(rd) << 8 |
2305 i << 26 |
2306 imm4 << 16 |
2307 imm3 << 12 |
2308 imm8;
2309 Emit32(encoding);
2310}
2311
2312
2313void Thumb2Assembler::ldrex(Register rt, Register rn, uint16_t imm, Condition cond) {
2314 CHECK_NE(rn, kNoRegister);
2315 CHECK_NE(rt, kNoRegister);
2316 CheckCondition(cond);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002317 CHECK_LT(imm, (1u << 10));
2318
2319 int32_t encoding = B31 | B30 | B29 | B27 | B22 | B20 |
2320 static_cast<uint32_t>(rn) << 16 |
2321 static_cast<uint32_t>(rt) << 12 |
2322 0xf << 8 |
2323 imm >> 2;
2324 Emit32(encoding);
2325}
2326
2327
// LDREX rt, [rn] - load-exclusive with a zero offset.
void Thumb2Assembler::ldrex(Register rt, Register rn, Condition cond) {
  ldrex(rt, rn, 0, cond);
}
2331
2332
2333void Thumb2Assembler::strex(Register rd,
2334 Register rt,
2335 Register rn,
2336 uint16_t imm,
2337 Condition cond) {
2338 CHECK_NE(rn, kNoRegister);
2339 CHECK_NE(rd, kNoRegister);
2340 CHECK_NE(rt, kNoRegister);
2341 CheckCondition(cond);
2342 CHECK_LT(imm, (1u << 10));
2343
2344 int32_t encoding = B31 | B30 | B29 | B27 | B22 |
2345 static_cast<uint32_t>(rn) << 16 |
2346 static_cast<uint32_t>(rt) << 12 |
2347 static_cast<uint32_t>(rd) << 8 |
2348 imm >> 2;
2349 Emit32(encoding);
2350}
2351
2352
Calin Juravle52c48962014-12-16 17:02:57 +00002353void Thumb2Assembler::ldrexd(Register rt, Register rt2, Register rn, Condition cond) {
2354 CHECK_NE(rn, kNoRegister);
2355 CHECK_NE(rt, kNoRegister);
2356 CHECK_NE(rt2, kNoRegister);
2357 CHECK_NE(rt, rt2);
2358 CheckCondition(cond);
2359
2360 int32_t encoding = B31 | B30 | B29 | B27 | B23 | B22 | B20 |
2361 static_cast<uint32_t>(rn) << 16 |
2362 static_cast<uint32_t>(rt) << 12 |
2363 static_cast<uint32_t>(rt2) << 8 |
2364 B6 | B5 | B4 | B3 | B2 | B1 | B0;
2365 Emit32(encoding);
2366}
2367
2368
// STREX rd, rt, [rn] - store-exclusive with a zero offset.
void Thumb2Assembler::strex(Register rd,
                            Register rt,
                            Register rn,
                            Condition cond) {
  strex(rd, rt, rn, 0, cond);
}
2375
2376
Calin Juravle52c48962014-12-16 17:02:57 +00002377void Thumb2Assembler::strexd(Register rd, Register rt, Register rt2, Register rn, Condition cond) {
2378 CHECK_NE(rd, kNoRegister);
2379 CHECK_NE(rn, kNoRegister);
2380 CHECK_NE(rt, kNoRegister);
2381 CHECK_NE(rt2, kNoRegister);
2382 CHECK_NE(rt, rt2);
2383 CHECK_NE(rd, rt);
2384 CHECK_NE(rd, rt2);
2385 CheckCondition(cond);
2386
2387 int32_t encoding = B31 | B30 | B29 | B27 | B23 | B22 |
2388 static_cast<uint32_t>(rn) << 16 |
2389 static_cast<uint32_t>(rt) << 12 |
2390 static_cast<uint32_t>(rt2) << 8 |
2391 B6 | B5 | B4 |
2392 static_cast<uint32_t>(rd);
2393 Emit32(encoding);
2394}
2395
2396
Dave Allison65fcc2c2014-04-28 13:45:27 -07002397void Thumb2Assembler::clrex(Condition cond) {
2398 CheckCondition(cond);
2399 int32_t encoding = B31 | B30 | B29 | B27 | B28 | B25 | B24 | B23 |
2400 B21 | B20 |
2401 0xf << 16 |
2402 B15 |
2403 0xf << 8 |
2404 B5 |
2405 0xf;
2406 Emit32(encoding);
2407}
2408
2409
2410void Thumb2Assembler::nop(Condition cond) {
2411 CheckCondition(cond);
Andreas Gampec8ccf682014-09-29 20:07:43 -07002412 uint16_t encoding = B15 | B13 | B12 |
Dave Allison65fcc2c2014-04-28 13:45:27 -07002413 B11 | B10 | B9 | B8;
Andreas Gampec8ccf682014-09-29 20:07:43 -07002414 Emit16(static_cast<int16_t>(encoding));
Dave Allison65fcc2c2014-04-28 13:45:27 -07002415}
2416
2417
2418void Thumb2Assembler::vmovsr(SRegister sn, Register rt, Condition cond) {
2419 CHECK_NE(sn, kNoSRegister);
2420 CHECK_NE(rt, kNoRegister);
2421 CHECK_NE(rt, SP);
2422 CHECK_NE(rt, PC);
2423 CheckCondition(cond);
2424 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2425 B27 | B26 | B25 |
2426 ((static_cast<int32_t>(sn) >> 1)*B16) |
2427 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
2428 ((static_cast<int32_t>(sn) & 1)*B7) | B4;
2429 Emit32(encoding);
2430}
2431
2432
2433void Thumb2Assembler::vmovrs(Register rt, SRegister sn, Condition cond) {
2434 CHECK_NE(sn, kNoSRegister);
2435 CHECK_NE(rt, kNoRegister);
2436 CHECK_NE(rt, SP);
2437 CHECK_NE(rt, PC);
2438 CheckCondition(cond);
2439 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2440 B27 | B26 | B25 | B20 |
2441 ((static_cast<int32_t>(sn) >> 1)*B16) |
2442 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
2443 ((static_cast<int32_t>(sn) & 1)*B7) | B4;
2444 Emit32(encoding);
2445}
2446
2447
// VMOV {sm, sm+1}, rt, rt2 - move two core registers into a consecutive
// pair of single-precision registers.
void Thumb2Assembler::vmovsrr(SRegister sm, Register rt, Register rt2,
                              Condition cond) {
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(sm, S31);  // The pair sm, sm+1 must exist.
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CheckCondition(cond);
  // sm is split into its upper bits (Vm field) and lowest bit (M field).
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
                     (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2467
2468
// VMOV rt, rt2, {sm, sm+1} - move a consecutive pair of single-precision
// registers into two core registers. The destinations must be distinct.
void Thumb2Assembler::vmovrrs(Register rt, Register rt2, SRegister sm,
                              Condition cond) {
  CHECK_NE(sm, kNoSRegister);
  CHECK_NE(sm, S31);  // The pair sm, sm+1 must exist.
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(rt, rt2);
  CheckCondition(cond);
  // B20 distinguishes this direction from vmovsrr.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 | B20 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 |
                     ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
                     (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2489
2490
// VMOV dm, rt, rt2 - move two core registers into a double-precision
// register.
void Thumb2Assembler::vmovdrr(DRegister dm, Register rt, Register rt2,
                              Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CheckCondition(cond);
  // dm is split into its low 4 bits (Vm field) and top bit (M field).
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
                     ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2509
2510
// VMOV rt, rt2, dm: copy a double-precision register into two core registers.
// Same encoding as vmovdrr plus B20 (direction).
void Thumb2Assembler::vmovrrd(Register rt, Register rt2, DRegister dm,
                              Condition cond) {
  CHECK_NE(dm, kNoDRegister);
  CHECK_NE(rt, kNoRegister);
  CHECK_NE(rt, SP);   // SP and PC are not valid transfer registers.
  CHECK_NE(rt, PC);
  CHECK_NE(rt2, kNoRegister);
  CHECK_NE(rt2, SP);
  CHECK_NE(rt2, PC);
  CHECK_NE(rt, rt2);  // The two destination registers must be distinct.
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B22 | B20 |
                     (static_cast<int32_t>(rt2)*B16) |
                     (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
                     ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2530
2531
2532void Thumb2Assembler::vldrs(SRegister sd, const Address& ad, Condition cond) {
2533 const Address& addr = static_cast<const Address&>(ad);
2534 CHECK_NE(sd, kNoSRegister);
2535 CheckCondition(cond);
2536 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2537 B27 | B26 | B24 | B20 |
2538 ((static_cast<int32_t>(sd) & 1)*B22) |
2539 ((static_cast<int32_t>(sd) >> 1)*B12) |
2540 B11 | B9 | addr.vencoding();
2541 Emit32(encoding);
2542}
2543
2544
2545void Thumb2Assembler::vstrs(SRegister sd, const Address& ad, Condition cond) {
2546 const Address& addr = static_cast<const Address&>(ad);
2547 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
2548 CHECK_NE(sd, kNoSRegister);
2549 CheckCondition(cond);
2550 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2551 B27 | B26 | B24 |
2552 ((static_cast<int32_t>(sd) & 1)*B22) |
2553 ((static_cast<int32_t>(sd) >> 1)*B12) |
2554 B11 | B9 | addr.vencoding();
2555 Emit32(encoding);
2556}
2557
2558
2559void Thumb2Assembler::vldrd(DRegister dd, const Address& ad, Condition cond) {
2560 const Address& addr = static_cast<const Address&>(ad);
2561 CHECK_NE(dd, kNoDRegister);
2562 CheckCondition(cond);
2563 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2564 B27 | B26 | B24 | B20 |
2565 ((static_cast<int32_t>(dd) >> 4)*B22) |
2566 ((static_cast<int32_t>(dd) & 0xf)*B12) |
2567 B11 | B9 | B8 | addr.vencoding();
2568 Emit32(encoding);
2569}
2570
2571
2572void Thumb2Assembler::vstrd(DRegister dd, const Address& ad, Condition cond) {
2573 const Address& addr = static_cast<const Address&>(ad);
2574 CHECK_NE(static_cast<Register>(addr.encodingArm() & (0xf << kRnShift)), PC);
2575 CHECK_NE(dd, kNoDRegister);
2576 CheckCondition(cond);
2577 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
2578 B27 | B26 | B24 |
2579 ((static_cast<int32_t>(dd) >> 4)*B22) |
2580 ((static_cast<int32_t>(dd) & 0xf)*B12) |
2581 B11 | B9 | B8 | addr.vencoding();
2582 Emit32(encoding);
2583}
2584
2585
// VPUSH of `nregs` consecutive S registers starting at `reg`.
void Thumb2Assembler::vpushs(SRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, false, cond);
}
2589
2590
// VPUSH of `nregs` consecutive D registers starting at `reg`.
void Thumb2Assembler::vpushd(DRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, true, true, cond);
}
2594
2595
// VPOP of `nregs` consecutive S registers starting at `reg`.
void Thumb2Assembler::vpops(SRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, false, cond);
}
2599
2600
// VPOP of `nregs` consecutive D registers starting at `reg`.
void Thumb2Assembler::vpopd(DRegister reg, int nregs, Condition cond) {
  EmitVPushPop(static_cast<uint32_t>(reg), nregs, false, true, cond);
}
2604
2605
// Emit a VPUSH or VPOP of `nregs` consecutive S (dbl=false) or D (dbl=true)
// registers starting at register number `reg`.
void Thumb2Assembler::EmitVPushPop(uint32_t reg, int nregs, bool push, bool dbl, Condition cond) {
  CheckCondition(cond);

  uint32_t D;   // Single bit completing the 5-bit register number.
  uint32_t Vd;  // Four-bit register field placed in bits 12-15.
  if (dbl) {
    // Encoded as D:Vd.
    D = (reg >> 4) & 1;
    Vd = reg & 15U /* 0b1111 */;
  } else {
    // Encoded as Vd:D.
    D = reg & 1;
    Vd = (reg >> 1) & 15U /* 0b1111 */;
  }
  // B19|B18|B16 put SP (0b1101) in the Rn field; B21 requests writeback.
  // Push uses the decrement-before form (B24); pop uses increment-after
  // with the load bit (B23 | B20). The condition bits are fixed at 0b1110.
  // The bottom field counts 32-bit words, so D registers contribute two each
  // (nregs << 1).
  int32_t encoding = B27 | B26 | B21 | B19 | B18 | B16 |
                     B11 | B9 |
                     (dbl ? B8 : 0) |
                     (push ? B24 : (B23 | B20)) |
                     14U /* 0b1110 */ << 28 |
                     nregs << (dbl ? 1 : 0) |
                     D << 22 |
                     Vd << 12;
  Emit32(encoding);
}
2630
2631
// Emit a three-operand single-precision VFP data-processing instruction.
// Each S register splits into an "extra" bit and a four-bit field:
// sd -> D (bit 22) : Vd (bits 12-15), sn -> N (bit 7) : Vn (bits 16-19),
// sm -> M (bit 5) : Vm (bits 0-3).
void Thumb2Assembler::EmitVFPsss(Condition cond, int32_t opcode,
                                 SRegister sd, SRegister sn, SRegister sm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(sn, kNoSRegister);
  CHECK_NE(sm, kNoSRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(sd) & 1)*B22) |
                     ((static_cast<int32_t>(sn) >> 1)*B16) |
                     ((static_cast<int32_t>(sd) >> 1)*B12) |
                     ((static_cast<int32_t>(sn) & 1)*B7) |
                     ((static_cast<int32_t>(sm) & 1)*B5) |
                     (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2648
2649
// Emit a three-operand double-precision VFP data-processing instruction
// (B8 selects the double-precision form). Each D register splits into a
// high bit (D/N/M in bits 22/7/5) and a four-bit field (Vd/Vn/Vm).
void Thumb2Assembler::EmitVFPddd(Condition cond, int32_t opcode,
                                 DRegister dd, DRegister dn, DRegister dm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(dn, kNoDRegister);
  CHECK_NE(dm, kNoDRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | B8 | opcode |
                     ((static_cast<int32_t>(dd) >> 4)*B22) |
                     ((static_cast<int32_t>(dn) & 0xf)*B16) |
                     ((static_cast<int32_t>(dd) & 0xf)*B12) |
                     ((static_cast<int32_t>(dn) >> 4)*B7) |
                     ((static_cast<int32_t>(dm) >> 4)*B5) |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2666
2667
// Emit a two-operand VFP instruction with an S destination and a D source
// operand (used by conversion-style opcodes).
void Thumb2Assembler::EmitVFPsd(Condition cond, int32_t opcode,
                                SRegister sd, DRegister dm) {
  CHECK_NE(sd, kNoSRegister);
  CHECK_NE(dm, kNoDRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(sd) & 1)*B22) |
                     ((static_cast<int32_t>(sd) >> 1)*B12) |
                     ((static_cast<int32_t>(dm) >> 4)*B5) |
                     (static_cast<int32_t>(dm) & 0xf);
  Emit32(encoding);
}
2681
2682
// Emit a two-operand VFP instruction with a D destination and an S source
// operand (used by conversion-style opcodes).
void Thumb2Assembler::EmitVFPds(Condition cond, int32_t opcode,
                                DRegister dd, SRegister sm) {
  CHECK_NE(dd, kNoDRegister);
  CHECK_NE(sm, kNoSRegister);
  CheckCondition(cond);
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B11 | B9 | opcode |
                     ((static_cast<int32_t>(dd) >> 4)*B22) |
                     ((static_cast<int32_t>(dd) & 0xf)*B12) |
                     ((static_cast<int32_t>(sm) & 1)*B5) |
                     (static_cast<int32_t>(sm) >> 1);
  Emit32(encoding);
}
2696
2697
void Thumb2Assembler::vmstat(Condition cond) {  // VMRS APSR_nzcv, FPSCR.
  CHECK_NE(cond, kNoCondition);
  CheckCondition(cond);
  // Rt = PC (0b1111) in bits 12-15 selects the APSR_nzcv destination, i.e.
  // the FP comparison flags are transferred to the core condition flags.
  int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
                     B27 | B26 | B25 | B23 | B22 | B21 | B20 | B16 |
                     (static_cast<int32_t>(PC)*B12) |
                     B11 | B9 | B4;
  Emit32(encoding);
}
2707
2708
// SVC #imm8: supervisor call, 16-bit Thumb encoding (0xDF00 | imm8).
void Thumb2Assembler::svc(uint32_t imm8) {
  CHECK(IsUint<8>(imm8)) << imm8;  // Only an 8-bit immediate fits.
  int16_t encoding = B15 | B14 | B12 |
      B11 | B10 | B9 | B8 |
      imm8;
  Emit16(encoding);
}
2716
2717
// BKPT #imm8: software breakpoint, 16-bit Thumb encoding (0xBE00 | imm8).
void Thumb2Assembler::bkpt(uint16_t imm8) {
  CHECK(IsUint<8>(imm8)) << imm8;  // Only an 8-bit immediate fits.
  int16_t encoding = B15 | B13 | B12 |
      B11 | B10 | B9 |
      imm8;
  Emit16(encoding);
}
2725
// Convert the given IT state to a mask bit given bit 0 of the first
// condition and a shift position. A "then" slot repeats the first
// condition's low bit, an "else" slot inverts it, and an omitted slot
// contributes the terminating 1 bit of the IT mask.
static uint8_t ToItMask(ItState s, uint8_t firstcond0, uint8_t shift) {
  switch (s) {
    case kItOmitted: return 1 << shift;
    case kItThen: return firstcond0 << shift;
    case kItElse: return !firstcond0 << shift;  // `!` binds tighter than `<<`.
  }
  return 0;  // Not reached for valid ItState values; keeps compilers happy.
}
2736
2737
// Set the IT condition in the given position for the given state. This is used
// to check that conditional instructions match the preceding IT statement.
void Thumb2Assembler::SetItCondition(ItState s, Condition cond, uint8_t index) {
  switch (s) {
    case kItOmitted: it_conditions_[index] = AL; break;  // Slot unused.
    case kItThen: it_conditions_[index] = cond; break;
    case kItElse:
      // Flipping the low bit of an ARM condition code yields its inverse
      // (e.g. EQ <-> NE).
      it_conditions_[index] = static_cast<Condition>(static_cast<uint8_t>(cond) ^ 1);
      break;
  }
}
2749
2750
// Emit an IT/ITT/ITE/... instruction and record the condition expected for
// each of the up-to-four following instructions so that CheckCondition can
// validate them.
void Thumb2Assembler::it(Condition firstcond, ItState i1, ItState i2, ItState i3) {
  CheckCondition(AL);  // Not allowed in IT block.
  uint8_t firstcond0 = static_cast<uint8_t>(firstcond) & 1;

  // All conditions to AL.
  for (uint8_t i = 0; i < 4; ++i) {
    it_conditions_[i] = AL;
  }

  // Slot 0 is always "then" with the first condition. The mask encodes the
  // remaining slots top-down (bits 3..1), with a trailing 1 bit marking the
  // end of the block; an omitted state terminates the chain.
  SetItCondition(kItThen, firstcond, 0);
  uint8_t mask = ToItMask(i1, firstcond0, 3);
  SetItCondition(i1, firstcond, 1);

  if (i1 != kItOmitted) {
    mask |= ToItMask(i2, firstcond0, 2);
    SetItCondition(i2, firstcond, 2);
    if (i2 != kItOmitted) {
      mask |= ToItMask(i3, firstcond0, 1);
      SetItCondition(i3, firstcond, 3);
      if (i3 != kItOmitted) {
        // All four slots used: terminating bit goes in position 0.
        mask |= 1U /* 0b0001 */;
      }
    }
  }

  // Start at first condition.
  it_cond_index_ = 0;
  next_condition_ = it_conditions_[0];
  uint16_t encoding = B15 | B13 | B12 |
                      B11 | B10 | B9 | B8 |
                      firstcond << 4 |
                      mask;
  Emit16(encoding);
}
2785
2786
// CBZ rn, label: compare rn against zero and branch if zero. Only valid for
// low registers and forward branches (the label must not be bound yet).
void Thumb2Assembler::cbz(Register rn, Label* label) {
  CheckCondition(AL);  // cbz is not conditional (not allowed in IT block).
  if (label->IsBound()) {
    LOG(FATAL) << "cbz can only be used to branch forwards";
    UNREACHABLE();
  } else if (IsHighRegister(rn)) {
    LOG(FATAL) << "cbz can only be used with low registers";
    UNREACHABLE();
  } else {
    // Emit the branch fixup and link it into the label's fixup chain.
    uint16_t branchid = EmitCompareAndBranch(rn, static_cast<uint16_t>(label->position_), false);
    label->LinkTo(branchid);
  }
}
2800
2801
// CBNZ rn, label: compare rn against zero and branch if non-zero. Only valid
// for low registers and forward branches (the label must not be bound yet).
void Thumb2Assembler::cbnz(Register rn, Label* label) {
  CheckCondition(AL);  // cbnz is not conditional (not allowed in IT block).
  if (label->IsBound()) {
    LOG(FATAL) << "cbnz can only be used to branch forwards";
    UNREACHABLE();
  } else if (IsHighRegister(rn)) {
    LOG(FATAL) << "cbnz can only be used with low registers";
    UNREACHABLE();
  } else {
    // Emit the branch fixup and link it into the label's fixup chain.
    uint16_t branchid = EmitCompareAndBranch(rn, static_cast<uint16_t>(label->position_), true);
    label->LinkTo(branchid);
  }
}
2815
2816
// BLX rm: branch with link and exchange to the address in rm (16-bit
// encoding; rm occupies bits 3-6).
void Thumb2Assembler::blx(Register rm, Condition cond) {
  CHECK_NE(rm, kNoRegister);
  CheckCondition(cond);
  int16_t encoding = B14 | B10 | B9 | B8 | B7 | static_cast<int16_t>(rm) << 3;
  Emit16(encoding);
}
2823
2824
// BX rm: branch and exchange to the address in rm (16-bit encoding; differs
// from blx only by the missing link bit B7).
void Thumb2Assembler::bx(Register rm, Condition cond) {
  CHECK_NE(rm, kNoRegister);
  CheckCondition(cond);
  int16_t encoding = B14 | B10 | B9 | B8 | static_cast<int16_t>(rm) << 3;
  Emit16(encoding);
}
2831
2832
// Push a single register: store with pre-decremented SP.
void Thumb2Assembler::Push(Register rd, Condition cond) {
  str(rd, Address(SP, -kRegisterSize, Address::PreIndex), cond);
}
2836
2837
// Pop a single register: load with post-incremented SP.
void Thumb2Assembler::Pop(Register rd, Condition cond) {
  ldr(rd, Address(SP, kRegisterSize, Address::PostIndex), cond);
}
2841
2842
// Push a register list: STMDB SP!, regs.
void Thumb2Assembler::PushList(RegList regs, Condition cond) {
  stm(DB_W, SP, regs, cond);
}
2846
2847
// Pop a register list: LDMIA SP!, regs.
void Thumb2Assembler::PopList(RegList regs, Condition cond) {
  ldm(IA_W, SP, regs, cond);
}
2851
2852
2853void Thumb2Assembler::Mov(Register rd, Register rm, Condition cond) {
2854 if (cond != AL || rd != rm) {
2855 mov(rd, ShifterOperand(rm), cond);
2856 }
2857}
2858
2859
// Bind the label to the current end of the code buffer, resolving all
// fixups linked through it.
void Thumb2Assembler::Bind(Label* label) {
  BindLabel(label, buffer_.Size());
}
2863
2864
// LSL rd, rm, #shift_imm. Valid immediates are 0-31.
void Thumb2Assembler::Lsl(Register rd, Register rm, uint32_t shift_imm,
                          bool setcc, Condition cond) {
  CHECK_LE(shift_imm, 31u);
  CheckCondition(cond);
  EmitShift(rd, rm, LSL, shift_imm, setcc);
}
2871
2872
2873void Thumb2Assembler::Lsr(Register rd, Register rm, uint32_t shift_imm,
Dave Allison45fdb932014-06-25 12:37:10 -07002874 bool setcc, Condition cond) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00002875 CHECK(1u <= shift_imm && shift_imm <= 32u);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002876 if (shift_imm == 32) shift_imm = 0; // Comply to UAL syntax.
Dave Allison45fdb932014-06-25 12:37:10 -07002877 CheckCondition(cond);
2878 EmitShift(rd, rm, LSR, shift_imm, setcc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002879}
2880
2881
2882void Thumb2Assembler::Asr(Register rd, Register rm, uint32_t shift_imm,
Dave Allison45fdb932014-06-25 12:37:10 -07002883 bool setcc, Condition cond) {
Calin Juravle9aec02f2014-11-18 23:06:35 +00002884 CHECK(1u <= shift_imm && shift_imm <= 32u);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002885 if (shift_imm == 32) shift_imm = 0; // Comply to UAL syntax.
Dave Allison45fdb932014-06-25 12:37:10 -07002886 CheckCondition(cond);
2887 EmitShift(rd, rm, ASR, shift_imm, setcc);
Dave Allison65fcc2c2014-04-28 13:45:27 -07002888}
2889
2890
// ROR rd, rm, #shift_imm. Valid immediates are 1-31 (0 would encode RRX).
void Thumb2Assembler::Ror(Register rd, Register rm, uint32_t shift_imm,
                          bool setcc, Condition cond) {
  CHECK(1u <= shift_imm && shift_imm <= 31u);
  CheckCondition(cond);
  EmitShift(rd, rm, ROR, shift_imm, setcc);
}
2897
2898
// RRX rd, rm: rotate right by one bit through the carry flag.
void Thumb2Assembler::Rrx(Register rd, Register rm, bool setcc, Condition cond) {
  CheckCondition(cond);
  // NOTE(review): `rm` is passed a second time as the shift-amount register
  // operand; RRX takes no amount, so the value is presumably ignored by the
  // EmitShift(Register) overload -- confirm against its implementation.
  EmitShift(rd, rm, RRX, rm, setcc);
}
2903
2904
// LSL rd, rm, rn: register-specified logical shift left.
void Thumb2Assembler::Lsl(Register rd, Register rm, Register rn,
                          bool setcc, Condition cond) {
  CheckCondition(cond);
  EmitShift(rd, rm, LSL, rn, setcc);
}
2910
2911
// LSR rd, rm, rn: register-specified logical shift right.
void Thumb2Assembler::Lsr(Register rd, Register rm, Register rn,
                          bool setcc, Condition cond) {
  CheckCondition(cond);
  EmitShift(rd, rm, LSR, rn, setcc);
}
2917
2918
// ASR rd, rm, rn: register-specified arithmetic shift right.
void Thumb2Assembler::Asr(Register rd, Register rm, Register rn,
                          bool setcc, Condition cond) {
  CheckCondition(cond);
  EmitShift(rd, rm, ASR, rn, setcc);
}
2924
2925
// ROR rd, rm, rn: register-specified rotate right.
void Thumb2Assembler::Ror(Register rd, Register rm, Register rn,
                          bool setcc, Condition cond) {
  CheckCondition(cond);
  EmitShift(rd, rm, ROR, rn, setcc);
}
2931
2932
// Patch the 32-bit branch instruction `inst` with the given byte offset and
// return the patched encoding. Two layouts exist: with B12 set the branch
// carries a 25-bit offset whose J1/J2 bits are derived from the sign bit;
// without B12 (conditional branch) it carries a 21-bit offset with plain
// J1/J2 offset bits.
int32_t Thumb2Assembler::EncodeBranchOffset(int32_t offset, int32_t inst) {
  // The offset is off by 4 due to the way the ARM CPUs read PC.
  offset -= 4;
  offset >>= 1;  // Offsets are in halfwords.

  uint32_t value = 0;
  // There are two different encodings depending on the value of bit 12. In one case
  // intermediate values are calculated using the sign bit.
  if ((inst & B12) == B12) {
    // 25 bits of offset.
    uint32_t signbit = (offset >> 31) & 0x1;
    uint32_t i1 = (offset >> 22) & 0x1;
    uint32_t i2 = (offset >> 21) & 0x1;
    uint32_t imm10 = (offset >> 11) & 0x03ff;
    uint32_t imm11 = offset & 0x07ff;
    // J1/J2 are the I1/I2 bits XNOR'ed with the sign bit.
    uint32_t j1 = (i1 ^ signbit) ? 0 : 1;
    uint32_t j2 = (i2 ^ signbit) ? 0 : 1;
    value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm10 << 16) |
            imm11;
    // Remove the offset from the current encoding.
    inst &= ~(0x3ff << 16 | 0x7ff);
  } else {
    uint32_t signbit = (offset >> 31) & 0x1;
    uint32_t imm6 = (offset >> 11) & 0x03f;
    uint32_t imm11 = offset & 0x07ff;
    // Here J1/J2 are plain offset bits 19 and 17.
    uint32_t j1 = (offset >> 19) & 1;
    uint32_t j2 = (offset >> 17) & 1;
    value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm6 << 16) |
            imm11;
    // Remove the offset from the current encoding.
    inst &= ~(0x3f << 16 | 0x7ff);
  }
  // Mask out offset bits in current instruction.
  inst &= ~(B26 | B13 | B11);
  inst |= value;
  return inst;
}
2970
2971
// Inverse of EncodeBranchOffset: extract the signed byte offset from a
// 32-bit branch instruction (B12 selects the 25-bit vs 21-bit layout).
int Thumb2Assembler::DecodeBranchOffset(int32_t instr) {
  int32_t imm32;
  if ((instr & B12) == B12) {
    uint32_t S = (instr >> 26) & 1;
    uint32_t J2 = (instr >> 11) & 1;
    uint32_t J1 = (instr >> 13) & 1;
    uint32_t imm10 = (instr >> 16) & 0x3FF;
    uint32_t imm11 = instr & 0x7FF;

    // Undo the J = I XNOR S transformation applied when encoding.
    uint32_t I1 = ~(J1 ^ S) & 1;
    uint32_t I2 = ~(J2 ^ S) & 1;
    imm32 = (S << 24) | (I1 << 23) | (I2 << 22) | (imm10 << 12) | (imm11 << 1);
    imm32 = (imm32 << 8) >> 8;  // sign extend 24 bit immediate.
  } else {
    uint32_t S = (instr >> 26) & 1;
    uint32_t J2 = (instr >> 11) & 1;
    uint32_t J1 = (instr >> 13) & 1;
    uint32_t imm6 = (instr >> 16) & 0x3F;
    uint32_t imm11 = instr & 0x7FF;

    imm32 = (S << 20) | (J2 << 19) | (J1 << 18) | (imm6 << 12) | (imm11 << 1);
    imm32 = (imm32 << 11) >> 11;  // sign extend 21 bit immediate.
  }
  imm32 += 4;  // Compensate for the PC read-ahead subtracted when encoding.
  return imm32;
}
2998
// Translate a code position recorded before Fixup size adjustments into its
// position after adjustment, by accumulating the growth of all fixups that
// ended up located before it.
uint32_t Thumb2Assembler::GetAdjustedPosition(uint32_t old_position) {
  // We can reconstruct the adjustment by going through all the fixups from the beginning
  // up to the old_position. Since we expect AdjustedPosition() to be called in a loop
  // with increasing old_position, we can use the data from last AdjustedPosition() to
  // continue where we left off and the whole loop should be O(m+n) where m is the number
  // of positions to adjust and n is the number of fixups.
  if (old_position < last_old_position_) {
    // Going backwards; restart the scan from the beginning.
    last_position_adjustment_ = 0u;
    last_old_position_ = 0u;
    last_fixup_id_ = 0u;
  }
  while (last_fixup_id_ != fixups_.size()) {
    Fixup* fixup = GetFixup(last_fixup_id_);
    if (fixup->GetLocation() >= old_position + last_position_adjustment_) {
      break;  // This fixup (and all later ones) lie after the position.
    }
    if (fixup->GetSize() != fixup->GetOriginalSize()) {
      last_position_adjustment_ += fixup->GetSizeInBytes() - fixup->GetOriginalSizeInBytes();
    }
    ++last_fixup_id_;
  }
  last_old_position_ = old_position;
  return old_position + last_position_adjustment_;
}
3023
// Allocate a new 4- or 8-byte literal owned by this assembler; the returned
// pointer stays valid because literals_ is a node-stable container.
Literal* Thumb2Assembler::NewLiteral(size_t size, const uint8_t* data) {
  DCHECK(size == 4u || size == 8u) << size;
  literals_.emplace_back(size, data);
  return &literals_.back();
}
3029
// Emit a PC-relative load of a 4-byte literal into core register rt. The
// instruction is emitted as a placeholder carrying the label's link; the
// Fixup resolves the real offset later.
void Thumb2Assembler::LoadLiteral(Register rt, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 4u);
  DCHECK(!literal->GetLabel()->IsBound());
  // High registers (and forced mode) need the 32-bit encoding with its
  // larger 4KiB range.
  bool use32bit = IsForced32Bit() || IsHighRegister(rt);
  uint32_t location = buffer_.Size();
  Fixup::Size size = use32bit ? Fixup::kLiteral4KiB : Fixup::kLiteral1KiB;
  FixupId fixup_id = AddFixup(Fixup::LoadNarrowLiteral(location, rt, size));
  // The first halfword temporarily stores the previous head of the label's
  // fixup chain.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  if (use32bit) {
    Emit16(0);  // Second halfword of the 32-bit placeholder.
  }
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3044
// Emit a PC-relative load of an 8-byte literal into the register pair
// rt/rt2 (always a 32-bit instruction, hence the unconditional extra Emit16).
void Thumb2Assembler::LoadLiteral(Register rt, Register rt2, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 8u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id =
      AddFixup(Fixup::LoadWideLiteral(location, rt, rt2, Fixup::kLongOrFPLiteral1KiB));
  // First halfword temporarily holds the previous head of the label's chain.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3056
// Emit a PC-relative load of a 4-byte literal into S register sd.
void Thumb2Assembler::LoadLiteral(SRegister sd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 4u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id = AddFixup(Fixup::LoadSingleLiteral(location, sd, Fixup::kLongOrFPLiteral1KiB));
  // First halfword temporarily holds the previous head of the label's chain.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
3067
// Emit a PC-relative load of an 8-byte literal into D register dd.
void Thumb2Assembler::LoadLiteral(DRegister dd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 8u);
  DCHECK(!literal->GetLabel()->IsBound());
  uint32_t location = buffer_.Size();
  FixupId fixup_id = AddFixup(Fixup::LoadDoubleLiteral(location, dd, Fixup::kLongOrFPLiteral1KiB));
  // First halfword temporarily holds the previous head of the label's chain.
  Emit16(static_cast<uint16_t>(literal->GetLabel()->position_));
  literal->GetLabel()->LinkTo(fixup_id);
  Emit16(0);
  DCHECK_EQ(location + GetFixup(fixup_id)->GetSizeInBytes(), buffer_.Size());
}
Dave Allison65fcc2c2014-04-28 13:45:27 -07003078
// rd += value (in place); delegates to the three-operand form.
void Thumb2Assembler::AddConstant(Register rd, int32_t value, Condition cond) {
  AddConstant(rd, rd, value, cond);
}
3082
3083
// rd = rn + value, choosing the shortest sequence: add imm -> sub imm ->
// mvn+add -> mvn+sub -> movw/movt+add (with IP as scratch).
void Thumb2Assembler::AddConstant(Register rd, Register rn, int32_t value,
                                  Condition cond) {
  if (value == 0) {
    // Adding zero degenerates to a (possibly elided) register move.
    if (rd != rn) {
      mov(rd, ShifterOperand(rn), cond);
    }
    return;
  }
  // We prefer to select the shorter code sequence rather than selecting add for
  // positive values and sub for negatives ones, which would slightly improve
  // the readability of generated code for some constants.
  ShifterOperand shifter_op;
  if (ShifterOperandCanHold(rd, rn, ADD, value, &shifter_op)) {
    add(rd, rn, shifter_op, cond);
  } else if (ShifterOperandCanHold(rd, rn, SUB, -value, &shifter_op)) {
    sub(rd, rn, shifter_op, cond);
  } else {
    // All remaining strategies need IP as a scratch register.
    CHECK(rn != IP);
    if (ShifterOperandCanHold(rd, rn, MVN, ~value, &shifter_op)) {
      mvn(IP, shifter_op, cond);
      add(rd, rn, ShifterOperand(IP), cond);
    } else if (ShifterOperandCanHold(rd, rn, MVN, ~(-value), &shifter_op)) {
      mvn(IP, shifter_op, cond);
      sub(rd, rn, ShifterOperand(IP), cond);
    } else {
      // Last resort: materialize the full constant in IP.
      movw(IP, Low16Bits(value), cond);
      uint16_t value_high = High16Bits(value);
      if (value_high != 0) {
        movt(IP, value_high, cond);
      }
      add(rd, rn, ShifterOperand(IP), cond);
    }
  }
}
3118
3119
// rd = rn + value, setting the condition flags; same strategy cascade as
// AddConstant but with the flag-setting adds/subs forms.
void Thumb2Assembler::AddConstantSetFlags(Register rd, Register rn, int32_t value,
                                          Condition cond) {
  ShifterOperand shifter_op;
  if (ShifterOperandCanHold(rd, rn, ADD, value, &shifter_op)) {
    adds(rd, rn, shifter_op, cond);
  } else if (ShifterOperandCanHold(rd, rn, ADD, -value, &shifter_op)) {
    // NOTE(review): this tests ADD with -value but emits subs, whereas
    // AddConstant tests SUB for the equivalent case -- confirm that
    // ShifterOperandCanHold treats ADD/SUB immediates identically here.
    subs(rd, rn, shifter_op, cond);
  } else {
    // All remaining strategies need IP as a scratch register.
    CHECK(rn != IP);
    if (ShifterOperandCanHold(rd, rn, MVN, ~value, &shifter_op)) {
      mvn(IP, shifter_op, cond);
      adds(rd, rn, ShifterOperand(IP), cond);
    } else if (ShifterOperandCanHold(rd, rn, MVN, ~(-value), &shifter_op)) {
      mvn(IP, shifter_op, cond);
      subs(rd, rn, ShifterOperand(IP), cond);
    } else {
      // Last resort: materialize the full constant in IP.
      movw(IP, Low16Bits(value), cond);
      uint16_t value_high = High16Bits(value);
      if (value_high != 0) {
        movt(IP, value_high, cond);
      }
      adds(rd, rn, ShifterOperand(IP), cond);
    }
  }
}
3145
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003146
// Materialize a 32-bit constant in rd: mov imm -> mvn imm -> movw/movt pair.
void Thumb2Assembler::LoadImmediate(Register rd, int32_t value, Condition cond) {
  ShifterOperand shifter_op;
  if (ShifterOperandCanHold(rd, R0, MOV, value, &shifter_op)) {
    mov(rd, shifter_op, cond);
  } else if (ShifterOperandCanHold(rd, R0, MVN, ~value, &shifter_op)) {
    mvn(rd, shifter_op, cond);
  } else {
    movw(rd, Low16Bits(value), cond);
    uint16_t value_high = High16Bits(value);
    if (value_high != 0) {
      // Only emit movt when the top half is non-zero (movw zero-extends).
      movt(rd, value_high, cond);
    }
  }
}
3161
Nicolas Geoffray3bcc8ea2014-11-28 15:00:02 +00003162
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldLoadOffsetThumb.
// Load `reg` from [base + offset]; if the offset does not fit the load's
// immediate field, compute the address into IP first.
void Thumb2Assembler::LoadFromOffset(LoadOperandType type,
                                     Register reg,
                                     Register base,
                                     int32_t offset,
                                     Condition cond) {
  if (!Address::CanHoldLoadOffsetThumb(type, offset)) {
    CHECK_NE(base, IP);  // IP is about to be clobbered as scratch.
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldLoadOffsetThumb(type, offset));
  switch (type) {
    case kLoadSignedByte:
      ldrsb(reg, Address(base, offset), cond);
      break;
    case kLoadUnsignedByte:
      ldrb(reg, Address(base, offset), cond);
      break;
    case kLoadSignedHalfword:
      ldrsh(reg, Address(base, offset), cond);
      break;
    case kLoadUnsignedHalfword:
      ldrh(reg, Address(base, offset), cond);
      break;
    case kLoadWord:
      ldr(reg, Address(base, offset), cond);
      break;
    case kLoadWordPair:
      ldrd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
3202
3203
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldLoadOffsetThumb, as expected by JIT::GuardedLoadFromOffset.
// Load S register `reg` from [base + offset], using IP as an address scratch
// when the offset does not fit VLDR's immediate field.
void Thumb2Assembler::LoadSFromOffset(SRegister reg,
                                      Register base,
                                      int32_t offset,
                                      Condition cond) {
  if (!Address::CanHoldLoadOffsetThumb(kLoadSWord, offset)) {
    CHECK_NE(base, IP);  // IP is about to be clobbered as scratch.
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldLoadOffsetThumb(kLoadSWord, offset));
  vldrs(reg, Address(base, offset), cond);
}
3220
3221
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldLoadOffsetThumb, as expected by JIT::GuardedLoadFromOffset.
// Load D register `reg` from [base + offset], using IP as an address scratch
// when the offset does not fit VLDR's immediate field.
void Thumb2Assembler::LoadDFromOffset(DRegister reg,
                                      Register base,
                                      int32_t offset,
                                      Condition cond) {
  if (!Address::CanHoldLoadOffsetThumb(kLoadDWord, offset)) {
    CHECK_NE(base, IP);  // IP is about to be clobbered as scratch.
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldLoadOffsetThumb(kLoadDWord, offset));
  vldrd(reg, Address(base, offset), cond);
}
3238
3239
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetThumb.
// Store `reg` to [base + offset]. When the offset does not fit the store's
// immediate field, an address scratch register is needed; IP is used unless
// it overlaps `reg` (or `reg` + 1 for word pairs), in which case R5/R6 is
// spilled around the store.
void Thumb2Assembler::StoreToOffset(StoreOperandType type,
                                    Register reg,
                                    Register base,
                                    int32_t offset,
                                    Condition cond) {
  Register tmp_reg = kNoRegister;
  if (!Address::CanHoldStoreOffsetThumb(type, offset)) {
    CHECK_NE(base, IP);
    if (reg != IP &&
        (type != kStoreWordPair || reg + 1 != IP)) {
      tmp_reg = IP;
    } else {
      // Be careful not to use IP twice (for `reg` (or `reg` + 1 in
      // the case of a word-pair store)) and to build the Address
      // object used by the store instruction(s) below). Instead,
      // save R5 on the stack (or R6 if R5 is not available), use it
      // as secondary temporary register, and restore it after the
      // store instruction has been emitted.
      tmp_reg = base != R5 ? R5 : R6;
      Push(tmp_reg);
      if (base == SP) {
        // The push moved SP; rebias the offset so it still addresses the
        // intended slot.
        offset += kRegisterSize;
      }
    }
    LoadImmediate(tmp_reg, offset, cond);
    add(tmp_reg, tmp_reg, ShifterOperand(base), cond);
    base = tmp_reg;
    offset = 0;
  }
  CHECK(Address::CanHoldStoreOffsetThumb(type, offset));
  switch (type) {
    case kStoreByte:
      strb(reg, Address(base, offset), cond);
      break;
    case kStoreHalfword:
      strh(reg, Address(base, offset), cond);
      break;
    case kStoreWord:
      str(reg, Address(base, offset), cond);
      break;
    case kStoreWordPair:
      strd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  if (tmp_reg != kNoRegister && tmp_reg != IP) {
    // Restore the callee register we borrowed as a secondary scratch.
    DCHECK(tmp_reg == R5 || tmp_reg == R6);
    Pop(tmp_reg);
  }
}
3294
3295
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreToOffset.
// Store S register `reg` to [base + offset], using IP as an address scratch
// when the offset does not fit VSTR's immediate field.
void Thumb2Assembler::StoreSToOffset(SRegister reg,
                                     Register base,
                                     int32_t offset,
                                     Condition cond) {
  if (!Address::CanHoldStoreOffsetThumb(kStoreSWord, offset)) {
    CHECK_NE(base, IP);  // IP is about to be clobbered as scratch.
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldStoreOffsetThumb(kStoreSWord, offset));
  vstrs(reg, Address(base, offset), cond);
}
3312
3313
// Implementation note: this method must emit at most one instruction when
// Address::CanHoldStoreOffsetThumb, as expected by JIT::GuardedStoreSToOffset.
// Store D register `reg` to [base + offset], using IP as an address scratch
// when the offset does not fit VSTR's immediate field.
void Thumb2Assembler::StoreDToOffset(DRegister reg,
                                     Register base,
                                     int32_t offset,
                                     Condition cond) {
  if (!Address::CanHoldStoreOffsetThumb(kStoreDWord, offset)) {
    CHECK_NE(base, IP);  // IP is about to be clobbered as scratch.
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldStoreOffsetThumb(kStoreDWord, offset));
  vstrd(reg, Address(base, offset), cond);
}
3330
3331
// Emit a full-system memory barrier. The scratch register is part of the
// interface but unused here beyond the sanity check (dmb needs no scratch).
void Thumb2Assembler::MemoryBarrier(ManagedRegister mscratch) {
  CHECK_EQ(mscratch.AsArm().AsCoreRegister(), R12);
  dmb(SY);
}
3336
3337
// Emit a DMB with the given option (e.g. SY) in its low four bits.
void Thumb2Assembler::dmb(DmbOptions flavor) {
  int32_t encoding = 0xf3bf8f50;  // dmb in T1 encoding.
  Emit32(encoding | flavor);
}
3342
3343
3344void Thumb2Assembler::CompareAndBranchIfZero(Register r, Label* label) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01003345 if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
Nicolas Geoffrayd56376c2015-05-21 12:32:34 +00003346 cbz(r, label);
3347 } else {
3348 cmp(r, ShifterOperand(0));
3349 b(label, EQ);
3350 }
3351}
3352
3353
Dave Allison65fcc2c2014-04-28 13:45:27 -07003354void Thumb2Assembler::CompareAndBranchIfNonZero(Register r, Label* label) {
Nicolas Geoffray2bcb4312015-07-01 12:22:56 +01003355 if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
Nicolas Geoffrayd126ba12015-05-20 11:25:27 +01003356 cbnz(r, label);
3357 } else {
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01003358 cmp(r, ShifterOperand(0));
3359 b(label, NE);
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01003360 }
Dave Allison65fcc2c2014-04-28 13:45:27 -07003361}
3362} // namespace arm
3363} // namespace art