blob: 9b8cc3ad28b82557bc4db415bd52935cb7d6b4ce [file] [log] [blame]
Carl Shapiroa5d5cfd2011-06-21 12:46:59 -07001// Copyright 2011 Google Inc. All Rights Reserved.
2
Brian Carlstrom578bbdc2011-07-21 14:07:47 -07003#include "assembler.h"
4#include "logging.h"
5#include "offsets.h"
Carl Shapiroe2d373e2011-07-25 15:20:06 -07006#include "thread.h"
Brian Carlstrom578bbdc2011-07-21 14:07:47 -07007#include "utils.h"
Carl Shapiroa5d5cfd2011-06-21 12:46:59 -07008
Carl Shapiro6b6b5f02011-06-21 15:05:09 -07009namespace art {
Carl Shapiroa5d5cfd2011-06-21 12:46:59 -070010
Carl Shapiroa2e18e12011-06-21 18:57:55 -070011// Instruction encoding bits.
// Instruction encoding bits.
enum {
  // Flag bits; several share a bit position — the applicable meaning
  // depends on the instruction class being assembled.
  H = 1 << 5,   // halfword (or byte)
  L = 1 << 20,  // load (or store)
  S = 1 << 20,  // set condition code (or leave unchanged)
  W = 1 << 21,  // writeback base register (or leave unchanged)
  A = 1 << 21,  // accumulate in multiply instruction (or not)
  B = 1 << 22,  // unsigned byte (or word)
  N = 1 << 22,  // long (or short)
  U = 1 << 23,  // positive (or negative) offset/index
  P = 1 << 24,  // offset/pre-indexed addressing (or post-indexed addressing)
  I = 1 << 25,  // immediate shifter operand (or not)

  // Named single bits: Bn == 1 << n.
  B0 = 1,
  B1 = 1 << 1,
  B2 = 1 << 2,
  B3 = 1 << 3,
  B4 = 1 << 4,
  B5 = 1 << 5,
  B6 = 1 << 6,
  B7 = 1 << 7,
  B8 = 1 << 8,
  B9 = 1 << 9,
  B10 = 1 << 10,
  B11 = 1 << 11,
  B12 = 1 << 12,
  B16 = 1 << 16,
  B17 = 1 << 17,
  B18 = 1 << 18,
  B19 = 1 << 19,
  B20 = 1 << 20,
  B21 = 1 << 21,
  B22 = 1 << 22,
  B23 = 1 << 23,
  B24 = 1 << 24,
  B25 = 1 << 25,
  B26 = 1 << 26,
  B27 = 1 << 27,

  // Instruction bit masks.
  RdMask = 15 << 12,          // in str instruction
  CondMask = 15 << 28,
  CoprocessorMask = 15 << 8,
  OpCodeMask = 15 << 21,      // in data-processing instructions
  Imm24Mask = (1 << 24) - 1,
  Off12Mask = (1 << 12) - 1,

  // ldrex/strex register field encodings.
  kLdExRnShift = 16,
  kLdExRtShift = 12,
  kStrExRnShift = 16,
  kStrExRdShift = 12,
  kStrExRtShift = 0,
};
65
66
Elliott Hughes1f359b02011-07-17 14:27:17 -070067static const char* kRegisterNames[] = {
68 "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",
69 "fp", "ip", "sp", "lr", "pc"
70};
71std::ostream& operator<<(std::ostream& os, const Register& rhs) {
72 if (rhs >= R0 && rhs <= PC) {
73 os << kRegisterNames[rhs];
74 } else {
Ian Rogersb033c752011-07-20 12:22:35 -070075 os << "Register[" << static_cast<int>(rhs) << "]";
Elliott Hughes1f359b02011-07-17 14:27:17 -070076 }
77 return os;
78}
79
80
81std::ostream& operator<<(std::ostream& os, const SRegister& rhs) {
82 if (rhs >= S0 && rhs < kNumberOfSRegisters) {
Ian Rogersb033c752011-07-20 12:22:35 -070083 os << "s" << static_cast<int>(rhs);
Elliott Hughes1f359b02011-07-17 14:27:17 -070084 } else {
Ian Rogersb033c752011-07-20 12:22:35 -070085 os << "SRegister[" << static_cast<int>(rhs) << "]";
Elliott Hughes1f359b02011-07-17 14:27:17 -070086 }
87 return os;
88}
89
90
91std::ostream& operator<<(std::ostream& os, const DRegister& rhs) {
92 if (rhs >= D0 && rhs < kNumberOfDRegisters) {
Ian Rogersb033c752011-07-20 12:22:35 -070093 os << "d" << static_cast<int>(rhs);
Elliott Hughes1f359b02011-07-17 14:27:17 -070094 } else {
Ian Rogersb033c752011-07-20 12:22:35 -070095 os << "DRegister[" << static_cast<int>(rhs) << "]";
Elliott Hughes1f359b02011-07-17 14:27:17 -070096 }
97 return os;
98}
99
100
101static const char* kConditionNames[] = {
Ian Rogersb033c752011-07-20 12:22:35 -0700102 "EQ", "NE", "CS", "CC", "MI", "PL", "VS", "VC", "HI", "LS", "GE", "LT", "GT",
103 "LE", "AL",
Elliott Hughes1f359b02011-07-17 14:27:17 -0700104};
105std::ostream& operator<<(std::ostream& os, const Condition& rhs) {
106 if (rhs >= EQ && rhs <= AL) {
107 os << kConditionNames[rhs];
108 } else {
Ian Rogersb033c752011-07-20 12:22:35 -0700109 os << "Condition[" << static_cast<int>(rhs) << "]";
Elliott Hughes1f359b02011-07-17 14:27:17 -0700110 }
111 return os;
112}
113
114
Carl Shapiroa2e18e12011-06-21 18:57:55 -0700115void Assembler::Emit(int32_t value) {
116 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
117 buffer_.Emit<int32_t>(value);
118}
119
120
121void Assembler::EmitType01(Condition cond,
122 int type,
123 Opcode opcode,
124 int set_cc,
125 Register rn,
126 Register rd,
127 ShifterOperand so) {
128 CHECK_NE(rd, kNoRegister);
129 CHECK_NE(cond, kNoCondition);
130 int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
131 type << kTypeShift |
132 static_cast<int32_t>(opcode) << kOpcodeShift |
133 set_cc << kSShift |
134 static_cast<int32_t>(rn) << kRnShift |
135 static_cast<int32_t>(rd) << kRdShift |
136 so.encoding();
137 Emit(encoding);
138}
139
140
141void Assembler::EmitType5(Condition cond, int offset, bool link) {
142 CHECK_NE(cond, kNoCondition);
143 int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
144 5 << kTypeShift |
145 (link ? 1 : 0) << kLinkShift;
146 Emit(Assembler::EncodeBranchOffset(offset, encoding));
147}
148
149
150void Assembler::EmitMemOp(Condition cond,
151 bool load,
152 bool byte,
153 Register rd,
154 Address ad) {
155 CHECK_NE(rd, kNoRegister);
156 CHECK_NE(cond, kNoCondition);
157 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
158 B26 |
159 (load ? L : 0) |
160 (byte ? B : 0) |
161 (static_cast<int32_t>(rd) << kRdShift) |
162 ad.encoding();
163 Emit(encoding);
164}
165
166
167void Assembler::EmitMemOpAddressMode3(Condition cond,
168 int32_t mode,
169 Register rd,
170 Address ad) {
171 CHECK_NE(rd, kNoRegister);
172 CHECK_NE(cond, kNoCondition);
173 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
174 B22 |
175 mode |
176 (static_cast<int32_t>(rd) << kRdShift) |
177 ad.encoding3();
178 Emit(encoding);
179}
180
181
182void Assembler::EmitMultiMemOp(Condition cond,
183 BlockAddressMode am,
184 bool load,
185 Register base,
186 RegList regs) {
187 CHECK_NE(base, kNoRegister);
188 CHECK_NE(cond, kNoCondition);
189 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
190 B27 |
191 am |
192 (load ? L : 0) |
193 (static_cast<int32_t>(base) << kRnShift) |
194 regs;
195 Emit(encoding);
196}
197
198
199void Assembler::EmitShiftImmediate(Condition cond,
200 Shift opcode,
201 Register rd,
202 Register rm,
203 ShifterOperand so) {
204 CHECK_NE(cond, kNoCondition);
Elliott Hughes1f359b02011-07-17 14:27:17 -0700205 CHECK_EQ(so.type(), 1U);
Carl Shapiroa2e18e12011-06-21 18:57:55 -0700206 int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
207 static_cast<int32_t>(MOV) << kOpcodeShift |
208 static_cast<int32_t>(rd) << kRdShift |
209 so.encoding() << kShiftImmShift |
210 static_cast<int32_t>(opcode) << kShiftShift |
211 static_cast<int32_t>(rm);
212 Emit(encoding);
213}
214
215
216void Assembler::EmitShiftRegister(Condition cond,
217 Shift opcode,
218 Register rd,
219 Register rm,
220 ShifterOperand so) {
221 CHECK_NE(cond, kNoCondition);
Elliott Hughes1f359b02011-07-17 14:27:17 -0700222 CHECK_EQ(so.type(), 0U);
Carl Shapiroa2e18e12011-06-21 18:57:55 -0700223 int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
224 static_cast<int32_t>(MOV) << kOpcodeShift |
225 static_cast<int32_t>(rd) << kRdShift |
226 so.encoding() << kShiftRegisterShift |
227 static_cast<int32_t>(opcode) << kShiftShift |
228 B4 |
229 static_cast<int32_t>(rm);
230 Emit(encoding);
231}
232
233
234void Assembler::EmitBranch(Condition cond, Label* label, bool link) {
235 if (label->IsBound()) {
236 EmitType5(cond, label->Position() - buffer_.Size(), link);
237 } else {
238 int position = buffer_.Size();
239 // Use the offset field of the branch instruction for linking the sites.
240 EmitType5(cond, label->position_, link);
241 label->LinkTo(position);
242 }
243}
244
Carl Shapiroa2e18e12011-06-21 18:57:55 -0700245void Assembler::and_(Register rd, Register rn, ShifterOperand so,
246 Condition cond) {
247 EmitType01(cond, so.type(), AND, 0, rn, rd, so);
248}
249
250
251void Assembler::eor(Register rd, Register rn, ShifterOperand so,
252 Condition cond) {
253 EmitType01(cond, so.type(), EOR, 0, rn, rd, so);
254}
255
256
257void Assembler::sub(Register rd, Register rn, ShifterOperand so,
258 Condition cond) {
259 EmitType01(cond, so.type(), SUB, 0, rn, rd, so);
260}
261
262void Assembler::rsb(Register rd, Register rn, ShifterOperand so,
263 Condition cond) {
264 EmitType01(cond, so.type(), RSB, 0, rn, rd, so);
265}
266
267void Assembler::rsbs(Register rd, Register rn, ShifterOperand so,
268 Condition cond) {
269 EmitType01(cond, so.type(), RSB, 1, rn, rd, so);
270}
271
272
273void Assembler::add(Register rd, Register rn, ShifterOperand so,
274 Condition cond) {
275 EmitType01(cond, so.type(), ADD, 0, rn, rd, so);
276}
277
278
279void Assembler::adds(Register rd, Register rn, ShifterOperand so,
280 Condition cond) {
281 EmitType01(cond, so.type(), ADD, 1, rn, rd, so);
282}
283
284
285void Assembler::subs(Register rd, Register rn, ShifterOperand so,
286 Condition cond) {
287 EmitType01(cond, so.type(), SUB, 1, rn, rd, so);
288}
289
290
291void Assembler::adc(Register rd, Register rn, ShifterOperand so,
292 Condition cond) {
293 EmitType01(cond, so.type(), ADC, 0, rn, rd, so);
294}
295
296
297void Assembler::sbc(Register rd, Register rn, ShifterOperand so,
298 Condition cond) {
299 EmitType01(cond, so.type(), SBC, 0, rn, rd, so);
300}
301
302
303void Assembler::rsc(Register rd, Register rn, ShifterOperand so,
304 Condition cond) {
305 EmitType01(cond, so.type(), RSC, 0, rn, rd, so);
306}
307
308
309void Assembler::tst(Register rn, ShifterOperand so, Condition cond) {
310 CHECK_NE(rn, PC); // Reserve tst pc instruction for exception handler marker.
311 EmitType01(cond, so.type(), TST, 1, rn, R0, so);
312}
313
314
315void Assembler::teq(Register rn, ShifterOperand so, Condition cond) {
316 CHECK_NE(rn, PC); // Reserve teq pc instruction for exception handler marker.
317 EmitType01(cond, so.type(), TEQ, 1, rn, R0, so);
318}
319
320
321void Assembler::cmp(Register rn, ShifterOperand so, Condition cond) {
322 EmitType01(cond, so.type(), CMP, 1, rn, R0, so);
323}
324
325
326void Assembler::cmn(Register rn, ShifterOperand so, Condition cond) {
327 EmitType01(cond, so.type(), CMN, 1, rn, R0, so);
328}
329
330
331void Assembler::orr(Register rd, Register rn,
332 ShifterOperand so, Condition cond) {
333 EmitType01(cond, so.type(), ORR, 0, rn, rd, so);
334}
335
336
337void Assembler::orrs(Register rd, Register rn,
338 ShifterOperand so, Condition cond) {
339 EmitType01(cond, so.type(), ORR, 1, rn, rd, so);
340}
341
342
343void Assembler::mov(Register rd, ShifterOperand so, Condition cond) {
344 EmitType01(cond, so.type(), MOV, 0, R0, rd, so);
345}
346
347
348void Assembler::movs(Register rd, ShifterOperand so, Condition cond) {
349 EmitType01(cond, so.type(), MOV, 1, R0, rd, so);
350}
351
352
353void Assembler::bic(Register rd, Register rn, ShifterOperand so,
354 Condition cond) {
355 EmitType01(cond, so.type(), BIC, 0, rn, rd, so);
356}
357
358
359void Assembler::mvn(Register rd, ShifterOperand so, Condition cond) {
360 EmitType01(cond, so.type(), MVN, 0, R0, rd, so);
361}
362
363
364void Assembler::mvns(Register rd, ShifterOperand so, Condition cond) {
365 EmitType01(cond, so.type(), MVN, 1, R0, rd, so);
366}
367
368
369void Assembler::clz(Register rd, Register rm, Condition cond) {
370 CHECK_NE(rd, kNoRegister);
371 CHECK_NE(rm, kNoRegister);
372 CHECK_NE(cond, kNoCondition);
373 CHECK_NE(rd, PC);
374 CHECK_NE(rm, PC);
375 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
376 B24 | B22 | B21 | (0xf << 16) |
377 (static_cast<int32_t>(rd) << kRdShift) |
378 (0xf << 8) | B4 | static_cast<int32_t>(rm);
379 Emit(encoding);
380}
381
382
383void Assembler::movw(Register rd, uint16_t imm16, Condition cond) {
384 CHECK_NE(cond, kNoCondition);
385 int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
386 B25 | B24 | ((imm16 >> 12) << 16) |
387 static_cast<int32_t>(rd) << kRdShift | (imm16 & 0xfff);
388 Emit(encoding);
389}
390
391
392void Assembler::movt(Register rd, uint16_t imm16, Condition cond) {
393 CHECK_NE(cond, kNoCondition);
394 int32_t encoding = static_cast<int32_t>(cond) << kConditionShift |
395 B25 | B24 | B22 | ((imm16 >> 12) << 16) |
396 static_cast<int32_t>(rd) << kRdShift | (imm16 & 0xfff);
397 Emit(encoding);
398}
399
400
401void Assembler::EmitMulOp(Condition cond, int32_t opcode,
402 Register rd, Register rn,
403 Register rm, Register rs) {
404 CHECK_NE(rd, kNoRegister);
405 CHECK_NE(rn, kNoRegister);
406 CHECK_NE(rm, kNoRegister);
407 CHECK_NE(rs, kNoRegister);
408 CHECK_NE(cond, kNoCondition);
409 int32_t encoding = opcode |
410 (static_cast<int32_t>(cond) << kConditionShift) |
411 (static_cast<int32_t>(rn) << kRnShift) |
412 (static_cast<int32_t>(rd) << kRdShift) |
413 (static_cast<int32_t>(rs) << kRsShift) |
414 B7 | B4 |
415 (static_cast<int32_t>(rm) << kRmShift);
416 Emit(encoding);
417}
418
419
420void Assembler::mul(Register rd, Register rn,
421 Register rm, Condition cond) {
422 // Assembler registers rd, rn, rm are encoded as rn, rm, rs.
423 EmitMulOp(cond, 0, R0, rd, rn, rm);
424}
425
426
427void Assembler::mla(Register rd, Register rn,
428 Register rm, Register ra, Condition cond) {
429 // Assembler registers rd, rn, rm, ra are encoded as rn, rm, rs, rd.
430 EmitMulOp(cond, B21, ra, rd, rn, rm);
431}
432
433
434void Assembler::mls(Register rd, Register rn,
435 Register rm, Register ra, Condition cond) {
436 // Assembler registers rd, rn, rm, ra are encoded as rn, rm, rs, rd.
437 EmitMulOp(cond, B22 | B21, ra, rd, rn, rm);
438}
439
440
441void Assembler::umull(Register rd_lo, Register rd_hi,
442 Register rn, Register rm, Condition cond) {
443 // Assembler registers rd_lo, rd_hi, rn, rm are encoded as rd, rn, rm, rs.
444 EmitMulOp(cond, B23, rd_lo, rd_hi, rn, rm);
445}
446
447
448void Assembler::ldr(Register rd, Address ad, Condition cond) {
449 EmitMemOp(cond, true, false, rd, ad);
450}
451
452
453void Assembler::str(Register rd, Address ad, Condition cond) {
454 EmitMemOp(cond, false, false, rd, ad);
455}
456
457
458void Assembler::ldrb(Register rd, Address ad, Condition cond) {
459 EmitMemOp(cond, true, true, rd, ad);
460}
461
462
463void Assembler::strb(Register rd, Address ad, Condition cond) {
464 EmitMemOp(cond, false, true, rd, ad);
465}
466
467
468void Assembler::ldrh(Register rd, Address ad, Condition cond) {
469 EmitMemOpAddressMode3(cond, L | B7 | H | B4, rd, ad);
470}
471
472
473void Assembler::strh(Register rd, Address ad, Condition cond) {
474 EmitMemOpAddressMode3(cond, B7 | H | B4, rd, ad);
475}
476
477
478void Assembler::ldrsb(Register rd, Address ad, Condition cond) {
479 EmitMemOpAddressMode3(cond, L | B7 | B6 | B4, rd, ad);
480}
481
482
483void Assembler::ldrsh(Register rd, Address ad, Condition cond) {
484 EmitMemOpAddressMode3(cond, L | B7 | B6 | H | B4, rd, ad);
485}
486
487
488void Assembler::ldrd(Register rd, Address ad, Condition cond) {
489 CHECK_EQ(rd % 2, 0);
490 EmitMemOpAddressMode3(cond, B7 | B6 | B4, rd, ad);
491}
492
493
494void Assembler::strd(Register rd, Address ad, Condition cond) {
495 CHECK_EQ(rd % 2, 0);
496 EmitMemOpAddressMode3(cond, B7 | B6 | B5 | B4, rd, ad);
497}
498
499
500void Assembler::ldm(BlockAddressMode am,
501 Register base,
502 RegList regs,
503 Condition cond) {
504 EmitMultiMemOp(cond, am, true, base, regs);
505}
506
507
508void Assembler::stm(BlockAddressMode am,
509 Register base,
510 RegList regs,
511 Condition cond) {
512 EmitMultiMemOp(cond, am, false, base, regs);
513}
514
515
516void Assembler::ldrex(Register rt, Register rn, Condition cond) {
517 CHECK_NE(rn, kNoRegister);
518 CHECK_NE(rt, kNoRegister);
519 CHECK_NE(cond, kNoCondition);
520 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
521 B24 |
522 B23 |
523 L |
524 (static_cast<int32_t>(rn) << kLdExRnShift) |
525 (static_cast<int32_t>(rt) << kLdExRtShift) |
526 B11 | B10 | B9 | B8 | B7 | B4 | B3 | B2 | B1 | B0;
527 Emit(encoding);
528}
529
530
531void Assembler::strex(Register rd,
532 Register rt,
533 Register rn,
534 Condition cond) {
535 CHECK_NE(rn, kNoRegister);
536 CHECK_NE(rd, kNoRegister);
537 CHECK_NE(rt, kNoRegister);
538 CHECK_NE(cond, kNoCondition);
539 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
540 B24 |
541 B23 |
542 (static_cast<int32_t>(rn) << kStrExRnShift) |
543 (static_cast<int32_t>(rd) << kStrExRdShift) |
544 B11 | B10 | B9 | B8 | B7 | B4 |
545 (static_cast<int32_t>(rt) << kStrExRtShift);
546 Emit(encoding);
547}
548
549
550void Assembler::clrex() {
551 int32_t encoding = (kSpecialCondition << kConditionShift) |
552 B26 | B24 | B22 | B21 | B20 | (0xff << 12) | B4 | 0xf;
553 Emit(encoding);
554}
555
556
557void Assembler::nop(Condition cond) {
558 CHECK_NE(cond, kNoCondition);
559 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
560 B25 | B24 | B21 | (0xf << 12);
561 Emit(encoding);
562}
563
564
565void Assembler::vmovsr(SRegister sn, Register rt, Condition cond) {
566 CHECK_NE(sn, kNoSRegister);
567 CHECK_NE(rt, kNoRegister);
568 CHECK_NE(rt, SP);
569 CHECK_NE(rt, PC);
570 CHECK_NE(cond, kNoCondition);
571 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
572 B27 | B26 | B25 |
573 ((static_cast<int32_t>(sn) >> 1)*B16) |
574 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
575 ((static_cast<int32_t>(sn) & 1)*B7) | B4;
576 Emit(encoding);
577}
578
579
580void Assembler::vmovrs(Register rt, SRegister sn, Condition cond) {
581 CHECK_NE(sn, kNoSRegister);
582 CHECK_NE(rt, kNoRegister);
583 CHECK_NE(rt, SP);
584 CHECK_NE(rt, PC);
585 CHECK_NE(cond, kNoCondition);
586 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
587 B27 | B26 | B25 | B20 |
588 ((static_cast<int32_t>(sn) >> 1)*B16) |
589 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
590 ((static_cast<int32_t>(sn) & 1)*B7) | B4;
591 Emit(encoding);
592}
593
594
595void Assembler::vmovsrr(SRegister sm, Register rt, Register rt2,
596 Condition cond) {
597 CHECK_NE(sm, kNoSRegister);
598 CHECK_NE(sm, S31);
599 CHECK_NE(rt, kNoRegister);
600 CHECK_NE(rt, SP);
601 CHECK_NE(rt, PC);
602 CHECK_NE(rt2, kNoRegister);
603 CHECK_NE(rt2, SP);
604 CHECK_NE(rt2, PC);
605 CHECK_NE(cond, kNoCondition);
606 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
607 B27 | B26 | B22 |
608 (static_cast<int32_t>(rt2)*B16) |
609 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
610 ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
611 (static_cast<int32_t>(sm) >> 1);
612 Emit(encoding);
613}
614
615
616void Assembler::vmovrrs(Register rt, Register rt2, SRegister sm,
617 Condition cond) {
618 CHECK_NE(sm, kNoSRegister);
619 CHECK_NE(sm, S31);
620 CHECK_NE(rt, kNoRegister);
621 CHECK_NE(rt, SP);
622 CHECK_NE(rt, PC);
623 CHECK_NE(rt2, kNoRegister);
624 CHECK_NE(rt2, SP);
625 CHECK_NE(rt2, PC);
626 CHECK_NE(rt, rt2);
627 CHECK_NE(cond, kNoCondition);
628 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
629 B27 | B26 | B22 | B20 |
630 (static_cast<int32_t>(rt2)*B16) |
631 (static_cast<int32_t>(rt)*B12) | B11 | B9 |
632 ((static_cast<int32_t>(sm) & 1)*B5) | B4 |
633 (static_cast<int32_t>(sm) >> 1);
634 Emit(encoding);
635}
636
637
638void Assembler::vmovdrr(DRegister dm, Register rt, Register rt2,
639 Condition cond) {
640 CHECK_NE(dm, kNoDRegister);
641 CHECK_NE(rt, kNoRegister);
642 CHECK_NE(rt, SP);
643 CHECK_NE(rt, PC);
644 CHECK_NE(rt2, kNoRegister);
645 CHECK_NE(rt2, SP);
646 CHECK_NE(rt2, PC);
647 CHECK_NE(cond, kNoCondition);
648 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
649 B27 | B26 | B22 |
650 (static_cast<int32_t>(rt2)*B16) |
651 (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
652 ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
653 (static_cast<int32_t>(dm) & 0xf);
654 Emit(encoding);
655}
656
657
658void Assembler::vmovrrd(Register rt, Register rt2, DRegister dm,
659 Condition cond) {
660 CHECK_NE(dm, kNoDRegister);
661 CHECK_NE(rt, kNoRegister);
662 CHECK_NE(rt, SP);
663 CHECK_NE(rt, PC);
664 CHECK_NE(rt2, kNoRegister);
665 CHECK_NE(rt2, SP);
666 CHECK_NE(rt2, PC);
667 CHECK_NE(rt, rt2);
668 CHECK_NE(cond, kNoCondition);
669 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
670 B27 | B26 | B22 | B20 |
671 (static_cast<int32_t>(rt2)*B16) |
672 (static_cast<int32_t>(rt)*B12) | B11 | B9 | B8 |
673 ((static_cast<int32_t>(dm) >> 4)*B5) | B4 |
674 (static_cast<int32_t>(dm) & 0xf);
675 Emit(encoding);
676}
677
678
679void Assembler::vldrs(SRegister sd, Address ad, Condition cond) {
680 CHECK_NE(sd, kNoSRegister);
681 CHECK_NE(cond, kNoCondition);
682 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
683 B27 | B26 | B24 | B20 |
684 ((static_cast<int32_t>(sd) & 1)*B22) |
685 ((static_cast<int32_t>(sd) >> 1)*B12) |
686 B11 | B9 | ad.vencoding();
687 Emit(encoding);
688}
689
690
691void Assembler::vstrs(SRegister sd, Address ad, Condition cond) {
692 CHECK_NE(static_cast<Register>(ad.encoding_ & (0xf << kRnShift)), PC);
693 CHECK_NE(sd, kNoSRegister);
694 CHECK_NE(cond, kNoCondition);
695 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
696 B27 | B26 | B24 |
697 ((static_cast<int32_t>(sd) & 1)*B22) |
698 ((static_cast<int32_t>(sd) >> 1)*B12) |
699 B11 | B9 | ad.vencoding();
700 Emit(encoding);
701}
702
703
704void Assembler::vldrd(DRegister dd, Address ad, Condition cond) {
705 CHECK_NE(dd, kNoDRegister);
706 CHECK_NE(cond, kNoCondition);
707 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
708 B27 | B26 | B24 | B20 |
709 ((static_cast<int32_t>(dd) >> 4)*B22) |
710 ((static_cast<int32_t>(dd) & 0xf)*B12) |
711 B11 | B9 | B8 | ad.vencoding();
712 Emit(encoding);
713}
714
715
716void Assembler::vstrd(DRegister dd, Address ad, Condition cond) {
717 CHECK_NE(static_cast<Register>(ad.encoding_ & (0xf << kRnShift)), PC);
718 CHECK_NE(dd, kNoDRegister);
719 CHECK_NE(cond, kNoCondition);
720 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
721 B27 | B26 | B24 |
722 ((static_cast<int32_t>(dd) >> 4)*B22) |
723 ((static_cast<int32_t>(dd) & 0xf)*B12) |
724 B11 | B9 | B8 | ad.vencoding();
725 Emit(encoding);
726}
727
728
729void Assembler::EmitVFPsss(Condition cond, int32_t opcode,
730 SRegister sd, SRegister sn, SRegister sm) {
731 CHECK_NE(sd, kNoSRegister);
732 CHECK_NE(sn, kNoSRegister);
733 CHECK_NE(sm, kNoSRegister);
734 CHECK_NE(cond, kNoCondition);
735 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
736 B27 | B26 | B25 | B11 | B9 | opcode |
737 ((static_cast<int32_t>(sd) & 1)*B22) |
738 ((static_cast<int32_t>(sn) >> 1)*B16) |
739 ((static_cast<int32_t>(sd) >> 1)*B12) |
740 ((static_cast<int32_t>(sn) & 1)*B7) |
741 ((static_cast<int32_t>(sm) & 1)*B5) |
742 (static_cast<int32_t>(sm) >> 1);
743 Emit(encoding);
744}
745
746
747void Assembler::EmitVFPddd(Condition cond, int32_t opcode,
748 DRegister dd, DRegister dn, DRegister dm) {
749 CHECK_NE(dd, kNoDRegister);
750 CHECK_NE(dn, kNoDRegister);
751 CHECK_NE(dm, kNoDRegister);
752 CHECK_NE(cond, kNoCondition);
753 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
754 B27 | B26 | B25 | B11 | B9 | B8 | opcode |
755 ((static_cast<int32_t>(dd) >> 4)*B22) |
756 ((static_cast<int32_t>(dn) & 0xf)*B16) |
757 ((static_cast<int32_t>(dd) & 0xf)*B12) |
758 ((static_cast<int32_t>(dn) >> 4)*B7) |
759 ((static_cast<int32_t>(dm) >> 4)*B5) |
760 (static_cast<int32_t>(dm) & 0xf);
761 Emit(encoding);
762}
763
764
765void Assembler::vmovs(SRegister sd, SRegister sm, Condition cond) {
766 EmitVFPsss(cond, B23 | B21 | B20 | B6, sd, S0, sm);
767}
768
769
770void Assembler::vmovd(DRegister dd, DRegister dm, Condition cond) {
771 EmitVFPddd(cond, B23 | B21 | B20 | B6, dd, D0, dm);
772}
773
774
775bool Assembler::vmovs(SRegister sd, float s_imm, Condition cond) {
776 uint32_t imm32 = bit_cast<uint32_t, float>(s_imm);
777 if (((imm32 & ((1 << 19) - 1)) == 0) &&
778 ((((imm32 >> 25) & ((1 << 6) - 1)) == (1 << 5)) ||
779 (((imm32 >> 25) & ((1 << 6) - 1)) == ((1 << 5) -1)))) {
780 uint8_t imm8 = ((imm32 >> 31) << 7) | (((imm32 >> 29) & 1) << 6) |
781 ((imm32 >> 19) & ((1 << 6) -1));
782 EmitVFPsss(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | (imm8 & 0xf),
783 sd, S0, S0);
784 return true;
785 }
786 return false;
787}
788
789
790bool Assembler::vmovd(DRegister dd, double d_imm, Condition cond) {
791 uint64_t imm64 = bit_cast<uint64_t, double>(d_imm);
792 if (((imm64 & ((1LL << 48) - 1)) == 0) &&
793 ((((imm64 >> 54) & ((1 << 9) - 1)) == (1 << 8)) ||
794 (((imm64 >> 54) & ((1 << 9) - 1)) == ((1 << 8) -1)))) {
795 uint8_t imm8 = ((imm64 >> 63) << 7) | (((imm64 >> 61) & 1) << 6) |
796 ((imm64 >> 48) & ((1 << 6) -1));
797 EmitVFPddd(cond, B23 | B21 | B20 | ((imm8 >> 4)*B16) | B8 | (imm8 & 0xf),
798 dd, D0, D0);
799 return true;
800 }
801 return false;
802}
803
804
805void Assembler::vadds(SRegister sd, SRegister sn, SRegister sm,
806 Condition cond) {
807 EmitVFPsss(cond, B21 | B20, sd, sn, sm);
808}
809
810
811void Assembler::vaddd(DRegister dd, DRegister dn, DRegister dm,
812 Condition cond) {
813 EmitVFPddd(cond, B21 | B20, dd, dn, dm);
814}
815
816
817void Assembler::vsubs(SRegister sd, SRegister sn, SRegister sm,
818 Condition cond) {
819 EmitVFPsss(cond, B21 | B20 | B6, sd, sn, sm);
820}
821
822
823void Assembler::vsubd(DRegister dd, DRegister dn, DRegister dm,
824 Condition cond) {
825 EmitVFPddd(cond, B21 | B20 | B6, dd, dn, dm);
826}
827
828
829void Assembler::vmuls(SRegister sd, SRegister sn, SRegister sm,
830 Condition cond) {
831 EmitVFPsss(cond, B21, sd, sn, sm);
832}
833
834
835void Assembler::vmuld(DRegister dd, DRegister dn, DRegister dm,
836 Condition cond) {
837 EmitVFPddd(cond, B21, dd, dn, dm);
838}
839
840
841void Assembler::vmlas(SRegister sd, SRegister sn, SRegister sm,
842 Condition cond) {
843 EmitVFPsss(cond, 0, sd, sn, sm);
844}
845
846
847void Assembler::vmlad(DRegister dd, DRegister dn, DRegister dm,
848 Condition cond) {
849 EmitVFPddd(cond, 0, dd, dn, dm);
850}
851
852
853void Assembler::vmlss(SRegister sd, SRegister sn, SRegister sm,
854 Condition cond) {
855 EmitVFPsss(cond, B6, sd, sn, sm);
856}
857
858
859void Assembler::vmlsd(DRegister dd, DRegister dn, DRegister dm,
860 Condition cond) {
861 EmitVFPddd(cond, B6, dd, dn, dm);
862}
863
864
865void Assembler::vdivs(SRegister sd, SRegister sn, SRegister sm,
866 Condition cond) {
867 EmitVFPsss(cond, B23, sd, sn, sm);
868}
869
870
871void Assembler::vdivd(DRegister dd, DRegister dn, DRegister dm,
872 Condition cond) {
873 EmitVFPddd(cond, B23, dd, dn, dm);
874}
875
876
877void Assembler::vabss(SRegister sd, SRegister sm, Condition cond) {
878 EmitVFPsss(cond, B23 | B21 | B20 | B7 | B6, sd, S0, sm);
879}
880
881
882void Assembler::vabsd(DRegister dd, DRegister dm, Condition cond) {
883 EmitVFPddd(cond, B23 | B21 | B20 | B7 | B6, dd, D0, dm);
884}
885
886
887void Assembler::vnegs(SRegister sd, SRegister sm, Condition cond) {
888 EmitVFPsss(cond, B23 | B21 | B20 | B16 | B6, sd, S0, sm);
889}
890
891
892void Assembler::vnegd(DRegister dd, DRegister dm, Condition cond) {
893 EmitVFPddd(cond, B23 | B21 | B20 | B16 | B6, dd, D0, dm);
894}
895
896
897void Assembler::vsqrts(SRegister sd, SRegister sm, Condition cond) {
898 EmitVFPsss(cond, B23 | B21 | B20 | B16 | B7 | B6, sd, S0, sm);
899}
900
901void Assembler::vsqrtd(DRegister dd, DRegister dm, Condition cond) {
902 EmitVFPddd(cond, B23 | B21 | B20 | B16 | B7 | B6, dd, D0, dm);
903}
904
905
906void Assembler::EmitVFPsd(Condition cond, int32_t opcode,
907 SRegister sd, DRegister dm) {
908 CHECK_NE(sd, kNoSRegister);
909 CHECK_NE(dm, kNoDRegister);
910 CHECK_NE(cond, kNoCondition);
911 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
912 B27 | B26 | B25 | B11 | B9 | opcode |
913 ((static_cast<int32_t>(sd) & 1)*B22) |
914 ((static_cast<int32_t>(sd) >> 1)*B12) |
915 ((static_cast<int32_t>(dm) >> 4)*B5) |
916 (static_cast<int32_t>(dm) & 0xf);
917 Emit(encoding);
918}
919
920
921void Assembler::EmitVFPds(Condition cond, int32_t opcode,
922 DRegister dd, SRegister sm) {
923 CHECK_NE(dd, kNoDRegister);
924 CHECK_NE(sm, kNoSRegister);
925 CHECK_NE(cond, kNoCondition);
926 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
927 B27 | B26 | B25 | B11 | B9 | opcode |
928 ((static_cast<int32_t>(dd) >> 4)*B22) |
929 ((static_cast<int32_t>(dd) & 0xf)*B12) |
930 ((static_cast<int32_t>(sm) & 1)*B5) |
931 (static_cast<int32_t>(sm) >> 1);
932 Emit(encoding);
933}
934
935
936void Assembler::vcvtsd(SRegister sd, DRegister dm, Condition cond) {
937 EmitVFPsd(cond, B23 | B21 | B20 | B18 | B17 | B16 | B8 | B7 | B6, sd, dm);
938}
939
940
941void Assembler::vcvtds(DRegister dd, SRegister sm, Condition cond) {
942 EmitVFPds(cond, B23 | B21 | B20 | B18 | B17 | B16 | B7 | B6, dd, sm);
943}
944
945
946void Assembler::vcvtis(SRegister sd, SRegister sm, Condition cond) {
947 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B16 | B7 | B6, sd, S0, sm);
948}
949
950
951void Assembler::vcvtid(SRegister sd, DRegister dm, Condition cond) {
952 EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B16 | B8 | B7 | B6, sd, dm);
953}
954
955
956void Assembler::vcvtsi(SRegister sd, SRegister sm, Condition cond) {
957 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B7 | B6, sd, S0, sm);
958}
959
960
961void Assembler::vcvtdi(DRegister dd, SRegister sm, Condition cond) {
962 EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B7 | B6, dd, sm);
963}
964
965
966void Assembler::vcvtus(SRegister sd, SRegister sm, Condition cond) {
967 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B18 | B7 | B6, sd, S0, sm);
968}
969
970
971void Assembler::vcvtud(SRegister sd, DRegister dm, Condition cond) {
972 EmitVFPsd(cond, B23 | B21 | B20 | B19 | B18 | B8 | B7 | B6, sd, dm);
973}
974
975
976void Assembler::vcvtsu(SRegister sd, SRegister sm, Condition cond) {
977 EmitVFPsss(cond, B23 | B21 | B20 | B19 | B6, sd, S0, sm);
978}
979
980
981void Assembler::vcvtdu(DRegister dd, SRegister sm, Condition cond) {
982 EmitVFPds(cond, B23 | B21 | B20 | B19 | B8 | B6, dd, sm);
983}
984
985
986void Assembler::vcmps(SRegister sd, SRegister sm, Condition cond) {
987 EmitVFPsss(cond, B23 | B21 | B20 | B18 | B6, sd, S0, sm);
988}
989
990
991void Assembler::vcmpd(DRegister dd, DRegister dm, Condition cond) {
992 EmitVFPddd(cond, B23 | B21 | B20 | B18 | B6, dd, D0, dm);
993}
994
995
996void Assembler::vcmpsz(SRegister sd, Condition cond) {
997 EmitVFPsss(cond, B23 | B21 | B20 | B18 | B16 | B6, sd, S0, S0);
998}
999
1000
1001void Assembler::vcmpdz(DRegister dd, Condition cond) {
1002 EmitVFPddd(cond, B23 | B21 | B20 | B18 | B16 | B6, dd, D0, D0);
1003}
1004
1005
1006void Assembler::vmstat(Condition cond) { // VMRS APSR_nzcv, FPSCR
1007 CHECK_NE(cond, kNoCondition);
1008 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
1009 B27 | B26 | B25 | B23 | B22 | B21 | B20 | B16 |
1010 (static_cast<int32_t>(PC)*B12) |
1011 B11 | B9 | B4;
1012 Emit(encoding);
1013}
1014
1015
1016void Assembler::svc(uint32_t imm24) {
1017 CHECK(IsUint(24, imm24));
1018 int32_t encoding = (AL << kConditionShift) | B27 | B26 | B25 | B24 | imm24;
1019 Emit(encoding);
1020}
1021
1022
1023void Assembler::bkpt(uint16_t imm16) {
1024 int32_t encoding = (AL << kConditionShift) | B24 | B21 |
1025 ((imm16 >> 4) << 8) | B6 | B5 | B4 | (imm16 & 0xf);
1026 Emit(encoding);
1027}
1028
1029
1030void Assembler::b(Label* label, Condition cond) {
1031 EmitBranch(cond, label, false);
1032}
1033
1034
1035void Assembler::bl(Label* label, Condition cond) {
1036 EmitBranch(cond, label, true);
1037}
1038
1039
1040void Assembler::blx(Register rm, Condition cond) {
1041 CHECK_NE(rm, kNoRegister);
1042 CHECK_NE(cond, kNoCondition);
1043 int32_t encoding = (static_cast<int32_t>(cond) << kConditionShift) |
1044 B24 | B21 | (0xfff << 8) | B5 | B4 |
1045 (static_cast<int32_t>(rm) << kRmShift);
1046 Emit(encoding);
1047}
1048
1049
1050void Assembler::MarkExceptionHandler(Label* label) {
1051 EmitType01(AL, 1, TST, 1, PC, R0, ShifterOperand(0));
1052 Label l;
1053 b(&l);
1054 EmitBranch(AL, label, false);
1055 Bind(&l);
1056}
1057
1058
// Binds |label| to the current end of the buffer and back-patches every
// branch previously linked to it. While unbound, each linked branch stores
// (encoded in its offset field) the position of the previous link, forming a
// chain that is walked and rewritten here.
void Assembler::Bind(Label* label) {
  CHECK(!label->IsBound());  // A label may only be bound once.
  int bound_pc = buffer_.Size();
  while (label->IsLinked()) {
    int32_t position = label->Position();
    // The instruction at |position| holds the next link in its offset bits.
    int32_t next = buffer_.Load<int32_t>(position);
    // Patch this branch with its real offset to the bind point.
    int32_t encoded = Assembler::EncodeBranchOffset(bound_pc - position, next);
    buffer_.Store<int32_t>(position, encoded);
    // Advance to the previous branch in the chain.
    label->position_ = Assembler::DecodeBranchOffset(next);
  }
  label->BindTo(bound_pc);
}
1071
1072
// Embeds the 32-bit value |data| in the instruction stream as a sequence of
// tst instructions, one byte per instruction (least-significant byte first).
// Intermediate chunks use condition VS; the final chunk (the remaining
// high-order bits, guaranteed <= 8 bits) uses MI, marking the end of the
// sequence for whatever later decodes it.
void Assembler::EncodeUint32InTstInstructions(uint32_t data) {
  // TODO: Consider using movw ip, <16 bits>.
  while (!IsUint(8, data)) {
    tst(R0, ShifterOperand(data & 0xFF), VS);
    data >>= 8;
  }
  tst(R0, ShifterOperand(data), MI);
}
1081
Ian Rogersb033c752011-07-20 12:22:35 -07001082
// Packs the byte offset |offset| into the 24-bit branch-offset field of the
// instruction word |inst|, preserving all non-offset bits. The inverse of
// DecodeBranchOffset.
int32_t Assembler::EncodeBranchOffset(int offset, int32_t inst) {
  // The offset is off by 8 due to the way the ARM CPUs read PC.
  offset -= 8;
  CHECK(IsAligned(offset, 4));  // Branch targets are word-aligned.
  // The (word) offset must fit in the signed 24-bit field.
  CHECK(IsInt(CountOneBits(kBranchOffsetMask), offset));

  // Properly preserve only the bits supported in the instruction.
  offset >>= 2;  // Bytes -> words.
  offset &= kBranchOffsetMask;
  return (inst & ~kBranchOffsetMask) | offset;
}
1094
1095
1096int Assembler::DecodeBranchOffset(int32_t inst) {
1097 // Sign-extend, left-shift by 2, then add 8.
1098 return ((((inst & kBranchOffsetMask) << 8) >> 6) + 8);
1099}
1100
// In-place variant: rd = rd + value.
void Assembler::AddConstant(Register rd, int32_t value, Condition cond) {
  AddConstant(rd, rd, value, cond);
}
1104
1105
// Emits rd = rn + value using the shortest instruction sequence available:
// a plain mov for value == 0, a single add/sub when the constant (or its
// negation) fits an immediate shifter operand, an mvn+add/sub pair via IP,
// or finally a movw/movt pair to materialize the full 32-bit constant in IP.
void Assembler::AddConstant(Register rd, Register rn, int32_t value,
                            Condition cond) {
  if (value == 0) {
    if (rd != rn) {
      mov(rd, ShifterOperand(rn), cond);
    }
    return;
  }
  // We prefer to select the shorter code sequence rather than selecting add for
  // positive values and sub for negatives ones, which would slightly improve
  // the readability of generated code for some constants.
  ShifterOperand shifter_op;
  if (ShifterOperand::CanHold(value, &shifter_op)) {
    add(rd, rn, shifter_op, cond);
  } else if (ShifterOperand::CanHold(-value, &shifter_op)) {
    sub(rd, rn, shifter_op, cond);
  } else {
    // Constant needs to be materialized in IP; rn must not alias it.
    CHECK(rn != IP);
    if (ShifterOperand::CanHold(~value, &shifter_op)) {
      mvn(IP, shifter_op, cond);
      add(rd, rn, ShifterOperand(IP), cond);
    } else if (ShifterOperand::CanHold(~(-value), &shifter_op)) {
      mvn(IP, shifter_op, cond);
      sub(rd, rn, ShifterOperand(IP), cond);
    } else {
      // Full 32-bit constant: movw loads the low half, movt (only if needed)
      // the high half.
      movw(IP, Low16Bits(value), cond);
      uint16_t value_high = High16Bits(value);
      if (value_high != 0) {
        movt(IP, value_high, cond);
      }
      add(rd, rn, ShifterOperand(IP), cond);
    }
  }
}
1140
1141
// Same constant-selection strategy as AddConstant, but uses the flag-setting
// adds/subs forms so the condition codes reflect the final addition. Note
// there is no value == 0 shortcut here: the flags must still be updated.
void Assembler::AddConstantSetFlags(Register rd, Register rn, int32_t value,
                                    Condition cond) {
  ShifterOperand shifter_op;
  if (ShifterOperand::CanHold(value, &shifter_op)) {
    adds(rd, rn, shifter_op, cond);
  } else if (ShifterOperand::CanHold(-value, &shifter_op)) {
    subs(rd, rn, shifter_op, cond);
  } else {
    // Constant needs to be materialized in IP; rn must not alias it.
    CHECK(rn != IP);
    if (ShifterOperand::CanHold(~value, &shifter_op)) {
      mvn(IP, shifter_op, cond);  // mvn does not disturb the final adds flags.
      adds(rd, rn, ShifterOperand(IP), cond);
    } else if (ShifterOperand::CanHold(~(-value), &shifter_op)) {
      mvn(IP, shifter_op, cond);
      subs(rd, rn, ShifterOperand(IP), cond);
    } else {
      // Full 32-bit constant via movw/movt, then the flag-setting add.
      movw(IP, Low16Bits(value), cond);
      uint16_t value_high = High16Bits(value);
      if (value_high != 0) {
        movt(IP, value_high, cond);
      }
      adds(rd, rn, ShifterOperand(IP), cond);
    }
  }
}
1167
1168
// Loads the 32-bit constant |value| into |rd|: a single mov when the value
// fits an immediate shifter operand, a single mvn when its complement does,
// otherwise a movw/movt pair (movt skipped when the high half is zero).
void Assembler::LoadImmediate(Register rd, int32_t value, Condition cond) {
  ShifterOperand shifter_op;
  if (ShifterOperand::CanHold(value, &shifter_op)) {
    mov(rd, shifter_op, cond);
  } else if (ShifterOperand::CanHold(~value, &shifter_op)) {
    mvn(rd, shifter_op, cond);
  } else {
    movw(rd, Low16Bits(value), cond);
    uint16_t value_high = High16Bits(value);
    if (value_high != 0) {
      movt(rd, value_high, cond);
    }
  }
}
1183
1184
1185bool Address::CanHoldLoadOffset(LoadOperandType type, int offset) {
1186 switch (type) {
1187 case kLoadSignedByte:
1188 case kLoadSignedHalfword:
1189 case kLoadUnsignedHalfword:
1190 case kLoadWordPair:
1191 return IsAbsoluteUint(8, offset); // Addressing mode 3.
1192 case kLoadUnsignedByte:
1193 case kLoadWord:
1194 return IsAbsoluteUint(12, offset); // Addressing mode 2.
1195 case kLoadSWord:
1196 case kLoadDWord:
1197 return IsAbsoluteUint(10, offset); // VFP addressing mode.
1198 default:
1199 LOG(FATAL) << "UNREACHABLE";
1200 return false;
1201 }
1202}
1203
1204
1205bool Address::CanHoldStoreOffset(StoreOperandType type, int offset) {
1206 switch (type) {
1207 case kStoreHalfword:
1208 case kStoreWordPair:
1209 return IsAbsoluteUint(8, offset); // Addressing mode 3.
1210 case kStoreByte:
1211 case kStoreWord:
1212 return IsAbsoluteUint(12, offset); // Addressing mode 2.
1213 case kStoreSWord:
1214 case kStoreDWord:
1215 return IsAbsoluteUint(10, offset); // VFP addressing mode.
1216 default:
1217 LOG(FATAL) << "UNREACHABLE";
1218 return false;
1219 }
1220}
1221
1222
1223// Implementation note: this method must emit at most one instruction when
1224// Address::CanHoldLoadOffset.
// Loads |reg| from [base + offset] with the instruction selected by |type|.
// When the offset fits the addressing mode, exactly one instruction is
// emitted (callers rely on this); otherwise the effective address is first
// computed into IP and the access uses offset 0.
void Assembler::LoadFromOffset(LoadOperandType type,
                               Register reg,
                               Register base,
                               int32_t offset,
                               Condition cond) {
  if (!Address::CanHoldLoadOffset(type, offset)) {
    CHECK(base != IP);  // IP is needed here as the address scratch register.
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldLoadOffset(type, offset));
  switch (type) {
    case kLoadSignedByte:
      ldrsb(reg, Address(base, offset), cond);
      break;
    case kLoadUnsignedByte:
      ldrb(reg, Address(base, offset), cond);
      break;
    case kLoadSignedHalfword:
      ldrsh(reg, Address(base, offset), cond);
      break;
    case kLoadUnsignedHalfword:
      ldrh(reg, Address(base, offset), cond);
      break;
    case kLoadWord:
      ldr(reg, Address(base, offset), cond);
      break;
    case kLoadWordPair:
      ldrd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
  }
}
1261
Carl Shapiroe2d373e2011-07-25 15:20:06 -07001262// Implementation note: this method must emit at most one instruction when
1263// Address::CanHoldLoadOffset, as expected by JIT::GuardedLoadFromOffset.
1264void Assembler::LoadSFromOffset(SRegister reg,
1265 Register base,
1266 int32_t offset,
1267 Condition cond) {
1268 if (!Address::CanHoldLoadOffset(kLoadSWord, offset)) {
1269 CHECK_NE(base, IP);
1270 LoadImmediate(IP, offset, cond);
1271 add(IP, IP, ShifterOperand(base), cond);
1272 base = IP;
1273 offset = 0;
1274 }
1275 CHECK(Address::CanHoldLoadOffset(kLoadSWord, offset));
1276 vldrs(reg, Address(base, offset), cond);
1277}
1278
1279// Implementation note: this method must emit at most one instruction when
1280// Address::CanHoldLoadOffset, as expected by JIT::GuardedLoadFromOffset.
1281void Assembler::LoadDFromOffset(DRegister reg,
1282 Register base,
1283 int32_t offset,
1284 Condition cond) {
1285 if (!Address::CanHoldLoadOffset(kLoadDWord, offset)) {
1286 CHECK_NE(base, IP);
1287 LoadImmediate(IP, offset, cond);
1288 add(IP, IP, ShifterOperand(base), cond);
1289 base = IP;
1290 offset = 0;
1291 }
1292 CHECK(Address::CanHoldLoadOffset(kLoadDWord, offset));
1293 vldrd(reg, Address(base, offset), cond);
1294}
Ian Rogersb033c752011-07-20 12:22:35 -07001295
1296// Implementation note: this method must emit at most one instruction when
1297// Address::CanHoldStoreOffset.
// Stores |reg| to [base + offset] with the instruction selected by |type|.
// When the offset fits the addressing mode, exactly one instruction is
// emitted (callers rely on this); otherwise the effective address is first
// computed into IP. |reg| must not be IP since IP is clobbered in that case.
void Assembler::StoreToOffset(StoreOperandType type,
                              Register reg,
                              Register base,
                              int32_t offset,
                              Condition cond) {
  if (!Address::CanHoldStoreOffset(type, offset)) {
    CHECK(reg != IP);   // IP would be clobbered before the store.
    CHECK(base != IP);  // IP is needed as the address scratch register.
    LoadImmediate(IP, offset, cond);
    add(IP, IP, ShifterOperand(base), cond);
    base = IP;
    offset = 0;
  }
  CHECK(Address::CanHoldStoreOffset(type, offset));
  switch (type) {
    case kStoreByte:
      strb(reg, Address(base, offset), cond);
      break;
    case kStoreHalfword:
      strh(reg, Address(base, offset), cond);
      break;
    case kStoreWord:
      str(reg, Address(base, offset), cond);
      break;
    case kStoreWordPair:
      strd(reg, Address(base, offset), cond);
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
  }
}
1329
Carl Shapiroe2d373e2011-07-25 15:20:06 -07001330// Implementation note: this method must emit at most one instruction when
1331// Address::CanHoldStoreOffset, as expected by JIT::GuardedStoreToOffset.
1332void Assembler::StoreSToOffset(SRegister reg,
1333 Register base,
1334 int32_t offset,
1335 Condition cond) {
1336 if (!Address::CanHoldStoreOffset(kStoreSWord, offset)) {
1337 CHECK_NE(base, IP);
1338 LoadImmediate(IP, offset, cond);
1339 add(IP, IP, ShifterOperand(base), cond);
1340 base = IP;
1341 offset = 0;
1342 }
1343 CHECK(Address::CanHoldStoreOffset(kStoreSWord, offset));
1344 vstrs(reg, Address(base, offset), cond);
1345}
1346
1347// Implementation note: this method must emit at most one instruction when
1348// Address::CanHoldStoreOffset, as expected by JIT::GuardedStoreSToOffset.
1349void Assembler::StoreDToOffset(DRegister reg,
1350 Register base,
1351 int32_t offset,
1352 Condition cond) {
1353 if (!Address::CanHoldStoreOffset(kStoreDWord, offset)) {
1354 CHECK_NE(base, IP);
1355 LoadImmediate(IP, offset, cond);
1356 add(IP, IP, ShifterOperand(base), cond);
1357 base = IP;
1358 offset = 0;
1359 }
1360 CHECK(Address::CanHoldStoreOffset(kStoreDWord, offset));
1361 vstrd(reg, Address(base, offset), cond);
1362}
1363
Carl Shapiro9b9ba282011-08-14 15:30:39 -07001364void Assembler::Push(Register rd, Condition cond) {
1365 str(rd, Address(SP, -kRegisterSize, Address::PreIndex), cond);
1366}
1367
1368void Assembler::Pop(Register rd, Condition cond) {
1369 ldr(rd, Address(SP, kRegisterSize, Address::PostIndex), cond);
1370}
1371
// Pushes every register in |regs| onto the stack: stmdb sp!, {regs}.
void Assembler::PushList(RegList regs, Condition cond) {
  stm(DB_W, SP, regs, cond);
}
1375
// Pops into every register in |regs| from the stack: ldmia sp!, {regs}.
void Assembler::PopList(RegList regs, Condition cond) {
  ldm(IA_W, SP, regs, cond);
}
1379
1380void Assembler::Mov(Register rd, Register rm, Condition cond) {
1381 if (rd != rm) {
1382 mov(rd, ShifterOperand(rm), cond);
1383 }
1384}
1385
// rd = rm << shift_imm (logical shift left by immediate).
void Assembler::Lsl(Register rd, Register rm, uint32_t shift_imm,
                    Condition cond) {
  CHECK_NE(shift_imm, 0u);  // Do not use Lsl if no shift is wanted.
  mov(rd, ShifterOperand(rm, LSL, shift_imm), cond);
}
1391
1392void Assembler::Lsr(Register rd, Register rm, uint32_t shift_imm,
1393 Condition cond) {
1394 CHECK_NE(shift_imm, 0u); // Do not use Lsr if no shift is wanted.
1395 if (shift_imm == 32) shift_imm = 0; // Comply to UAL syntax.
1396 mov(rd, ShifterOperand(rm, LSR, shift_imm), cond);
1397}
1398
1399void Assembler::Asr(Register rd, Register rm, uint32_t shift_imm,
1400 Condition cond) {
1401 CHECK_NE(shift_imm, 0u); // Do not use Asr if no shift is wanted.
1402 if (shift_imm == 32) shift_imm = 0; // Comply to UAL syntax.
1403 mov(rd, ShifterOperand(rm, ASR, shift_imm), cond);
1404}
1405
// rd = rm rotated right by shift_imm. A rotate amount of 0 would encode RRX,
// so it is rejected here; use Rrx() for that.
void Assembler::Ror(Register rd, Register rm, uint32_t shift_imm,
                    Condition cond) {
  CHECK_NE(shift_imm, 0u);  // Use Rrx instruction.
  mov(rd, ShifterOperand(rm, ROR, shift_imm), cond);
}
1411
// rd = rm rotated right by one bit through the carry flag (RRX), which is
// encoded as ROR with a zero rotate amount.
void Assembler::Rrx(Register rd, Register rm, Condition cond) {
  mov(rd, ShifterOperand(rm, ROR, 0), cond);
}
1415
Ian Rogersb033c752011-07-20 12:22:35 -07001416// Emit code that will create an activation on the stack
void Assembler::BuildFrame(size_t frame_size, ManagedRegister method_reg) {
  CHECK(IsAligned(frame_size, 16));  // Frame sizes are kept 16-byte aligned.
  // TODO: use stm/ldm
  AddConstant(SP, -frame_size);  // Reserve the whole frame at once.
  // Save the return address in the top word of the new frame.
  StoreToOffset(kStoreWord, LR, SP, frame_size - 4);
  // Store the method register at the base of the frame (SP + 0).
  StoreToOffset(kStoreWord, method_reg.AsCoreRegister(), SP, 0);
}
1424
1425// Emit code that will remove an activation from the stack
void Assembler::RemoveFrame(size_t frame_size) {
  CHECK(IsAligned(frame_size, 16));  // Must match the size used in BuildFrame.
  // Reload the return address saved by BuildFrame, pop the frame, and return
  // by moving LR into PC.
  LoadFromOffset(kLoadWord, LR, SP, frame_size - 4);
  AddConstant(SP, frame_size);
  mov(PC, ShifterOperand(LR));
}
1432
// Grows the current frame by |adjust| bytes (SP stays 16-byte aligned).
void Assembler::IncreaseFrameSize(size_t adjust) {
  CHECK(IsAligned(adjust, 16));
  AddConstant(SP, -adjust);
}
1437
// Shrinks the current frame by |adjust| bytes (SP stays 16-byte aligned).
void Assembler::DecreaseFrameSize(size_t adjust) {
  CHECK(IsAligned(adjust, 16));
  AddConstant(SP, adjust);
}
1442
1443// Store bytes from the given register onto the stack
// Spills |src| into the frame slot at |dest|. |size| must match the register
// kind: 0 for no register, 4 for a core or S register, 8 for a register pair
// or D register (the VFP paths do not check |size|).
void Assembler::Store(FrameOffset dest, ManagedRegister src, size_t size) {
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);  // Nothing to store.
  } else if (src.IsCoreRegister()) {
    CHECK_EQ(4u, size);
    StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());
  } else if (src.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    // Low word first, high word in the adjacent slot.
    StoreToOffset(kStoreWord, src.AsRegisterPairLow(), SP, dest.Int32Value());
    StoreToOffset(kStoreWord, src.AsRegisterPairHigh(),
                  SP, dest.Int32Value() + 4);
  } else if (src.IsSRegister()) {
    StoreSToOffset(src.AsSRegister(), SP, dest.Int32Value());
  } else {
    CHECK(src.IsDRegister());  // The only remaining register kind.
    StoreDToOffset(src.AsDRegister(), SP, dest.Int32Value());
  }
}
1462
1463void Assembler::StoreRef(FrameOffset dest, ManagedRegister src) {
1464 CHECK(src.IsCoreRegister());
1465 StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());
1466}
1467
Ian Rogersdf20fe02011-07-20 20:34:16 -07001468void Assembler::StoreRawPtr(FrameOffset dest, ManagedRegister src) {
1469 CHECK(src.IsCoreRegister());
1470 StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());
1471}
1472
Ian Rogersb033c752011-07-20 12:22:35 -07001473void Assembler::CopyRef(FrameOffset dest, FrameOffset src,
1474 ManagedRegister scratch) {
1475 LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP, src.Int32Value());
1476 StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, dest.Int32Value());
1477}
1478
1479void Assembler::LoadRef(ManagedRegister dest, ManagedRegister base,
1480 MemberOffset offs) {
1481 CHECK(dest.IsCoreRegister() && dest.IsCoreRegister());
1482 LoadFromOffset(kLoadWord, dest.AsCoreRegister(),
1483 base.AsCoreRegister(), offs.Int32Value());
1484}
1485
1486void Assembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm,
1487 ManagedRegister scratch) {
1488 CHECK(scratch.IsCoreRegister());
1489 LoadImmediate(scratch.AsCoreRegister(), imm);
1490 StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, dest.Int32Value());
1491}
1492
1493void Assembler::StoreImmediateToThread(ThreadOffset dest, uint32_t imm,
1494 ManagedRegister scratch) {
1495 CHECK(scratch.IsCoreRegister());
1496 LoadImmediate(scratch.AsCoreRegister(), imm);
1497 StoreToOffset(kStoreWord, scratch.AsCoreRegister(), TR, dest.Int32Value());
1498}
1499
// Fills |dest| from the frame slot at |src|; the inverse of Store(). |size|
// must match the register kind: 0 for no register, 4 for a core register,
// 8 for a register pair (the VFP paths do not check |size|).
void Assembler::Load(ManagedRegister dest, FrameOffset src, size_t size) {
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size);  // Nothing to load.
  } else if (dest.IsCoreRegister()) {
    CHECK_EQ(4u, size);
    LoadFromOffset(kLoadWord, dest.AsCoreRegister(), SP, src.Int32Value());
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    // Low word first, high word from the adjacent slot.
    LoadFromOffset(kLoadWord, dest.AsRegisterPairLow(), SP, src.Int32Value());
    LoadFromOffset(kLoadWord, dest.AsRegisterPairHigh(),
                   SP, src.Int32Value() + 4);
  } else if (dest.IsSRegister()) {
    LoadSFromOffset(dest.AsSRegister(), SP, src.Int32Value());
  } else {
    CHECK(dest.IsDRegister());  // The only remaining register kind.
    LoadDFromOffset(dest.AsDRegister(), SP, src.Int32Value());
  }
}
1518
1519void Assembler::LoadRawPtrFromThread(ManagedRegister dest, ThreadOffset offs) {
1520 CHECK(dest.IsCoreRegister());
1521 LoadFromOffset(kLoadWord, dest.AsCoreRegister(),
1522 TR, offs.Int32Value());
1523}
1524
1525void Assembler::CopyRawPtrFromThread(FrameOffset fr_offs, ThreadOffset thr_offs,
Carl Shapiroe2d373e2011-07-25 15:20:06 -07001526 ManagedRegister scratch) {
Ian Rogersb033c752011-07-20 12:22:35 -07001527 CHECK(scratch.IsCoreRegister());
1528 LoadFromOffset(kLoadWord, scratch.AsCoreRegister(),
1529 TR, thr_offs.Int32Value());
1530 StoreToOffset(kStoreWord, scratch.AsCoreRegister(),
1531 SP, fr_offs.Int32Value());
1532}
1533
1534void Assembler::CopyRawPtrToThread(ThreadOffset thr_offs, FrameOffset fr_offs,
Carl Shapiroe2d373e2011-07-25 15:20:06 -07001535 ManagedRegister scratch) {
Ian Rogersb033c752011-07-20 12:22:35 -07001536 CHECK(scratch.IsCoreRegister());
1537 LoadFromOffset(kLoadWord, scratch.AsCoreRegister(),
1538 SP, fr_offs.Int32Value());
1539 StoreToOffset(kStoreWord, scratch.AsCoreRegister(),
1540 TR, thr_offs.Int32Value());
1541}
1542
1543void Assembler::StoreStackOffsetToThread(ThreadOffset thr_offs,
1544 FrameOffset fr_offs,
1545 ManagedRegister scratch) {
1546 CHECK(scratch.IsCoreRegister());
1547 AddConstant(scratch.AsCoreRegister(), SP, fr_offs.Int32Value(), AL);
1548 StoreToOffset(kStoreWord, scratch.AsCoreRegister(),
1549 TR, thr_offs.Int32Value());
1550}
1551
// Publishes the current SP into the thread-local slot at [TR + thr_offs].
void Assembler::StoreStackPointerToThread(ThreadOffset thr_offs) {
  StoreToOffset(kStoreWord, SP, TR, thr_offs.Int32Value());
}
1555
// Register-to-register move between managed registers; a no-op when they are
// equal. Only core-to-core moves are implemented; VFP moves abort.
void Assembler::Move(ManagedRegister dest, ManagedRegister src) {
  if (!dest.Equals(src)) {
    if (dest.IsCoreRegister()) {
      CHECK(src.IsCoreRegister());  // Mixed core/VFP moves are unsupported.
      mov(dest.AsCoreRegister(), ShifterOperand(src.AsCoreRegister()));
    } else {
      // TODO: VFP
      UNIMPLEMENTED(FATAL) << ": VFP";
    }
  }
}
1567
// Copies |size| bytes (4 or 8) between two frame slots, word by word through
// the single core scratch register.
void Assembler::Copy(FrameOffset dest, FrameOffset src, ManagedRegister scratch,
                     size_t size) {
  CHECK(scratch.IsCoreRegister());
  CHECK(size == 4 || size == 8);  // Only word and double-word copies.
  if (size == 4) {
    LoadFromOffset(kLoadWord, scratch.AsCoreRegister(),
                   SP, src.Int32Value());
    StoreToOffset(kStoreWord, scratch.AsCoreRegister(),
                  SP, dest.Int32Value());
  } else if (size == 8) {
    // Two word copies through the same scratch register: low word first,
    // then the word at +4.
    LoadFromOffset(kLoadWord, scratch.AsCoreRegister(),
                   SP, src.Int32Value());
    StoreToOffset(kStoreWord, scratch.AsCoreRegister(),
                  SP, dest.Int32Value());
    LoadFromOffset(kLoadWord, scratch.AsCoreRegister(),
                   SP, src.Int32Value() + 4);
    StoreToOffset(kStoreWord, scratch.AsCoreRegister(),
                  SP, dest.Int32Value() + 4);
  }
}
1588
// Computes a stack handle (the address SP + handle_offset of a slot holding
// a reference) into |out_reg|. With |null_allowed|, a null reference in the
// slot (observed via |in_reg|, or loaded from the slot when |in_reg| is no
// register) yields a null handle instead of the slot address.
void Assembler::CreateStackHandle(ManagedRegister out_reg,
                                  FrameOffset handle_offset,
                                  ManagedRegister in_reg, bool null_allowed) {
  CHECK(in_reg.IsNoRegister() || in_reg.IsCoreRegister());
  CHECK(out_reg.IsCoreRegister());
  if (null_allowed) {
    // Null values get a handle value of 0. Otherwise, the handle value is
    // the address in the stack handle block holding the reference.
    // e.g. out_reg = (handle == 0) ? 0 : (SP+handle_offset)
    if (in_reg.IsNoRegister()) {
      // No register holds the reference; load it from the slot so it can be
      // compared against null.
      LoadFromOffset(kLoadWord, out_reg.AsCoreRegister(),
                     SP, handle_offset.Int32Value());
      in_reg = out_reg;
    }
    cmp(in_reg.AsCoreRegister(), ShifterOperand(0));
    if (!out_reg.Equals(in_reg)) {
      // When in/out differ, explicitly zero the output for the null case;
      // when they alias, the register already holds 0 (the null reference).
      LoadImmediate(out_reg.AsCoreRegister(), 0, EQ);
    }
    AddConstant(out_reg.AsCoreRegister(), SP, handle_offset.Int32Value(), NE);
  } else {
    AddConstant(out_reg.AsCoreRegister(), SP, handle_offset.Int32Value(), AL);
  }
}
1612
// Variant of CreateStackHandle that writes the handle into the frame slot
// |out_off| instead of a register, using |scratch| for the computation.
void Assembler::CreateStackHandle(FrameOffset out_off,
                                  FrameOffset handle_offset,
                                  ManagedRegister scratch, bool null_allowed) {
  CHECK(scratch.IsCoreRegister());
  if (null_allowed) {
    // Load the reference so it can be compared against null.
    LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP,
                   handle_offset.Int32Value());
    // Null values get a handle value of 0. Otherwise, the handle value is
    // the address in the stack handle block holding the reference.
    // e.g. scratch = (handle == 0) ? 0 : (SP+handle_offset)
    cmp(scratch.AsCoreRegister(), ShifterOperand(0));
    // When null (EQ), scratch already holds 0; only the non-null case needs
    // the address computation.
    AddConstant(scratch.AsCoreRegister(), SP, handle_offset.Int32Value(), NE);
  } else {
    AddConstant(scratch.AsCoreRegister(), SP, handle_offset.Int32Value(), AL);
  }
  StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, out_off.Int32Value());
}
1630
1631void Assembler::LoadReferenceFromStackHandle(ManagedRegister out_reg,
Ian Rogersdf20fe02011-07-20 20:34:16 -07001632 ManagedRegister in_reg) {
Ian Rogersb033c752011-07-20 12:22:35 -07001633 CHECK(out_reg.IsCoreRegister());
1634 CHECK(in_reg.IsCoreRegister());
1635 Label null_arg;
1636 if (!out_reg.Equals(in_reg)) {
1637 LoadImmediate(out_reg.AsCoreRegister(), 0, EQ);
1638 }
1639 cmp(in_reg.AsCoreRegister(), ShifterOperand(0));
Ian Rogersdf20fe02011-07-20 20:34:16 -07001640 LoadFromOffset(kLoadWord, out_reg.AsCoreRegister(),
1641 in_reg.AsCoreRegister(), 0, NE);
Ian Rogersb033c752011-07-20 12:22:35 -07001642}
1643
// Intentionally a no-op: reference validation is not implemented yet.
void Assembler::ValidateRef(ManagedRegister src, bool could_be_null) {
  // TODO: not validating references
}
1647
// Intentionally a no-op: reference validation is not implemented yet.
void Assembler::ValidateRef(FrameOffset src, bool could_be_null) {
  // TODO: not validating references
}
1651
Ian Rogersdf20fe02011-07-20 20:34:16 -07001652void Assembler::Call(ManagedRegister base, Offset offset,
Ian Rogersb033c752011-07-20 12:22:35 -07001653 ManagedRegister scratch) {
1654 CHECK(base.IsCoreRegister());
1655 CHECK(scratch.IsCoreRegister());
1656 LoadFromOffset(kLoadWord, scratch.AsCoreRegister(),
1657 base.AsCoreRegister(), offset.Int32Value());
1658 blx(scratch.AsCoreRegister());
1659 // TODO: place reference map on call
1660}
1661
Carl Shapiroe2d373e2011-07-25 15:20:06 -07001662void Assembler::Call(FrameOffset base, Offset offset,
1663 ManagedRegister scratch) {
1664 CHECK(scratch.IsCoreRegister());
1665 // Call *(*(SP + base) + offset)
1666 LoadFromOffset(kLoadWord, scratch.AsCoreRegister(),
1667 SP, base.Int32Value());
1668 LoadFromOffset(kLoadWord, scratch.AsCoreRegister(),
1669 scratch.AsCoreRegister(), offset.Int32Value());
1670 blx(scratch.AsCoreRegister());
1671 // TODO: place reference map on call
1672}
1673
Ian Rogers45a76cb2011-07-21 22:00:15 -07001674// Generate code to check if Thread::Current()->suspend_count_ is non-zero
1675// and branch to a SuspendSlowPath if it is. The SuspendSlowPath will continue
1676// at the next instruction.
void Assembler::SuspendPoll(ManagedRegister scratch, ManagedRegister return_reg,
                            FrameOffset return_save_location,
                            size_t return_size) {
  // The slow path saves/restores |return_reg| around the runtime call; the
  // buffer takes ownership of the enqueued slow path.
  SuspendCountSlowPath* slow = new SuspendCountSlowPath(return_reg,
                                                        return_save_location,
                                                        return_size);
  buffer_.EnqueueSlowPath(slow);
  // if (Thread::Current()->suspend_count_ != 0) goto slow path.
  LoadFromOffset(kLoadWord, scratch.AsCoreRegister(),
                 TR, Thread::SuspendCountOffset().Int32Value());
  cmp(scratch.AsCoreRegister(), ShifterOperand(0));
  b(slow->Entry(), NE);
  // The slow path branches back here when done.
  Bind(slow->Continuation());
}
1690
// Out-of-line code for SuspendPoll: spills the return value, calls the
// thread's suspend-count entry point with SP as the argument, restores the
// return value, and resumes at the continuation.
void SuspendCountSlowPath::Emit(Assembler* sp_asm) {
  sp_asm->Bind(&entry_);
  // Save return value
  sp_asm->Store(return_save_location_, return_register_, return_size_);
  // Pass top of stack as argument
  sp_asm->mov(R0, ShifterOperand(SP));
  sp_asm->LoadFromOffset(kLoadWord, R12, TR,
                         Thread::SuspendCountEntryPointOffset().Int32Value());
  // Note: assume that link register will be spilled/filled on method entry/exit
  sp_asm->blx(R12);
  // Reload return value
  sp_asm->Load(return_register_, return_save_location_, return_size_);
  sp_asm->b(&continuation_);
}
1705
1706// Generate code to check if Thread::Current()->exception_ is non-null
1707// and branch to a ExceptionSlowPath if it is.
void Assembler::ExceptionPoll(ManagedRegister scratch) {
  // The buffer takes ownership of the enqueued slow path.
  ExceptionSlowPath* slow = new ExceptionSlowPath();
  buffer_.EnqueueSlowPath(slow);
  // if (Thread::Current()->exception_ != null) goto slow path.
  LoadFromOffset(kLoadWord, scratch.AsCoreRegister(),
                 TR, Thread::ExceptionOffset().Int32Value());
  cmp(scratch.AsCoreRegister(), ShifterOperand(0));
  b(slow->Entry(), NE);
  Bind(slow->Continuation());
}
1717
// Out-of-line code for ExceptionPoll: calls the thread's exception entry
// point with SP as the argument.
void ExceptionSlowPath::Emit(Assembler* sp_asm) {
  sp_asm->Bind(&entry_);
  // Pass top of stack as argument
  sp_asm->mov(R0, ShifterOperand(SP));
  sp_asm->LoadFromOffset(kLoadWord, R12, TR,
                         Thread::ExceptionEntryPointOffset().Int32Value());
  // Note: assume that link register will be spilled/filled on method entry/exit
  sp_asm->blx(R12);
  // TODO: this call should never return as it should make a long jump to
  // the appropriate catch block
  sp_asm->b(&continuation_);
}
1730
Carl Shapiro6b6b5f02011-06-21 15:05:09 -07001731} // namespace art