// Copyright 2011 Google Inc. All Rights Reserved.

#ifndef ART_SRC_ASSEMBLER_H_
#define ART_SRC_ASSEMBLER_H_

#include "src/logging.h"
#include "src/macros.h"
#include "src/memory_region.h"

namespace android {
namespace runtime {

class Assembler;
class AssemblerBuffer;
class AssemblerFixup;


class Label {
 public:
  Label() : position_(0) {}

  ~Label() {
    // Assert if label is being destroyed with unresolved branches pending.
    CHECK(!IsLinked());
  }

  // Returns the position for bound and linked labels. Cannot be used
  // for unused labels.
  int Position() const {
    CHECK(!IsUnused());
    return IsBound() ? -position_ - kPointerSize : position_ - kPointerSize;
  }

  int LinkPosition() const {
    CHECK(IsLinked());
    return position_ - kWordSize;
  }

  bool IsBound() const { return position_ < 0; }
  bool IsUnused() const { return position_ == 0; }
  bool IsLinked() const { return position_ > 0; }

 private:
  int position_;

  void Reinitialize() {
    position_ = 0;
  }

  void BindTo(int position) {
    CHECK(!IsBound());
    position_ = -position - kPointerSize;
    CHECK(IsBound());
  }

  void LinkTo(int position) {
    CHECK(!IsBound());
    position_ = position + kPointerSize;
    CHECK(IsLinked());
  }

  friend class Assembler;
  DISALLOW_COPY_AND_ASSIGN(Label);
};
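
// Encoding sketch (BindTo/LinkTo are private and normally driven by the
// Assembler; the numbers assume a 4-byte kPointerSize):
//
//   Label label;       // unused: position_ == 0
//   label.LinkTo(8);   // linked: position_ == 8 + kPointerSize == 12
//   label.BindTo(8);   // bound:  position_ == -8 - kPointerSize == -12
//   label.Position();  // returns 8 once the label is bound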


// Assembler fixups are positions in generated code that require processing
// after the code has been copied to executable memory. This includes building
// relocation information.
class AssemblerFixup {
 public:
  virtual void Process(const MemoryRegion& region, int position) = 0;
  virtual ~AssemblerFixup() {}

 private:
  AssemblerFixup* previous_;
  int position_;

  AssemblerFixup* previous() const { return previous_; }
  void set_previous(AssemblerFixup* previous) { previous_ = previous; }

  int position() const { return position_; }
  void set_position(int position) { position_ = position; }

  friend class AssemblerBuffer;
};
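
// A minimal sketch of a concrete fixup (the class name is illustrative, not
// part of this header): Process() is invoked when the finished code is copied
// into its destination region, with the position recorded by EmitFixup().
//
//   class RelocationFixup : public AssemblerFixup {
//    public:
//     virtual void Process(const MemoryRegion& region, int position) {
//       // Record or patch relocation data for the instruction emitted at
//       // 'position' within 'region'.
//     }
//   };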


class AssemblerBuffer {
 public:
  AssemblerBuffer();
  ~AssemblerBuffer();

  // Basic support for emitting, loading, and storing.
  template<typename T> void Emit(T value) {
    CHECK(HasEnsuredCapacity());
    *reinterpret_cast<T*>(cursor_) = value;
    cursor_ += sizeof(T);
  }

  template<typename T> T Load(size_t position) {
    CHECK_LE(position, Size() - static_cast<int>(sizeof(T)));
    return *reinterpret_cast<T*>(contents_ + position);
  }

  template<typename T> void Store(size_t position, T value) {
    CHECK_LE(position, Size() - static_cast<int>(sizeof(T)));
    *reinterpret_cast<T*>(contents_ + position) = value;
  }
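
  // Example (a sketch): reserving space for a 32-bit value and patching it
  // in once the final value is known.
  //
  //   size_t slot = buffer.Size();
  //   buffer.Emit<int32_t>(0);             // placeholder
  //   ...
  //   buffer.Store<int32_t>(slot, value);  // back-patch later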

  // Emit a fixup at the current location.
  void EmitFixup(AssemblerFixup* fixup) {
    fixup->set_previous(fixup_);
    fixup->set_position(Size());
    fixup_ = fixup;
  }

  // Get the size of the emitted code.
  size_t Size() const {
    CHECK_GE(cursor_, contents_);
    return cursor_ - contents_;
  }

  byte* contents() const { return contents_; }

  // Copy the assembled instructions into the specified memory block
  // and apply all fixups.
  void FinalizeInstructions(const MemoryRegion& region);

  // To emit an instruction to the assembler buffer, the EnsureCapacity helper
  // must be used to guarantee that the underlying data area is big enough to
  // hold the emitted instruction. Usage:
  //
  //   AssemblerBuffer buffer;
  //   AssemblerBuffer::EnsureCapacity ensured(&buffer);
  //   ... emit bytes for single instruction ...

#ifdef DEBUG

  class EnsureCapacity {
   public:
    explicit EnsureCapacity(AssemblerBuffer* buffer) {
      if (buffer->cursor() >= buffer->limit()) buffer->ExtendCapacity();
      // In debug mode, we save the assembler buffer along with the gap
      // size before we start emitting to the buffer. This allows us to
      // check that any single generated instruction doesn't overflow the
      // limit implied by the minimum gap size.
      buffer_ = buffer;
      gap_ = ComputeGap();
      // Make sure that extending the capacity leaves a big enough gap
      // for any kind of instruction.
      CHECK_GE(gap_, kMinimumGap);
      // Mark the buffer as having ensured the capacity.
      CHECK(!buffer->HasEnsuredCapacity());  // Cannot nest.
      buffer->has_ensured_capacity_ = true;
    }

    ~EnsureCapacity() {
      // Unmark the buffer, so we cannot emit after this.
      buffer_->has_ensured_capacity_ = false;
      // Make sure the generated instruction doesn't take up more
      // space than the minimum gap.
      int delta = gap_ - ComputeGap();
      CHECK(delta <= kMinimumGap);
    }

   private:
    AssemblerBuffer* buffer_;
    int gap_;

    int ComputeGap() { return buffer_->Capacity() - buffer_->Size(); }
  };

  bool has_ensured_capacity_;
  bool HasEnsuredCapacity() const { return has_ensured_capacity_; }

#else

  class EnsureCapacity {
   public:
    explicit EnsureCapacity(AssemblerBuffer* buffer) {
      if (buffer->cursor() >= buffer->limit()) buffer->ExtendCapacity();
    }
  };

  // When building the C++ tests, assertion code is enabled. To allow
  // asserting that the user of the assembler buffer has ensured the
  // capacity needed for emitting, we add a dummy method in non-debug mode.
  bool HasEnsuredCapacity() const { return true; }

#endif

  // Returns the position in the instruction stream.
  int GetPosition() { return cursor_ - contents_; }

 private:
  // The limit is set to kMinimumGap bytes before the end of the data area.
  // This leaves enough space for the longest possible instruction and allows
  // for a single, fast space check per instruction.
  static const int kMinimumGap = 32;

  byte* contents_;
  byte* cursor_;
  byte* limit_;
  AssemblerFixup* fixup_;
  bool fixups_processed_;

  byte* cursor() const { return cursor_; }
  byte* limit() const { return limit_; }
  size_t Capacity() const {
    CHECK_GE(limit_, contents_);
    return (limit_ - contents_) + kMinimumGap;
  }
  // Process the fixup chain, applying each fixup to the given region at the
  // position recorded when the fixup was emitted.
  void ProcessFixups(const MemoryRegion& region);

  // Compute the limit based on the data area and the capacity. See
  // description of kMinimumGap for the reasoning behind the value.
  static byte* ComputeLimit(byte* data, size_t capacity) {
    return data + capacity - kMinimumGap;
  }
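
  // For example, a 256-byte data area gives limit_ == contents_ + 224, so
  // emission is cut off kMinimumGap bytes early while Capacity() still
  // reports the full 256 bytes.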

  void ExtendCapacity();

  friend class AssemblerFixup;
};
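
// End-to-end sketch (the MemoryRegion(pointer, size) constructor, the 'code'
// destination, and the emitted word are illustrative assumptions, not part of
// this header):
//
//   AssemblerBuffer buffer;
//   {
//     AssemblerBuffer::EnsureCapacity ensured(&buffer);
//     buffer.Emit<int32_t>(0xE1A00000);
//   }
//   MemoryRegion region(code, buffer.Size());
//   buffer.FinalizeInstructions(region);  // copy the bytes, apply fixups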

} }  // namespace android::runtime

#if defined(__i386__)
#include "src/assembler_x86.h"
#elif defined(__arm__)
#include "src/assembler_arm.h"
#endif

#endif  // ART_SRC_ASSEMBLER_H_