/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_DEX_LOCAL_VALUE_NUMBERING_H_
#define ART_COMPILER_DEX_LOCAL_VALUE_NUMBERING_H_

#include <memory>

#include "compiler_internals.h"
#include "global_value_numbering.h"
#include "utils/scoped_arena_allocator.h"
#include "utils/scoped_arena_containers.h"

namespace art {

class DexFile;

// Enable/disable tracking values stored in the FILLED_NEW_ARRAY result.
static constexpr bool kLocalValueNumberingEnableFilledNewArrayTracking = true;

class LocalValueNumbering {
 private:
  static constexpr uint16_t kNoValue = GlobalValueNumbering::kNoValue;

 public:
  LocalValueNumbering(GlobalValueNumbering* gvn, BasicBlockId id, ScopedArenaAllocator* allocator);

  BasicBlockId Id() const {
    return id_;
  }

  bool Equals(const LocalValueNumbering& other) const;

  uint16_t GetSRegValueName(uint16_t s_reg) const {
    return GetOperandValue(s_reg);
  }

  void SetValueNameNullChecked(uint16_t value_name) {
    null_checked_.insert(value_name);
  }

  bool IsValueNullChecked(uint16_t value_name) const {
    return null_checked_.find(value_name) != null_checked_.end();
  }

  bool IsSregValue(uint16_t s_reg, uint16_t value_name) const {
    auto it = sreg_value_map_.find(s_reg);
    if (it != sreg_value_map_.end()) {
      return it->second == value_name;
    } else {
      return gvn_->HasValue(kNoValue, s_reg, kNoValue, kNoValue, value_name);
    }
  }

  enum MergeType {
    kNormalMerge,
    kCatchMerge,
    kReturnMerge,  // RETURN or PHI+RETURN. Merge only sreg maps.
  };

  void MergeOne(const LocalValueNumbering& other, MergeType merge_type);
  void Merge(MergeType merge_type);  // Merge gvn_->merge_lvns_.

  uint16_t GetValueNumber(MIR* mir);

  // LocalValueNumbering should be allocated on the ArenaStack (or the native stack).
  static void* operator new(size_t size, ScopedArenaAllocator* allocator) {
    return allocator->Alloc(sizeof(LocalValueNumbering), kArenaAllocMisc);
  }

  // Allow delete-expression to destroy a LocalValueNumbering object without deallocation.
  static void operator delete(void* ptr) { UNUSED(ptr); }

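  // Illustrative driver loop (a sketch based on this interface only, not a verbatim caller;
  // cu, bb and gvn below stand for a hypothetical CompilationUnit, BasicBlock and
  // GlobalValueNumbering already available at the call site):
  //   ScopedArenaAllocator allocator(&cu->arena_stack);
  //   LocalValueNumbering* lvn = new (&allocator) LocalValueNumbering(gvn, bb->id, &allocator);
  //   for (MIR* mir = bb->first_mir_insn; mir != nullptr; mir = mir->next) {
  //     lvn->GetValueNumber(mir);  // Assigns value names to the MIR's operands and result.
  //   }
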
 private:
  // A set of value names.
  typedef GlobalValueNumbering::ValueNameSet ValueNameSet;

  // Field types correspond to the ordering of GET/PUT instructions; this order is the same
  // for IGET, IPUT, SGET, SPUT, AGET and APUT:
  //   op         0
  //   op_WIDE    1
  //   op_OBJECT  2
  //   op_BOOLEAN 3
  //   op_BYTE    4
  //   op_CHAR    5
  //   op_SHORT   6
  static constexpr size_t kFieldTypeCount = 7;

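  // Sketch of how a type index can be derived (the real classification lives elsewhere):
  // within one GET/PUT family the Dalvik opcodes follow the order above, so e.g. an IGET
  // variant maps to (opcode - Instruction::IGET), yielding a value in [0, kFieldTypeCount).
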
  // Key is s_reg, value is value name.
  typedef ScopedArenaSafeMap<uint16_t, uint16_t> SregValueMap;

  void SetOperandValueImpl(uint16_t s_reg, uint16_t value, SregValueMap* map) {
    DCHECK_EQ(map->count(s_reg), 0u) << PrettyMethod(gvn_->cu_->method_idx, *gvn_->cu_->dex_file)
        << " LVN id: " << id_ << ", s_reg: " << s_reg;
    map->Put(s_reg, value);
  }

  uint16_t GetOperandValueImpl(int s_reg, const SregValueMap* map) const {
    uint16_t res = kNoValue;
    auto lb = map->find(s_reg);
    if (lb != map->end()) {
      res = lb->second;
    } else {
      // Using the original value; s_reg refers to an input reg.
      res = gvn_->LookupValue(kNoValue, s_reg, kNoValue, kNoValue);
    }
    return res;
  }

  void SetOperandValue(uint16_t s_reg, uint16_t value) {
    SetOperandValueImpl(s_reg, value, &sreg_value_map_);
  }

  uint16_t GetOperandValue(int s_reg) const {
    return GetOperandValueImpl(s_reg, &sreg_value_map_);
  }

  void SetOperandValueWide(uint16_t s_reg, uint16_t value) {
    SetOperandValueImpl(s_reg, value, &sreg_wide_value_map_);
  }

  uint16_t GetOperandValueWide(int s_reg) const {
    return GetOperandValueImpl(s_reg, &sreg_wide_value_map_);
  }

  struct RangeCheckKey {
    uint16_t array;
    uint16_t index;

    // NOTE: Can't define this at namespace scope for a private struct.
    bool operator==(const RangeCheckKey& other) const {
      return array == other.array && index == other.index;
    }
  };

  struct RangeCheckKeyComparator {
    bool operator()(const RangeCheckKey& lhs, const RangeCheckKey& rhs) const {
      if (lhs.array != rhs.array) {
        return lhs.array < rhs.array;
      }
      return lhs.index < rhs.index;
    }
  };

  typedef ScopedArenaSet<RangeCheckKey, RangeCheckKeyComparator> RangeCheckSet;

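  // Intended use (a sketch; see range_checked_ below): once an AGET/APUT on a given
  // (array, index) value-name pair has been range-checked, the pair is recorded in the set so
  // that a later access with the same pair can skip the check, e.g. via MIR_IGNORE_RANGE_CHECK.
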
  // Maps instance field "location" (derived from base, field_id and type) to value name.
  typedef ScopedArenaSafeMap<uint16_t, uint16_t> IFieldLocToValueMap;

  // Maps static field id to value name.
  typedef ScopedArenaSafeMap<uint16_t, uint16_t> SFieldToValueMap;

  struct EscapedIFieldClobberKey {
    uint16_t base;      // Or array.
    uint16_t type;
    uint16_t field_id;  // None (kNoValue) for arrays and unresolved instance field stores.

    // NOTE: Can't define this at namespace scope for a private struct.
    bool operator==(const EscapedIFieldClobberKey& other) const {
      return base == other.base && type == other.type && field_id == other.field_id;
    }
  };

  struct EscapedIFieldClobberKeyComparator {
    bool operator()(const EscapedIFieldClobberKey& lhs, const EscapedIFieldClobberKey& rhs) const {
      // Compare base first. This makes sequential iteration respect the order of base.
      if (lhs.base != rhs.base) {
        return lhs.base < rhs.base;
      }
      // Compare type second. This makes the type-clobber entries (field_id == kNoValue) sort
      // last for a given base and type, which makes it easy to prune unnecessary entries when
      // merging escaped_ifield_clobber_set_ from multiple LVNs.
      if (lhs.type != rhs.type) {
        return lhs.type < rhs.type;
      }
      return lhs.field_id < rhs.field_id;
    }
  };

  typedef ScopedArenaSet<EscapedIFieldClobberKey, EscapedIFieldClobberKeyComparator>
      EscapedIFieldClobberSet;

  struct EscapedArrayClobberKey {
    uint16_t base;
    uint16_t type;

    // NOTE: Can't define this at namespace scope for a private struct.
    bool operator==(const EscapedArrayClobberKey& other) const {
      return base == other.base && type == other.type;
    }
  };

  struct EscapedArrayClobberKeyComparator {
    bool operator()(const EscapedArrayClobberKey& lhs, const EscapedArrayClobberKey& rhs) const {
      // Compare base first. This makes sequential iteration respect the order of base.
      if (lhs.base != rhs.base) {
        return lhs.base < rhs.base;
      }
      return lhs.type < rhs.type;
    }
  };

  // Clobber set for previously non-aliasing array refs that escaped.
  typedef ScopedArenaSet<EscapedArrayClobberKey, EscapedArrayClobberKeyComparator>
      EscapedArrayClobberSet;

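  // Escape handling sketch, inferred from the declarations above and the Handle* methods below:
  // a previously non-aliasing ref escapes when it becomes reachable from elsewhere (e.g. it is
  // stored into another object or passed to an invoke); it then moves to escaped_refs_, and
  // subsequent unresolved or aliasing stores of a matching type add clobber keys here, after
  // which cached field/array values for that ref are no longer trusted.
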
  // Known location values for an aliasing set. The set can be tied to one of:
  // 1. Instance field. The locations are aliasing references used to access the field.
  // 2. Non-aliasing array reference. The locations are indexes to the array.
  // 3. Aliasing array type. The locations are (reference, index) pair ids assigned by GVN.
  // In each case we keep track of the last stored value, if any, and the set of locations
  // where it was stored. We also keep track of all values known for the current write state
  // (load_value_map), which can be known either because they have been loaded since the last
  // store or because they contained the last_stored_value before the store and thus could not
  // have changed as a result.
  struct AliasingValues {
    explicit AliasingValues(LocalValueNumbering* lvn)
        : memory_version_before_stores(kNoValue),
          last_stored_value(kNoValue),
          store_loc_set(std::less<uint16_t>(), lvn->null_checked_.get_allocator()),
          last_load_memory_version(kNoValue),
          load_value_map(std::less<uint16_t>(), lvn->null_checked_.get_allocator()) {
    }

    uint16_t memory_version_before_stores;  // kNoValue if start version for the field.
    uint16_t last_stored_value;             // Last stored value name, kNoValue if none.
    ValueNameSet store_loc_set;             // Where was last_stored_value stored.

    // Maps locations (other than those in store_loc_set) to the values currently known to be
    // stored there. On a write, anything that differs from the written value is removed, as it
    // may have been overwritten.
    uint16_t last_load_memory_version;      // kNoValue if not known.
    ScopedArenaSafeMap<uint16_t, uint16_t> load_value_map;

    // NOTE: Can't define this at namespace scope for a private struct.
    bool operator==(const AliasingValues& other) const {
      return memory_version_before_stores == other.memory_version_before_stores &&
          last_load_memory_version == other.last_load_memory_version &&
          last_stored_value == other.last_stored_value &&
          store_loc_set == other.store_loc_set &&
          load_value_map == other.load_value_map;
    }
  };

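  // Worked example for case 1 above (instance field f), as a sketch rather than actual code:
  //   IPUT v1, v0, f  ->  last_stored_value = V(v1), store_loc_set = {V(v0)}
  //   IGET v2, v3, f  ->  load_value_map[V(v3)] records the value name loaded through v3
  //   IPUT v4, v5, f  ->  load_value_map entries whose value differs from V(v4) are erased,
  //                       since the store through v5 may have overwritten them.
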
  // Maps instance field id to AliasingValues, locations are object refs.
  typedef ScopedArenaSafeMap<uint16_t, AliasingValues> AliasingIFieldValuesMap;

  // Maps non-aliasing array reference to AliasingValues, locations are array indexes.
  typedef ScopedArenaSafeMap<uint16_t, AliasingValues> NonAliasingArrayValuesMap;

  // Maps aliasing array type to AliasingValues, locations are (array, index) pair ids.
  typedef ScopedArenaSafeMap<uint16_t, AliasingValues> AliasingArrayValuesMap;

  // Helper classes defining versions for updating and merging the AliasingValues maps above.
  class AliasingIFieldVersions;
  class NonAliasingArrayVersions;
  class AliasingArrayVersions;

  template <typename Map>
  AliasingValues* GetAliasingValues(Map* map, const typename Map::key_type& key);

  template <typename Versions, typename KeyType>
  void UpdateAliasingValuesLoadVersion(const KeyType& key, AliasingValues* values);

  template <typename Versions, typename Map>
  static uint16_t AliasingValuesMergeGet(GlobalValueNumbering* gvn,
                                         const LocalValueNumbering* lvn,
                                         Map* map, const typename Map::key_type& key,
                                         uint16_t location);

  template <typename Versions, typename Map>
  uint16_t HandleAliasingValuesGet(Map* map, const typename Map::key_type& key,
                                   uint16_t location);

  template <typename Versions, typename Map>
  bool HandleAliasingValuesPut(Map* map, const typename Map::key_type& key,
                               uint16_t location, uint16_t value);

  template <typename K>
  void CopyAliasingValuesMap(ScopedArenaSafeMap<K, AliasingValues>* dest,
                             const ScopedArenaSafeMap<K, AliasingValues>& src);

  uint16_t MarkNonAliasingNonNull(MIR* mir);
  bool IsNonAliasing(uint16_t reg) const;
  bool IsNonAliasingIField(uint16_t reg, uint16_t field_id, uint16_t type) const;
  bool IsNonAliasingArray(uint16_t reg, uint16_t type) const;
  void HandleNullCheck(MIR* mir, uint16_t reg);
  void HandleRangeCheck(MIR* mir, uint16_t array, uint16_t index);
  void HandlePutObject(MIR* mir);
  void HandleEscapingRef(uint16_t base);
  uint16_t HandlePhi(MIR* mir);
  uint16_t HandleAGet(MIR* mir, uint16_t opcode);
  void HandleAPut(MIR* mir, uint16_t opcode);
  uint16_t HandleIGet(MIR* mir, uint16_t opcode);
  void HandleIPut(MIR* mir, uint16_t opcode);
  uint16_t HandleSGet(MIR* mir, uint16_t opcode);
  void HandleSPut(MIR* mir, uint16_t opcode);
  void RemoveSFieldsForType(uint16_t type);
  void HandleInvokeOrClInit(MIR* mir);

  bool SameMemoryVersion(const LocalValueNumbering& other) const;

  uint16_t NewMemoryVersion(uint16_t* new_version);
  void MergeMemoryVersions(bool clobbered_catch);

  void PruneNonAliasingRefsForCatch();

  template <typename Set, Set LocalValueNumbering::* set_ptr>
  void IntersectSets();

  void CopyLiveSregValues(SregValueMap* dest, const SregValueMap& src);

  // Intersect the s_reg value maps as sets. The value type must be equality-comparable.
  template <SregValueMap LocalValueNumbering::* map_ptr>
  void IntersectSregValueMaps();

  // Intersect maps as sets. The value type must be equality-comparable.
  template <typename Map>
  static void InPlaceIntersectMaps(Map* work_map, const Map& other_map);

  template <typename Set, Set LocalValueNumbering::*set_ptr, void (LocalValueNumbering::*MergeFn)(
      const typename Set::value_type& entry, typename Set::iterator hint)>
  void MergeSets();

  void IntersectAliasingValueLocations(AliasingValues* work_values, const AliasingValues* values);

  void MergeEscapedRefs(const ValueNameSet::value_type& entry, ValueNameSet::iterator hint);
  void MergeEscapedIFieldTypeClobberSets(const EscapedIFieldClobberSet::value_type& entry,
                                         EscapedIFieldClobberSet::iterator hint);
  void MergeEscapedIFieldClobberSets(const EscapedIFieldClobberSet::value_type& entry,
                                     EscapedIFieldClobberSet::iterator hint);
  void MergeEscapedArrayClobberSets(const EscapedArrayClobberSet::value_type& entry,
                                    EscapedArrayClobberSet::iterator hint);
  void MergeNullChecked(const ValueNameSet::value_type& entry, ValueNameSet::iterator hint);
  void MergeSFieldValues(const SFieldToValueMap::value_type& entry,
                         SFieldToValueMap::iterator hint);
  void MergeNonAliasingIFieldValues(const IFieldLocToValueMap::value_type& entry,
                                    IFieldLocToValueMap::iterator hint);

  template <typename Map, Map LocalValueNumbering::*map_ptr, typename Versions>
  void MergeAliasingValues(const typename Map::value_type& entry, typename Map::iterator hint);

  GlobalValueNumbering* gvn_;

  // We're using the block id as a 16-bit operand value for some lookups.
  COMPILE_ASSERT(sizeof(BasicBlockId) == sizeof(uint16_t), BasicBlockId_must_be_16_bit);
  BasicBlockId id_;

  SregValueMap sreg_value_map_;
  SregValueMap sreg_wide_value_map_;

  SFieldToValueMap sfield_value_map_;
  IFieldLocToValueMap non_aliasing_ifield_value_map_;
  AliasingIFieldValuesMap aliasing_ifield_value_map_;
  NonAliasingArrayValuesMap non_aliasing_array_value_map_;
  AliasingArrayValuesMap aliasing_array_value_map_;

  // Data for dealing with memory clobbering and store/load aliasing.
  uint16_t global_memory_version_;
  uint16_t unresolved_sfield_version_[kFieldTypeCount];
  uint16_t unresolved_ifield_version_[kFieldTypeCount];
  // Value names of references to objects that cannot be reached through a different value name.
  ValueNameSet non_aliasing_refs_;
  // Previously non-aliasing refs that escaped but can still be used for non-aliasing AGET/IGET.
  ValueNameSet escaped_refs_;
  // Blacklists for cases where escaped_refs_ can't be used.
  EscapedIFieldClobberSet escaped_ifield_clobber_set_;
  EscapedArrayClobberSet escaped_array_clobber_set_;

  // Range check and null check elimination.
  RangeCheckSet range_checked_;
  ValueNameSet null_checked_;

  // Reuse one vector for all merges to avoid leaking too much memory on the ArenaStack.
  ScopedArenaVector<BasicBlockId> merge_names_;
  // Map to identify when different locations merge the same values.
  ScopedArenaSafeMap<ScopedArenaVector<BasicBlockId>, uint16_t> merge_map_;
  // New memory version for merge, kNoValue if all memory versions matched.
  uint16_t merge_new_memory_version_;

  DISALLOW_COPY_AND_ASSIGN(LocalValueNumbering);
};

}  // namespace art

#endif  // ART_COMPILER_DEX_LOCAL_VALUE_NUMBERING_H_