/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_LIBARTBASE_BASE_BIT_TABLE_H_
#define ART_LIBARTBASE_BASE_BIT_TABLE_H_

#include <string.h>

#include <algorithm>
#include <array>
#include <limits>
#include <numeric>
#include <type_traits>
#include <unordered_map>

#include "base/bit_memory_region.h"
#include "base/casts.h"
#include "base/memory_region.h"
#include "base/scoped_arena_containers.h"
#include "base/stl_util.h"

namespace art {

constexpr uint32_t kVarintHeaderBits = 4;
constexpr uint32_t kVarintSmallValue = 11;  // Maximum value which is stored as-is.

// Load a variable-length bit-packed integer from `region` starting at `bit_offset`.
// The first four bits determine the variable length of the encoded integer:
//   Values 0..11 represent the result as-is, with no further bits following.
//   Values 12..15 mean the result is stored in the next 8/16/24/32 bits respectively.
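//
// For example, 7 fits in the header and is stored as the single 4-bit group 7,
// whereas 200 needs 8 payload bits and is stored as the 4-bit header 12
// (kVarintSmallValue + 1) followed by the 8-bit value 200, for 12 bits total.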
ALWAYS_INLINE static inline uint32_t DecodeVarintBits(BitMemoryRegion region, size_t* bit_offset) {
  uint32_t x = region.LoadBitsAndAdvance(bit_offset, kVarintHeaderBits);
  if (x > kVarintSmallValue) {
    x = region.LoadBitsAndAdvance(bit_offset, (x - kVarintSmallValue) * kBitsPerByte);
  }
  return x;
}

// Store a variable-length bit-packed integer into `out` starting at `bit_offset`.
template<typename Vector>
ALWAYS_INLINE static inline void EncodeVarintBits(Vector* out, size_t* bit_offset, uint32_t value) {
  if (value <= kVarintSmallValue) {
    out->resize(BitsToBytesRoundUp(*bit_offset + kVarintHeaderBits));
    BitMemoryRegion region(MemoryRegion(out->data(), out->size()));
    region.StoreBitsAndAdvance(bit_offset, value, kVarintHeaderBits);
  } else {
    uint32_t num_bits = RoundUp(MinimumBitsToStore(value), kBitsPerByte);
    out->resize(BitsToBytesRoundUp(*bit_offset + kVarintHeaderBits + num_bits));
    BitMemoryRegion region(MemoryRegion(out->data(), out->size()));
    uint32_t header = kVarintSmallValue + num_bits / kBitsPerByte;
    region.StoreBitsAndAdvance(bit_offset, header, kVarintHeaderBits);
    region.StoreBitsAndAdvance(bit_offset, value, num_bits);
  }
}

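// A minimal round-trip sketch of the two helpers above (illustrative only;
// using std::vector<uint8_t> as the output vector is an assumption, any type
// with data()/size()/resize() works):
//
//   std::vector<uint8_t> buffer;
//   size_t write_offset = 0;
//   EncodeVarintBits(&buffer, &write_offset, 200u);  // Writes 4 + 8 bits.
//   size_t read_offset = 0;
//   BitMemoryRegion region(MemoryRegion(buffer.data(), buffer.size()));
//   uint32_t value = DecodeVarintBits(region, &read_offset);  // value == 200.
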
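// A read-only table of bit-packed integers with a compile-time number of
// columns. The encoded stream is: a varint row count, then (if non-empty) one
// varint per column giving its bit width, then the rows, each storing every
// column value in exactly that many bits.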
template<uint32_t kNumColumns>
class BitTable {
 public:
  class Accessor {
   public:
    static constexpr uint32_t kNoValue = std::numeric_limits<uint32_t>::max();

    Accessor(const BitTable* table, uint32_t row) : table_(table), row_(row) {}

    ALWAYS_INLINE uint32_t Row() const { return row_; }

    ALWAYS_INLINE bool IsValid() const { return table_ != nullptr && row_ < table_->NumRows(); }

    template<uint32_t Column>
    ALWAYS_INLINE uint32_t Get() const {
      static_assert(Column < kNumColumns, "Column out of bounds");
      return table_->Get(row_, Column);
    }
    ALWAYS_INLINE bool Equals(const Accessor& other) const {
      return this->table_ == other.table_ && this->row_ == other.row_;
    }

    Accessor& operator++() {
      row_++;
      return *this;
    }

   protected:
    const BitTable* table_;
    uint32_t row_;
  };

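  // Values are bit-packed with a bias of one: the builders store
  // `value - kValueBias` (i.e. value + 1) and `Get` adds the bias back, so
  // that kNoValue (0xFFFFFFFF) is encoded as a stored zero and packs into as
  // few bits as possible.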
  static constexpr uint32_t kValueBias = -1;

  BitTable() {}
  BitTable(void* data, size_t size, size_t* bit_offset = nullptr) {
    Decode(BitMemoryRegion(MemoryRegion(data, size)), bit_offset);
  }

  ALWAYS_INLINE void Decode(BitMemoryRegion region, size_t* bit_offset) {
    // Decode row count and column sizes from the table header.
    num_rows_ = DecodeVarintBits(region, bit_offset);
    if (num_rows_ != 0) {
      column_offset_[0] = 0;
      for (uint32_t i = 0; i < kNumColumns; i++) {
        size_t column_end = column_offset_[i] + DecodeVarintBits(region, bit_offset);
        column_offset_[i + 1] = dchecked_integral_cast<uint16_t>(column_end);
      }
    }

    // Record the region which contains the table data and skip past it.
    table_data_ = region.Subregion(*bit_offset, num_rows_ * NumRowBits());
    *bit_offset += table_data_.size_in_bits();
  }

  ALWAYS_INLINE uint32_t Get(uint32_t row, uint32_t column = 0) const {
    DCHECK_LT(row, num_rows_);
    DCHECK_LT(column, kNumColumns);
    size_t offset = row * NumRowBits() + column_offset_[column];
    return table_data_.LoadBits(offset, NumColumnBits(column)) + kValueBias;
  }

  ALWAYS_INLINE BitMemoryRegion GetBitMemoryRegion(uint32_t row, uint32_t column = 0) const {
    DCHECK_LT(row, num_rows_);
    DCHECK_LT(column, kNumColumns);
    size_t offset = row * NumRowBits() + column_offset_[column];
    return table_data_.Subregion(offset, NumColumnBits(column));
  }

  size_t NumRows() const { return num_rows_; }

  uint32_t NumRowBits() const { return column_offset_[kNumColumns]; }

  constexpr size_t NumColumns() const { return kNumColumns; }

  uint32_t NumColumnBits(uint32_t column) const {
    return column_offset_[column + 1] - column_offset_[column];
  }

  size_t DataBitSize() const { return num_rows_ * column_offset_[kNumColumns]; }

 protected:
  BitMemoryRegion table_data_;
  size_t num_rows_ = 0;

  uint16_t column_offset_[kNumColumns + 1] = {};
};

template<uint32_t kNumColumns>
constexpr uint32_t BitTable<kNumColumns>::Accessor::kNoValue;

template<uint32_t kNumColumns>
constexpr uint32_t BitTable<kNumColumns>::kValueBias;
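
// A minimal decoding sketch for BitTable (illustrative only; `encoded` is
// assumed to hold bytes produced by a matching two-column builder such as
// the BitTableBuilder below):
//
//   size_t bit_offset = 0;
//   BitTable<2> table(encoded.data(), encoded.size(), &bit_offset);
//   uint32_t value = table.Get(/* row */ 0u, /* column */ 1u);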

// Helper class for encoding a BitTable. It can optionally de-duplicate the inputs.
// The type `T` must be a POD type consisting of uint32_t fields (one per column).
template<typename T>
class BitTableBuilder {
 public:
  static_assert(std::is_pod<T>::value, "Type 'T' must be POD");
  static constexpr size_t kNumColumns = sizeof(T) / sizeof(uint32_t);

  explicit BitTableBuilder(ScopedArenaAllocator* allocator)
      : rows_(allocator->Adapter(kArenaAllocBitTableBuilder)),
        dedup_(8, allocator->Adapter(kArenaAllocBitTableBuilder)) {
  }

  T& operator[](size_t row) { return rows_[row]; }
  const T& operator[](size_t row) const { return rows_[row]; }
  size_t size() const { return rows_.size(); }

  // Append the given value to the vector without de-duplication.
  // This does not add the element to the dedup map, avoiding the associated costs.
  void Add(T value) {
    rows_.push_back(value);
  }

  // Append the given list of values and return the index of the first value.
  // If the exact same set of values was already added, return the old index instead.
  uint32_t Dedup(T* values, size_t count = 1) {
    FNVHash<MemoryRegion> hasher;
    uint32_t hash = hasher(MemoryRegion(values, sizeof(T) * count));

    // Check if we have already added an identical set of values.
    auto range = dedup_.equal_range(hash);
    for (auto it = range.first; it != range.second; ++it) {
      uint32_t index = it->second;
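      // Compare the ranges only if the candidate fully fits within the stored rows.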
      if (count <= size() - index &&
          std::equal(values,
                     values + count,
                     rows_.begin() + index,
                     [](const T& lhs, const T& rhs) {
                       return memcmp(&lhs, &rhs, sizeof(T)) == 0;
                     })) {
        return index;
      }
    }

    // Add the set of values and record the index in the dedup map.
    uint32_t index = size();
    rows_.insert(rows_.end(), values, values + count);
    dedup_.emplace(hash, index);
    return index;
  }

  // Check whether the table already contains the given values starting at the given index.
  bool RangeEquals(uint32_t index, T* values, size_t count = 1) {
    DCHECK_LE(index, size());
    DCHECK_LE(count, size() - index);
    for (uint32_t i = 0; i < count; i++) {
      if (memcmp(&values[i], &rows_[index + i], sizeof(T)) != 0) {
        return false;
      }
    }
    return true;
  }

  ALWAYS_INLINE uint32_t Get(uint32_t row, uint32_t column) const {
    DCHECK_LT(row, size());
    DCHECK_LT(column, kNumColumns);
    const uint32_t* data = reinterpret_cast<const uint32_t*>(&rows_[row]);
    return data[column];
  }

  // Calculate the column bit widths based on the current data.
  void Measure(/*out*/ std::array<uint32_t, kNumColumns>* column_bits) const {
    uint32_t max_column_value[kNumColumns];
    std::fill_n(max_column_value, kNumColumns, 0);
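    // OR-ing the biased values, rather than tracking their maximum, still
    // preserves the highest set bit, which is all MinimumBitsToStore needs.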
    for (uint32_t r = 0; r < size(); r++) {
      for (uint32_t c = 0; c < kNumColumns; c++) {
        max_column_value[c] |= Get(r, c) - BitTable<kNumColumns>::kValueBias;
      }
    }
    for (uint32_t c = 0; c < kNumColumns; c++) {
      (*column_bits)[c] = MinimumBitsToStore(max_column_value[c]);
    }
  }

  // Encode the stored data into a BitTable.
  template<typename Vector>
  void Encode(Vector* out, size_t* bit_offset) const {
    constexpr uint32_t bias = BitTable<kNumColumns>::kValueBias;
    size_t initial_bit_offset = *bit_offset;

    std::array<uint32_t, kNumColumns> column_bits;
    Measure(&column_bits);
    EncodeVarintBits(out, bit_offset, size());
    if (size() != 0) {
      // Write table header.
      for (uint32_t c = 0; c < kNumColumns; c++) {
        EncodeVarintBits(out, bit_offset, column_bits[c]);
      }

      // Write table data.
      uint32_t row_bits = std::accumulate(column_bits.begin(), column_bits.end(), 0u);
      out->resize(BitsToBytesRoundUp(*bit_offset + row_bits * size()));
      BitMemoryRegion region(MemoryRegion(out->data(), out->size()));
      for (uint32_t r = 0; r < size(); r++) {
        for (uint32_t c = 0; c < kNumColumns; c++) {
          region.StoreBitsAndAdvance(bit_offset, Get(r, c) - bias, column_bits[c]);
        }
      }
    }

    // Verify the written data.
    if (kIsDebugBuild) {
      BitTable<kNumColumns> table;
      BitMemoryRegion region(MemoryRegion(out->data(), out->size()));
      table.Decode(region, &initial_bit_offset);
      DCHECK_EQ(size(), table.NumRows());
      for (uint32_t c = 0; c < kNumColumns; c++) {
        DCHECK_EQ(column_bits[c], table.NumColumnBits(c));
      }
      for (uint32_t r = 0; r < size(); r++) {
        for (uint32_t c = 0; c < kNumColumns; c++) {
          DCHECK_EQ(Get(r, c), table.Get(r, c)) << " (" << r << ", " << c << ")";
        }
      }
    }
  }

 protected:
  ScopedArenaDeque<T> rows_;
  ScopedArenaUnorderedMultimap<uint32_t, uint32_t> dedup_;  // Hash -> row index.
};

template<typename T>
constexpr size_t BitTableBuilder<T>::kNumColumns;
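
// A minimal BitTableBuilder usage sketch (illustrative only; the row type and
// the ScopedArenaAllocator instance `allocator` are assumptions):
//
//   struct RowData { uint32_t a; uint32_t b; };  // One uint32_t per column.
//   BitTableBuilder<RowData> builder(&allocator);
//   builder.Add(RowData{1u, 2u});                // No de-duplication.
//   RowData row = {3u, 4u};
//   uint32_t index = builder.Dedup(&row);        // Returns old index if seen.
//   std::vector<uint8_t> out;
//   size_t bit_offset = 0;
//   builder.Encode(&out, &bit_offset);           // Decodable as BitTable<2>.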

// Helper class for encoding a single-column BitTable of bitmaps (allows more than 32 bits).
class BitmapTableBuilder {
 public:
  explicit BitmapTableBuilder(ScopedArenaAllocator* const allocator)
      : allocator_(allocator),
        rows_(allocator->Adapter(kArenaAllocBitTableBuilder)),
        dedup_(8, allocator_->Adapter(kArenaAllocBitTableBuilder)) {
  }

  MemoryRegion operator[](size_t row) { return rows_[row]; }
  const MemoryRegion operator[](size_t row) const { return rows_[row]; }
  size_t size() const { return rows_.size(); }

  // Add the given bitmap to the table and return its index.
  // If an identical bitmap was already added, it is deduplicated.
  // The last bit must be set and any padding bits in the last byte must be zero,
  // so that equal bitmaps can be compared byte by byte.
  uint32_t Dedup(const void* bitmap, size_t num_bits) {
    MemoryRegion region(const_cast<void*>(bitmap), BitsToBytesRoundUp(num_bits));
    DCHECK(num_bits == 0 || BitMemoryRegion(region).LoadBit(num_bits - 1) == 1);
    DCHECK_EQ(BitMemoryRegion(region).LoadBits(num_bits, region.size_in_bits() - num_bits), 0u);
    FNVHash<MemoryRegion> hasher;
    uint32_t hash = hasher(region);

    // Check if we have already added an identical bitmap.
    auto range = dedup_.equal_range(hash);
    for (auto it = range.first; it != range.second; ++it) {
      if (MemoryRegion::ContentEquals()(region, rows_[it->second])) {
        return it->second;
      }
    }

    // Add the bitmap and record the index in the dedup map.
    uint32_t index = size();
    void* copy = allocator_->Alloc(region.size(), kArenaAllocBitTableBuilder);
    memcpy(copy, region.pointer(), region.size());
    rows_.push_back(MemoryRegion(copy, region.size()));
    dedup_.emplace(hash, index);
    max_num_bits_ = std::max(max_num_bits_, num_bits);
    return index;
  }

  // Encode the stored data into a BitTable.
  template<typename Vector>
  void Encode(Vector* out, size_t* bit_offset) const {
    size_t initial_bit_offset = *bit_offset;

    EncodeVarintBits(out, bit_offset, size());
    if (size() != 0) {
      EncodeVarintBits(out, bit_offset, max_num_bits_);

      // Write table data.
      out->resize(BitsToBytesRoundUp(*bit_offset + max_num_bits_ * size()));
      BitMemoryRegion region(MemoryRegion(out->data(), out->size()));
      for (MemoryRegion row : rows_) {
        BitMemoryRegion src(row);
        region.StoreBits(*bit_offset, src, std::min(max_num_bits_, src.size_in_bits()));
        *bit_offset += max_num_bits_;
      }
    }

    // Verify the written data.
    if (kIsDebugBuild) {
      BitTable<1> table;
      BitMemoryRegion region(MemoryRegion(out->data(), out->size()));
      table.Decode(region, &initial_bit_offset);
      DCHECK_EQ(size(), table.NumRows());
      DCHECK_EQ(max_num_bits_, table.NumColumnBits(0));
      for (uint32_t r = 0; r < size(); r++) {
        BitMemoryRegion expected(rows_[r]);
        BitMemoryRegion seen = table.GetBitMemoryRegion(r);
        size_t num_bits = std::max(expected.size_in_bits(), seen.size_in_bits());
        for (size_t b = 0; b < num_bits; b++) {
          bool e = b < expected.size_in_bits() && expected.LoadBit(b);
          bool s = b < seen.size_in_bits() && seen.LoadBit(b);
          DCHECK_EQ(e, s) << " (" << r << ")[" << b << "]";
        }
      }
    }
  }

 private:
  ScopedArenaAllocator* const allocator_;
  ScopedArenaDeque<MemoryRegion> rows_;
  ScopedArenaUnorderedMultimap<uint32_t, uint32_t> dedup_;  // Hash -> row index.
  size_t max_num_bits_ = 0u;
};
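
// A minimal BitmapTableBuilder usage sketch (illustrative only; `allocator`
// and the bitmap contents are assumptions):
//
//   BitmapTableBuilder builder(&allocator);
//   uint8_t bitmap = 0x5;  // Bits 0 and 2 set; the last bit (bit 2) is set.
//   uint32_t index = builder.Dedup(&bitmap, /* num_bits */ 3);
//   std::vector<uint8_t> out;
//   size_t bit_offset = 0;
//   builder.Encode(&out, &bit_offset);  // Decodable as a BitTable<1>.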

}  // namespace art

#endif  // ART_LIBARTBASE_BASE_BIT_TABLE_H_