/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_LIBARTBASE_BASE_BIT_TABLE_H_
#define ART_LIBARTBASE_BASE_BIT_TABLE_H_

#include <array>
#include <initializer_list>
#include <numeric>
#include <string.h>
#include <type_traits>
#include <unordered_map>

#include "base/bit_memory_region.h"
#include "base/casts.h"
#include "base/iteration_range.h"
#include "base/memory_region.h"
#include "base/scoped_arena_containers.h"
#include "base/stl_util.h"

namespace art {

// General-purpose table of uint32_t values, which are tightly packed at the bit level.
// It has its own header with the number of rows and the bit-widths of all columns.
// The values are accessible by (row, column). The value -1 is stored efficiently.
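//
// The encoded layout (as written by BitTableBuilderBase::Encode below) is, informally:
//   header: interleaved varints { num_rows, bit-width of column 0, ..., bit-width of column N-1 }
//   data:   num_rows rows packed back to back; column c of each row occupies its bit-width
//           in bits and stores the column value minus kValueBias.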
template<uint32_t kNumColumns>
class BitTableBase {
 public:
  static constexpr uint32_t kNoValue = std::numeric_limits<uint32_t>::max();  // == -1.
  static constexpr uint32_t kValueBias = kNoValue;  // Bias so that -1 is encoded as 0.
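  // For example, a stored field value of 0 decodes to kNoValue (-1), 1 decodes to 0 and
  // 6 decodes to 5; conversely, the encoder stores (value - kValueBias), i.e. value + 1
  // modulo 2^32, so a column which holds only kNoValue needs zero bits.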

  BitTableBase() {}
  explicit BitTableBase(BitMemoryReader& reader) {
    Decode(reader);
  }

  ALWAYS_INLINE void Decode(BitMemoryReader& reader) {
    // Decode row count and column sizes from the table header.
    std::array<uint32_t, 1+kNumColumns> header = reader.ReadInterleavedVarints<1+kNumColumns>();
    num_rows_ = header[0];
    column_offset_[0] = 0;
    for (uint32_t i = 0; i < kNumColumns; i++) {
      size_t column_end = column_offset_[i] + header[i + 1];
      column_offset_[i + 1] = dchecked_integral_cast<uint16_t>(column_end);
    }

    // Record the region which contains the table data and skip past it.
    table_data_ = reader.ReadRegion(num_rows_ * NumRowBits());
  }

  ALWAYS_INLINE uint32_t Get(uint32_t row, uint32_t column = 0) const {
    DCHECK(table_data_.IsValid()) << "Table has not been loaded";
    DCHECK_LT(row, num_rows_);
    DCHECK_LT(column, kNumColumns);
    size_t offset = row * NumRowBits() + column_offset_[column];
    return table_data_.LoadBits(offset, NumColumnBits(column)) + kValueBias;
  }

  ALWAYS_INLINE BitMemoryRegion GetBitMemoryRegion(uint32_t row, uint32_t column = 0) const {
    DCHECK(table_data_.IsValid()) << "Table has not been loaded";
    DCHECK_LT(row, num_rows_);
    DCHECK_LT(column, kNumColumns);
    size_t offset = row * NumRowBits() + column_offset_[column];
    return table_data_.Subregion(offset, NumColumnBits(column));
  }

  uint32_t NumRows() const { return num_rows_; }

  uint32_t NumRowBits() const { return column_offset_[kNumColumns]; }

  constexpr uint32_t NumColumns() const { return kNumColumns; }

  uint32_t NumColumnBits(uint32_t column) const {
    return column_offset_[column + 1] - column_offset_[column];
  }

  size_t DataBitSize() const { return table_data_.size_in_bits(); }

  bool Equals(const BitTableBase& other) const {
    return num_rows_ == other.num_rows_ &&
        std::equal(column_offset_, column_offset_ + kNumColumns, other.column_offset_) &&
        BitMemoryRegion::Equals(table_data_, other.table_data_);
  }

 protected:
  BitMemoryRegion table_data_;
  uint32_t num_rows_ = 0;
  uint16_t column_offset_[kNumColumns + 1] = {};
};

// Helper class which can be used to create BitTable accessors with named getters
// (see the usage sketch following this class).
template<uint32_t NumColumns>
class BitTableAccessor {
 public:
  static constexpr uint32_t kNumColumns = NumColumns;
  static constexpr uint32_t kNoValue = BitTableBase<kNumColumns>::kNoValue;

  BitTableAccessor() = default;
  BitTableAccessor(const BitTableBase<kNumColumns>* table, uint32_t row)
      : table_(table), row_(row) {
    DCHECK(table_ != nullptr);
  }

  ALWAYS_INLINE uint32_t Row() const { return row_; }

  ALWAYS_INLINE bool IsValid() const { return row_ < table_->NumRows(); }

  ALWAYS_INLINE bool Equals(const BitTableAccessor& other) {
    return this->table_ == other.table_ && this->row_ == other.row_;
  }

// Helper macro to create constructors and per-table utilities in derived class.
#define BIT_TABLE_HEADER(NAME)                                                        \
  using BitTableAccessor<kNumColumns>::BitTableAccessor;  /* inherit constructors */  \
  template<int COLUMN, int UNUSED /*needed to compile*/> struct ColumnName;           \
  static constexpr const char* kTableName = #NAME;                                    \

// Helper macro to create named column accessors in derived class.
#define BIT_TABLE_COLUMN(COLUMN, NAME)                                                \
  static constexpr uint32_t k##NAME = COLUMN;                                         \
  ALWAYS_INLINE uint32_t Get##NAME() const { return table_->Get(row_, COLUMN); }      \
  ALWAYS_INLINE bool Has##NAME() const { return Get##NAME() != kNoValue; }            \
  template<int UNUSED> struct ColumnName<COLUMN, UNUSED> {                            \
    static constexpr const char* Value = #NAME;                                       \
  };                                                                                  \

 protected:
  const BitTableBase<kNumColumns>* table_ = nullptr;
  uint32_t row_ = -1;
};
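
// Example of a derived accessor (illustrative sketch; the FieldInfo table and its
// column names below are hypothetical, not accessors defined by this header):
//
//   class FieldInfo : public BitTableAccessor<2> {
//    public:
//     BIT_TABLE_HEADER(FieldInfo)
//     BIT_TABLE_COLUMN(0, Offset)
//     BIT_TABLE_COLUMN(1, Flags)
//   };
//
// The macros generate the FieldInfo::kOffset / FieldInfo::kFlags column constants
// together with GetOffset()/HasOffset() and GetFlags()/HasFlags() getters.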

// Template meta-programming helper.
template<typename Accessor, size_t... Columns>
static const char* const* GetBitTableColumnNamesImpl(std::index_sequence<Columns...>) {
  static const char* names[] = { Accessor::template ColumnName<Columns, 0>::Value... };
  return names;
}

// Wrapper which makes it easier to use named accessors for the individual rows
// (see the iteration sketch following this class).
template<typename Accessor>
class BitTable : public BitTableBase<Accessor::kNumColumns> {
 public:
  class const_iterator : public std::iterator<std::random_access_iterator_tag,
                                              /* value_type */ Accessor,
                                              /* difference_type */ int32_t,
                                              /* pointer */ void,
                                              /* reference */ void> {
   public:
    using difference_type = int32_t;
    const_iterator() {}
    const_iterator(const BitTable* table, uint32_t row) : table_(table), row_(row) {}
    const_iterator operator+(difference_type n) { return const_iterator(table_, row_ + n); }
    const_iterator operator-(difference_type n) { return const_iterator(table_, row_ - n); }
    difference_type operator-(const const_iterator& other) { return row_ - other.row_; }
    void operator+=(difference_type rows) { row_ += rows; }
    void operator-=(difference_type rows) { row_ -= rows; }
    const_iterator operator++() { return const_iterator(table_, ++row_); }
    const_iterator operator--() { return const_iterator(table_, --row_); }
    const_iterator operator++(int) { return const_iterator(table_, row_++); }
    const_iterator operator--(int) { return const_iterator(table_, row_--); }
    bool operator==(const_iterator i) const { DCHECK(table_ == i.table_); return row_ == i.row_; }
    bool operator!=(const_iterator i) const { DCHECK(table_ == i.table_); return row_ != i.row_; }
    bool operator<=(const_iterator i) const { DCHECK(table_ == i.table_); return row_ <= i.row_; }
    bool operator>=(const_iterator i) const { DCHECK(table_ == i.table_); return row_ >= i.row_; }
    bool operator<(const_iterator i) const { DCHECK(table_ == i.table_); return row_ < i.row_; }
    bool operator>(const_iterator i) const { DCHECK(table_ == i.table_); return row_ > i.row_; }
    Accessor operator*() {
      DCHECK_LT(row_, table_->NumRows());
      return Accessor(table_, row_);
    }
    Accessor operator->() {
      DCHECK_LT(row_, table_->NumRows());
      return Accessor(table_, row_);
    }
    Accessor operator[](size_t index) {
      DCHECK_LT(row_ + index, table_->NumRows());
      return Accessor(table_, row_ + index);
    }
   private:
    const BitTable* table_ = nullptr;
    uint32_t row_ = 0;
  };

  using BitTableBase<Accessor::kNumColumns>::BitTableBase;  // Constructors.

  ALWAYS_INLINE const_iterator begin() const { return const_iterator(this, 0); }
  ALWAYS_INLINE const_iterator end() const { return const_iterator(this, this->NumRows()); }

  ALWAYS_INLINE Accessor GetRow(uint32_t row) const {
    return Accessor(this, row);
  }

  ALWAYS_INLINE Accessor GetInvalidRow() const {
    return Accessor(this, static_cast<uint32_t>(-1));
  }

  const char* GetName() const {
    return Accessor::kTableName;
  }

  const char* const* GetColumnNames() const {
    return GetBitTableColumnNamesImpl<Accessor>(std::make_index_sequence<Accessor::kNumColumns>());
  }
};
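
// Example use of BitTable (illustrative sketch, reusing the hypothetical FieldInfo
// accessor from above; `encoded_data` is assumed to be a BitMemoryRegion holding a
// table produced by BitTableBuilder):
//
//   BitMemoryReader reader(encoded_data);
//   BitTable<FieldInfo> table(reader);
//   for (FieldInfo row : table) {
//     if (row.HasFlags()) {
//       uint32_t flags = row.GetFlags();
//     }
//   }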

template<typename Accessor>
typename BitTable<Accessor>::const_iterator operator+(
    typename BitTable<Accessor>::const_iterator::difference_type n,
    typename BitTable<Accessor>::const_iterator a) {
  return a + n;
}

template<typename Accessor>
class BitTableRange : public IterationRange<typename BitTable<Accessor>::const_iterator> {
 public:
  using const_iterator = typename BitTable<Accessor>::const_iterator;

  using IterationRange<const_iterator>::IterationRange;
  BitTableRange() : IterationRange<const_iterator>(const_iterator(), const_iterator()) { }

  bool empty() const { return this->begin() == this->end(); }
  size_t size() const { return this->end() - this->begin(); }

  Accessor operator[](size_t index) const {
    const_iterator it = this->begin() + index;
    DCHECK(it < this->end());
    return *it;
  }

  Accessor back() const {
    DCHECK(!empty());
    return *(this->end() - 1);
  }

  void pop_back() {
    DCHECK(!empty());
    --this->last_;
  }
};

// Helper class for encoding BitTable. It can optionally de-duplicate the inputs.
template<uint32_t kNumColumns>
class BitTableBuilderBase {
 public:
  static constexpr uint32_t kNoValue = BitTableBase<kNumColumns>::kNoValue;
  static constexpr uint32_t kValueBias = BitTableBase<kNumColumns>::kValueBias;

  class Entry {
   public:
    Entry() {
      // The definition of kNoValue here is for host and target debug builds which
      // complain about a missing symbol definition for BitTableBase<N>::kNoValue
      // when optimization is off.
      static constexpr uint32_t kNoValue = BitTableBase<kNumColumns>::kNoValue;
      std::fill_n(data_, kNumColumns, kNoValue);
    }

    Entry(std::initializer_list<uint32_t> values) {
      DCHECK_EQ(values.size(), kNumColumns);
      std::copy(values.begin(), values.end(), data_);
    }

    uint32_t& operator[](size_t column) {
      DCHECK_LT(column, kNumColumns);
      return data_[column];
    }

    uint32_t operator[](size_t column) const {
      DCHECK_LT(column, kNumColumns);
      return data_[column];
    }

   private:
    uint32_t data_[kNumColumns];
  };

  explicit BitTableBuilderBase(ScopedArenaAllocator* allocator)
      : rows_(allocator->Adapter(kArenaAllocBitTableBuilder)),
        dedup_(8, allocator->Adapter(kArenaAllocBitTableBuilder)) {
  }

  Entry& operator[](size_t row) { return rows_[row]; }
  const Entry& operator[](size_t row) const { return rows_[row]; }
  const Entry& back() const { return rows_.back(); }
  size_t size() const { return rows_.size(); }

  // Append given value to the vector without de-duplication.
  // This will not add the element to the dedup map to avoid its associated costs.
  void Add(Entry value) {
    rows_.push_back(value);
  }

  // Append given list of values and return the index of the first value.
  // If the exact same set of values was already added, return the old index.
  uint32_t Dedup(Entry* values, size_t count = 1) {
    FNVHash<MemoryRegion> hasher;
    uint32_t hash = hasher(MemoryRegion(values, sizeof(Entry) * count));

    // Check if we have already added an identical set of values.
    auto range = dedup_.equal_range(hash);
    for (auto it = range.first; it != range.second; ++it) {
      uint32_t index = it->second;
      if (count <= size() - index &&
          std::equal(values,
                     values + count,
                     rows_.begin() + index,
                     [](const Entry& lhs, const Entry& rhs) {
                       return memcmp(&lhs, &rhs, sizeof(Entry)) == 0;
                     })) {
        return index;
      }
    }

    // Add the set of values and add the index to the dedup map.
    uint32_t index = size();
    rows_.insert(rows_.end(), values, values + count);
    dedup_.emplace(hash, index);
    return index;
  }

  uint32_t Dedup(Entry value) {
    return Dedup(&value, /* count */ 1);
  }

  // Calculate the column bit widths based on the current data.
  void Measure(/*out*/ uint32_t* column_bits) const {
    uint32_t max_column_value[kNumColumns];
    std::fill_n(max_column_value, kNumColumns, 0);
    for (uint32_t r = 0; r < size(); r++) {
      for (uint32_t c = 0; c < kNumColumns; c++) {
        max_column_value[c] |= rows_[r][c] - kValueBias;
      }
    }
    for (uint32_t c = 0; c < kNumColumns; c++) {
      column_bits[c] = MinimumBitsToStore(max_column_value[c]);
    }
  }

  // Encode the stored data into a BitTable.
  template<typename Vector>
  void Encode(BitMemoryWriter<Vector>& out) const {
    size_t initial_bit_offset = out.NumberOfWrittenBits();

    // Write table header.
    std::array<uint32_t, 1 + kNumColumns> header;
    header[0] = size();
    uint32_t* column_bits = header.data() + 1;
    Measure(column_bits);
    out.WriteInterleavedVarints(header);

    // Write table data.
    for (uint32_t r = 0; r < size(); r++) {
      for (uint32_t c = 0; c < kNumColumns; c++) {
        out.WriteBits(rows_[r][c] - kValueBias, column_bits[c]);
      }
    }

    // Verify the written data.
    if (kIsDebugBuild) {
      BitTableBase<kNumColumns> table;
      BitMemoryReader reader(out.GetWrittenRegion().Subregion(initial_bit_offset));
      table.Decode(reader);
      DCHECK_EQ(size(), table.NumRows());
      for (uint32_t c = 0; c < kNumColumns; c++) {
        DCHECK_EQ(column_bits[c], table.NumColumnBits(c));
      }
      for (uint32_t r = 0; r < size(); r++) {
        for (uint32_t c = 0; c < kNumColumns; c++) {
          DCHECK_EQ(rows_[r][c], table.Get(r, c)) << " (" << r << ", " << c << ")";
        }
      }
    }
  }

 protected:
  ScopedArenaDeque<Entry> rows_;
  ScopedArenaUnorderedMultimap<uint32_t, uint32_t> dedup_;  // Hash -> row index.
};

template<typename Accessor>
class BitTableBuilder : public BitTableBuilderBase<Accessor::kNumColumns> {
 public:
  using BitTableBuilderBase<Accessor::kNumColumns>::BitTableBuilderBase;  // Constructors.
};
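
// Example of building and encoding a table (illustrative sketch; it assumes a
// ScopedArenaAllocator* named `allocator`, that BitMemoryWriter is constructed from
// a pointer to the backing byte vector, and reuses the hypothetical FieldInfo
// accessor from above):
//
//   BitTableBuilder<FieldInfo> builder(allocator);
//   BitTableBuilder<FieldInfo>::Entry entry;   // All columns start as kNoValue.
//   entry[FieldInfo::kOffset] = 8;             // The Flags column stays kNoValue.
//   builder.Add(entry);                        // Or builder.Dedup(entry) to de-duplicate.
//   std::vector<uint8_t> buffer;
//   BitMemoryWriter<std::vector<uint8_t>> writer(&buffer);
//   builder.Encode(writer);                    // Readable later via BitTable<FieldInfo>.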

// Helper class for encoding a single-column BitTable of bitmaps (allows more than 32 bits).
class BitmapTableBuilder {
 public:
  explicit BitmapTableBuilder(ScopedArenaAllocator* const allocator)
      : allocator_(allocator),
        rows_(allocator->Adapter(kArenaAllocBitTableBuilder)),
        dedup_(8, allocator_->Adapter(kArenaAllocBitTableBuilder)) {
  }

  MemoryRegion operator[](size_t row) { return rows_[row]; }
  const MemoryRegion operator[](size_t row) const { return rows_[row]; }
  size_t size() const { return rows_.size(); }

  // Add the given bitmap to the table and return its index.
  // If the bitmap was already added it will be deduplicated.
  // The last bit must be set and any padding bits in the last byte must be zero.
  uint32_t Dedup(const void* bitmap, size_t num_bits) {
    MemoryRegion region(const_cast<void*>(bitmap), BitsToBytesRoundUp(num_bits));
    DCHECK(num_bits == 0 || BitMemoryRegion(region).LoadBit(num_bits - 1) == 1);
    DCHECK_EQ(BitMemoryRegion(region).LoadBits(num_bits, region.size_in_bits() - num_bits), 0u);
    FNVHash<MemoryRegion> hasher;
    uint32_t hash = hasher(region);

    // Check if we have already added an identical bitmap.
    auto range = dedup_.equal_range(hash);
    for (auto it = range.first; it != range.second; ++it) {
      if (MemoryRegion::ContentEquals()(region, rows_[it->second])) {
        return it->second;
      }
    }

    // Add the bitmap and add the index to the dedup map.
    uint32_t index = size();
    void* copy = allocator_->Alloc(region.size(), kArenaAllocBitTableBuilder);
    memcpy(copy, region.pointer(), region.size());
    rows_.push_back(MemoryRegion(copy, region.size()));
    dedup_.emplace(hash, index);
    max_num_bits_ = std::max(max_num_bits_, num_bits);
    return index;
  }

  // Encode the stored data into a BitTable.
  template<typename Vector>
  void Encode(BitMemoryWriter<Vector>& out) const {
    size_t initial_bit_offset = out.NumberOfWrittenBits();

    // Write table header.
    out.WriteInterleavedVarints(std::array<uint32_t, 2>{
        dchecked_integral_cast<uint32_t>(size()),
        dchecked_integral_cast<uint32_t>(max_num_bits_),
    });

    // Write table data.
    for (MemoryRegion row : rows_) {
      size_t bits_to_copy = std::min(max_num_bits_, row.size_in_bits());
      BitMemoryRegion src(row, /*bit_offset=*/ 0u, bits_to_copy);
      BitMemoryRegion dst = out.Allocate(max_num_bits_);
      dst.Subregion(/*bit_offset=*/ 0, bits_to_copy).CopyBits(src);
    }

    // Verify the written data.
    if (kIsDebugBuild) {
      BitTableBase<1> table;
      BitMemoryReader reader(out.GetWrittenRegion().Subregion(initial_bit_offset));
      table.Decode(reader);
      DCHECK_EQ(size(), table.NumRows());
      DCHECK_EQ(max_num_bits_, table.NumColumnBits(0));
      for (uint32_t r = 0; r < size(); r++) {
        BitMemoryRegion expected(rows_[r]);
        BitMemoryRegion seen = table.GetBitMemoryRegion(r);
        size_t num_bits = std::max(expected.size_in_bits(), seen.size_in_bits());
        for (size_t b = 0; b < num_bits; b++) {
          bool e = b < expected.size_in_bits() && expected.LoadBit(b);
          bool s = b < seen.size_in_bits() && seen.LoadBit(b);
          DCHECK_EQ(e, s) << " (" << r << ")[" << b << "]";
        }
      }
    }
  }

 private:
  ScopedArenaAllocator* const allocator_;
  ScopedArenaDeque<MemoryRegion> rows_;
  ScopedArenaUnorderedMultimap<uint32_t, uint32_t> dedup_;  // Hash -> row index.
  size_t max_num_bits_ = 0u;
};
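
// Example use of BitmapTableBuilder (illustrative sketch; `allocator` and `writer`
// are assumed as in the sketch above):
//
//   BitmapTableBuilder bitmaps(allocator);
//   uint8_t bitmap = 0b1011;  // Bits 0, 1 and 3 are set; padding bits are zero.
//   uint32_t row = bitmaps.Dedup(&bitmap, MinimumBitsToStore(bitmap));  // num_bits == 4.
//   bitmaps.Encode(writer);
//   // After decoding, BitTableBase<1>::GetBitMemoryRegion(row) returns the stored bits.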

}  // namespace art

#endif  // ART_LIBARTBASE_BASE_BIT_TABLE_H_