/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "mem_map.h"

#include <memory>
#include <random>

#include "bit_utils.h"
#include "common_art_test.h"
#include "logging.h"
#include "memory_tool.h"
#include "mman.h"
#include "unix_file/fd_file.h"

namespace art {

class MemMapTest : public CommonArtTest {
 public:
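  // Probe whether `addr` is backed by a mapping: msync() on a single byte succeeds for mapped
  // addresses and fails with ENOMEM for unmapped ones.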
  static bool IsAddressMapped(void* addr) {
    bool res = msync(addr, 1, MS_SYNC) == 0;
    if (!res && errno != ENOMEM) {
      PLOG(FATAL) << "Unexpected error occurred on msync";
    }
    return res;
  }

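  // Generate `size` bytes of random data so that copied or replaced mappings can be compared
  // byte-for-byte.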
  static std::vector<uint8_t> RandomData(size_t size) {
    std::random_device rd;
    std::uniform_int_distribution<uint8_t> dist;
    std::vector<uint8_t> res;
    res.resize(size);
    for (size_t i = 0; i < size; i++) {
      res[i] = dist(rd);
    }
    return res;
  }

  static uint8_t* GetValidMapAddress(size_t size, bool low_4gb) {
    // Find a valid map address and unmap it before returning.
    std::string error_msg;
    MemMap map = MemMap::MapAnonymous("temp",
                                      size,
                                      PROT_READ,
                                      low_4gb,
                                      &error_msg);
    CHECK(map.IsValid());
    return map.Begin();
  }

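  // Map two pages, split the second page off with RemapAtEnd() and check that both halves
  // remain independently usable.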
  static void RemapAtEndTest(bool low_4gb) {
    std::string error_msg;
    // Cast the page size to size_t.
    const size_t page_size = static_cast<size_t>(kPageSize);
    // Map a two-page memory region.
    MemMap m0 = MemMap::MapAnonymous("MemMapTest_RemapAtEndTest_map0",
                                     2 * page_size,
                                     PROT_READ | PROT_WRITE,
                                     low_4gb,
                                     &error_msg);
    // Check its state and write to it.
    ASSERT_TRUE(m0.IsValid());
    uint8_t* base0 = m0.Begin();
    ASSERT_TRUE(base0 != nullptr) << error_msg;
    size_t size0 = m0.Size();
    EXPECT_EQ(m0.Size(), 2 * page_size);
    EXPECT_EQ(m0.BaseBegin(), base0);
    EXPECT_EQ(m0.BaseSize(), size0);
    memset(base0, 42, 2 * page_size);
    // Remap the latter half into a second MemMap.
    MemMap m1 = m0.RemapAtEnd(base0 + page_size,
                              "MemMapTest_RemapAtEndTest_map1",
                              PROT_READ | PROT_WRITE,
                              &error_msg);
    // Check the states of the two maps.
    EXPECT_EQ(m0.Begin(), base0) << error_msg;
    EXPECT_EQ(m0.Size(), page_size);
    EXPECT_EQ(m0.BaseBegin(), base0);
    EXPECT_EQ(m0.BaseSize(), page_size);
    uint8_t* base1 = m1.Begin();
    size_t size1 = m1.Size();
    EXPECT_EQ(base1, base0 + page_size);
    EXPECT_EQ(size1, page_size);
    EXPECT_EQ(m1.BaseBegin(), base1);
    EXPECT_EQ(m1.BaseSize(), size1);
    // Write to the second region.
    memset(base1, 43, page_size);
    // Check the contents of the two regions.
    for (size_t i = 0; i < page_size; ++i) {
      EXPECT_EQ(base0[i], 42);
    }
    for (size_t i = 0; i < page_size; ++i) {
      EXPECT_EQ(base1[i], 43);
    }
    // Unmap the first region.
    m0.Reset();
    // Make sure the second region is still accessible after the first
    // region is unmapped.
    for (size_t i = 0; i < page_size; ++i) {
      EXPECT_EQ(base1[i], 43);
    }
    MemMap m2 = m1.RemapAtEnd(m1.Begin(),
                              "MemMapTest_RemapAtEndTest_map1",
                              PROT_READ | PROT_WRITE,
                              &error_msg);
    ASSERT_TRUE(m2.IsValid()) << error_msg;
    ASSERT_FALSE(m1.IsValid());
  }

  void CommonInit() {
    MemMap::Init();
  }

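  // On 64-bit targets other than x86-64, expose MemMap's linear scan position
  // (`next_mem_pos_`) so that the Start test below can check its range.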
#if defined(__LP64__) && !defined(__x86_64__)
  static uintptr_t GetLinearScanPos() {
    return MemMap::next_mem_pos_;
  }
#endif
};

#if defined(__LP64__) && !defined(__x86_64__)

#ifdef __BIONIC__
extern uintptr_t CreateStartPos(uint64_t input);
#endif

TEST_F(MemMapTest, Start) {
  CommonInit();
  uintptr_t start = GetLinearScanPos();
  EXPECT_LE(64 * KB, start);
  EXPECT_LT(start, static_cast<uintptr_t>(ART_BASE_ADDRESS));
#ifdef __BIONIC__
  // Test a couple of values. Make sure they are different.
  uintptr_t last = 0;
  for (size_t i = 0; i < 100; ++i) {
    uintptr_t random_start = CreateStartPos(i * kPageSize);
    EXPECT_NE(last, random_start);
    last = random_start;
  }

  // Even on max, should be below ART_BASE_ADDRESS.
  EXPECT_LT(CreateStartPos(~0), static_cast<uintptr_t>(ART_BASE_ADDRESS));
#endif
  // End of test.
}
#endif

// We need mremap to be able to test ReplaceMapping at all
#if HAVE_MREMAP_SYSCALL
TEST_F(MemMapTest, ReplaceMapping_SameSize) {
  std::string error_msg;
  MemMap dest = MemMap::MapAnonymous("MapAnonymousEmpty-atomic-replace-dest",
                                     kPageSize,
                                     PROT_READ,
                                     /*low_4gb=*/ false,
                                     &error_msg);
  ASSERT_TRUE(dest.IsValid());
  MemMap source = MemMap::MapAnonymous("MapAnonymous-atomic-replace-source",
                                       kPageSize,
                                       PROT_WRITE | PROT_READ,
                                       /*low_4gb=*/ false,
                                       &error_msg);
  ASSERT_TRUE(source.IsValid());
  void* source_addr = source.Begin();
  void* dest_addr = dest.Begin();
  ASSERT_TRUE(IsAddressMapped(source_addr));
  ASSERT_TRUE(IsAddressMapped(dest_addr));

  std::vector<uint8_t> data = RandomData(kPageSize);
  memcpy(source.Begin(), data.data(), data.size());

  ASSERT_TRUE(dest.ReplaceWith(&source, &error_msg)) << error_msg;

  ASSERT_FALSE(IsAddressMapped(source_addr));
  ASSERT_TRUE(IsAddressMapped(dest_addr));
  ASSERT_FALSE(source.IsValid());

  ASSERT_EQ(dest.Size(), static_cast<size_t>(kPageSize));

  ASSERT_EQ(memcmp(dest.Begin(), data.data(), dest.Size()), 0);
}

TEST_F(MemMapTest, ReplaceMapping_MakeLarger) {
  std::string error_msg;
  MemMap dest = MemMap::MapAnonymous("MapAnonymousEmpty-atomic-replace-dest",
                                     5 * kPageSize,  // Need to make it larger
                                                     // initially so we know
                                                     // there won't be mappings
                                                     // in the way when we move
                                                     // source.
                                     PROT_READ,
                                     /*low_4gb=*/ false,
                                     &error_msg);
  ASSERT_TRUE(dest.IsValid());
  MemMap source = MemMap::MapAnonymous("MapAnonymous-atomic-replace-source",
                                       3 * kPageSize,
                                       PROT_WRITE | PROT_READ,
                                       /*low_4gb=*/ false,
                                       &error_msg);
  ASSERT_TRUE(source.IsValid());
  uint8_t* source_addr = source.Begin();
  uint8_t* dest_addr = dest.Begin();
  ASSERT_TRUE(IsAddressMapped(source_addr));

  // Fill the source with random data.
  std::vector<uint8_t> data = RandomData(3 * kPageSize);
  memcpy(source.Begin(), data.data(), data.size());

  // Make the dest smaller so that we know we'll have space.
  dest.SetSize(kPageSize);

  ASSERT_TRUE(IsAddressMapped(dest_addr));
  ASSERT_FALSE(IsAddressMapped(dest_addr + 2 * kPageSize));
  ASSERT_EQ(dest.Size(), static_cast<size_t>(kPageSize));

  ASSERT_TRUE(dest.ReplaceWith(&source, &error_msg)) << error_msg;

  ASSERT_FALSE(IsAddressMapped(source_addr));
  ASSERT_EQ(dest.Size(), static_cast<size_t>(3 * kPageSize));
  ASSERT_TRUE(IsAddressMapped(dest_addr));
  ASSERT_TRUE(IsAddressMapped(dest_addr + 2 * kPageSize));
  ASSERT_FALSE(source.IsValid());

  ASSERT_EQ(memcmp(dest.Begin(), data.data(), dest.Size()), 0);
}

TEST_F(MemMapTest, ReplaceMapping_MakeSmaller) {
  std::string error_msg;
  MemMap dest = MemMap::MapAnonymous("MapAnonymousEmpty-atomic-replace-dest",
                                     3 * kPageSize,
                                     PROT_READ,
                                     /*low_4gb=*/ false,
                                     &error_msg);
  ASSERT_TRUE(dest.IsValid());
  MemMap source = MemMap::MapAnonymous("MapAnonymous-atomic-replace-source",
                                       kPageSize,
                                       PROT_WRITE | PROT_READ,
                                       /*low_4gb=*/ false,
                                       &error_msg);
  ASSERT_TRUE(source.IsValid());
  uint8_t* source_addr = source.Begin();
  uint8_t* dest_addr = dest.Begin();
  ASSERT_TRUE(IsAddressMapped(source_addr));
  ASSERT_TRUE(IsAddressMapped(dest_addr));
  ASSERT_TRUE(IsAddressMapped(dest_addr + 2 * kPageSize));
  ASSERT_EQ(dest.Size(), static_cast<size_t>(3 * kPageSize));

  std::vector<uint8_t> data = RandomData(kPageSize);
  memcpy(source.Begin(), data.data(), kPageSize);

  ASSERT_TRUE(dest.ReplaceWith(&source, &error_msg)) << error_msg;

  ASSERT_FALSE(IsAddressMapped(source_addr));
  ASSERT_EQ(dest.Size(), static_cast<size_t>(kPageSize));
  ASSERT_TRUE(IsAddressMapped(dest_addr));
  ASSERT_FALSE(IsAddressMapped(dest_addr + 2 * kPageSize));
  ASSERT_FALSE(source.IsValid());

  ASSERT_EQ(memcmp(dest.Begin(), data.data(), dest.Size()), 0);
}

TEST_F(MemMapTest, ReplaceMapping_FailureOverlap) {
  std::string error_msg;
  MemMap dest =
      MemMap::MapAnonymous(
          "MapAnonymousEmpty-atomic-replace-dest",
          3 * kPageSize,  // Need to make it larger initially so we know there won't be mappings in
                          // the way when we move source.
          PROT_READ | PROT_WRITE,
          /*low_4gb=*/ false,
          &error_msg);
  ASSERT_TRUE(dest.IsValid());
  // Resize down to 1 page so we can remap the rest.
  dest.SetSize(kPageSize);
  // Create source from the last 2 pages
  MemMap source = MemMap::MapAnonymous("MapAnonymous-atomic-replace-source",
                                       dest.Begin() + kPageSize,
                                       2 * kPageSize,
                                       PROT_WRITE | PROT_READ,
                                       /*low_4gb=*/ false,
                                       /*reuse=*/ false,
                                       /*reservation=*/ nullptr,
                                       &error_msg);
  ASSERT_TRUE(source.IsValid());
  ASSERT_EQ(dest.Begin() + kPageSize, source.Begin());
  uint8_t* source_addr = source.Begin();
  uint8_t* dest_addr = dest.Begin();
  ASSERT_TRUE(IsAddressMapped(source_addr));

  // Fill the source and dest with random data.
  std::vector<uint8_t> data = RandomData(2 * kPageSize);
  memcpy(source.Begin(), data.data(), data.size());
  std::vector<uint8_t> dest_data = RandomData(kPageSize);
  memcpy(dest.Begin(), dest_data.data(), dest_data.size());

  ASSERT_TRUE(IsAddressMapped(dest_addr));
  ASSERT_EQ(dest.Size(), static_cast<size_t>(kPageSize));

  ASSERT_FALSE(dest.ReplaceWith(&source, &error_msg)) << error_msg;

  ASSERT_TRUE(IsAddressMapped(source_addr));
  ASSERT_TRUE(IsAddressMapped(dest_addr));
  ASSERT_EQ(source.Size(), data.size());
  ASSERT_EQ(dest.Size(), dest_data.size());

  ASSERT_EQ(memcmp(source.Begin(), data.data(), data.size()), 0);
  ASSERT_EQ(memcmp(dest.Begin(), dest_data.data(), dest_data.size()), 0);
}
#endif  // HAVE_MREMAP_SYSCALL

TEST_F(MemMapTest, MapAnonymousEmpty) {
  CommonInit();
  std::string error_msg;
  MemMap map = MemMap::MapAnonymous("MapAnonymousEmpty",
                                    /*byte_count=*/ 0,
                                    PROT_READ,
                                    /*low_4gb=*/ false,
                                    &error_msg);
  ASSERT_FALSE(map.IsValid()) << error_msg;
  ASSERT_FALSE(error_msg.empty());

  error_msg.clear();
  map = MemMap::MapAnonymous("MapAnonymousNonEmpty",
                             kPageSize,
                             PROT_READ | PROT_WRITE,
                             /*low_4gb=*/ false,
                             &error_msg);
  ASSERT_TRUE(map.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
}

TEST_F(MemMapTest, MapAnonymousFailNullError) {
  CommonInit();
  // Test that we don't crash with a null error_str when mapping at an invalid location.
  MemMap map = MemMap::MapAnonymous("MapAnonymousInvalid",
                                    reinterpret_cast<uint8_t*>(kPageSize),
                                    0x20000,
                                    PROT_READ | PROT_WRITE,
                                    /*low_4gb=*/ false,
                                    /*reuse=*/ false,
                                    /*reservation=*/ nullptr,
                                    nullptr);
  ASSERT_FALSE(map.IsValid());
}

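// The low_4gb tests below are only meaningful on 64-bit targets, so they are compiled for
// __LP64__ only.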
#ifdef __LP64__
TEST_F(MemMapTest, MapAnonymousEmpty32bit) {
  CommonInit();
  std::string error_msg;
  MemMap map = MemMap::MapAnonymous("MapAnonymousEmpty",
                                    /*byte_count=*/ 0,
                                    PROT_READ,
                                    /*low_4gb=*/ true,
                                    &error_msg);
  ASSERT_FALSE(map.IsValid()) << error_msg;
  ASSERT_FALSE(error_msg.empty());

  error_msg.clear();
  map = MemMap::MapAnonymous("MapAnonymousNonEmpty",
                             kPageSize,
                             PROT_READ | PROT_WRITE,
                             /*low_4gb=*/ true,
                             &error_msg);
  ASSERT_TRUE(map.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_LT(reinterpret_cast<uintptr_t>(map.BaseBegin()), 1ULL << 32);
}

TEST_F(MemMapTest, MapFile32Bit) {
  CommonInit();
  std::string error_msg;
  ScratchFile scratch_file;
  constexpr size_t kMapSize = kPageSize;
  std::unique_ptr<uint8_t[]> data(new uint8_t[kMapSize]());
  ASSERT_TRUE(scratch_file.GetFile()->WriteFully(&data[0], kMapSize));
  MemMap map = MemMap::MapFile(/*byte_count=*/kMapSize,
                               PROT_READ,
                               MAP_PRIVATE,
                               scratch_file.GetFd(),
                               /*start=*/0,
                               /*low_4gb=*/true,
                               scratch_file.GetFilename().c_str(),
                               &error_msg);
  ASSERT_TRUE(map.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_EQ(map.Size(), kMapSize);
  ASSERT_LT(reinterpret_cast<uintptr_t>(map.BaseBegin()), 1ULL << 32);
}
#endif

TEST_F(MemMapTest, MapAnonymousExactAddr) {
  // TODO: The semantics of MemMap::MapAnonymous() with a given address but without
  // `reuse == true` or `reservation != nullptr` are weird. We should either drop support
  // for it, or take it only as a hint and allow the result to be mapped elsewhere.
  // Currently we're seeing failures with ASAN. b/118408378
  TEST_DISABLED_FOR_MEMORY_TOOL();

  CommonInit();
  std::string error_msg;
  // Find a valid address.
  uint8_t* valid_address = GetValidMapAddress(kPageSize, /*low_4gb=*/false);
  // Map at an address that should work, which should succeed.
  MemMap map0 = MemMap::MapAnonymous("MapAnonymous0",
                                     valid_address,
                                     kPageSize,
                                     PROT_READ | PROT_WRITE,
                                     /*low_4gb=*/ false,
                                     /*reuse=*/ false,
                                     /*reservation=*/ nullptr,
                                     &error_msg);
  ASSERT_TRUE(map0.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_TRUE(map0.BaseBegin() == valid_address);
  // Map at an unspecified address, which should succeed.
  MemMap map1 = MemMap::MapAnonymous("MapAnonymous1",
                                     kPageSize,
                                     PROT_READ | PROT_WRITE,
                                     /*low_4gb=*/ false,
                                     &error_msg);
  ASSERT_TRUE(map1.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_TRUE(map1.BaseBegin() != nullptr);
  // Attempt to map at the same address, which should fail.
  MemMap map2 = MemMap::MapAnonymous("MapAnonymous2",
                                     reinterpret_cast<uint8_t*>(map1.BaseBegin()),
                                     kPageSize,
                                     PROT_READ | PROT_WRITE,
                                     /*low_4gb=*/ false,
                                     /*reuse=*/ false,
                                     /*reservation=*/ nullptr,
                                     &error_msg);
  ASSERT_FALSE(map2.IsValid()) << error_msg;
  ASSERT_TRUE(!error_msg.empty());
}

TEST_F(MemMapTest, RemapAtEnd) {
  RemapAtEndTest(false);
}

#ifdef __LP64__
TEST_F(MemMapTest, RemapAtEnd32bit) {
  RemapAtEndTest(true);
}
#endif

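// Remap successive page-sized tails of a file-backed mapping and check that each tail still
// sees the file contents at the corresponding offset.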
TEST_F(MemMapTest, RemapFileViewAtEnd) {
  CommonInit();
  std::string error_msg;
  ScratchFile scratch_file;

  // Create a scratch file 3 pages large.
  constexpr size_t kMapSize = 3 * kPageSize;
  std::unique_ptr<uint8_t[]> data(new uint8_t[kMapSize]());
  memset(data.get(), 1, kPageSize);
  memset(&data[0], 0x55, kPageSize);
  memset(&data[kPageSize], 0x5a, kPageSize);
  memset(&data[2 * kPageSize], 0xaa, kPageSize);
  ASSERT_TRUE(scratch_file.GetFile()->WriteFully(&data[0], kMapSize));

  MemMap map = MemMap::MapFile(/*byte_count=*/kMapSize,
                               PROT_READ,
                               MAP_PRIVATE,
                               scratch_file.GetFd(),
                               /*start=*/0,
                               /*low_4gb=*/true,
                               scratch_file.GetFilename().c_str(),
                               &error_msg);
  ASSERT_TRUE(map.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_EQ(map.Size(), kMapSize);
  ASSERT_LT(reinterpret_cast<uintptr_t>(map.BaseBegin()), 1ULL << 32);
  ASSERT_EQ(data[0], *map.Begin());
  ASSERT_EQ(data[kPageSize], *(map.Begin() + kPageSize));
  ASSERT_EQ(data[2 * kPageSize], *(map.Begin() + 2 * kPageSize));

  for (size_t offset = 2 * kPageSize; offset > 0; offset -= kPageSize) {
    MemMap tail = map.RemapAtEnd(map.Begin() + offset,
                                 "bad_offset_map",
                                 PROT_READ,
                                 MAP_PRIVATE | MAP_FIXED,
                                 scratch_file.GetFd(),
                                 offset,
                                 &error_msg);
    ASSERT_TRUE(tail.IsValid()) << error_msg;
    ASSERT_TRUE(error_msg.empty());
    ASSERT_EQ(offset, map.Size());
    ASSERT_EQ(static_cast<size_t>(kPageSize), tail.Size());
    ASSERT_EQ(tail.Begin(), map.Begin() + map.Size());
    ASSERT_EQ(data[offset], *tail.Begin());
  }
}

TEST_F(MemMapTest, MapAnonymousExactAddr32bitHighAddr) {
  // This test does not work under AddressSanitizer.
  // Historical note: This test did not work under Valgrind either.
  TEST_DISABLED_FOR_MEMORY_TOOL();

  CommonInit();
  constexpr size_t size = 0x100000;
  // Try all addresses starting from 2GB to 4GB.
  size_t start_addr = 2 * GB;
  std::string error_msg;
  MemMap map;
  for (; start_addr <= std::numeric_limits<uint32_t>::max() - size; start_addr += size) {
    map = MemMap::MapAnonymous("MapAnonymousExactAddr32bitHighAddr",
                               reinterpret_cast<uint8_t*>(start_addr),
                               size,
                               PROT_READ | PROT_WRITE,
                               /*low_4gb=*/ true,
                               /*reuse=*/ false,
                               /*reservation=*/ nullptr,
                               &error_msg);
    if (map.IsValid()) {
      break;
    }
  }
  ASSERT_TRUE(map.IsValid()) << error_msg;
  ASSERT_GE(reinterpret_cast<uintptr_t>(map.End()), 2u * GB);
  ASSERT_TRUE(error_msg.empty());
  ASSERT_EQ(map.BaseBegin(), reinterpret_cast<void*>(start_addr));
}

TEST_F(MemMapTest, MapAnonymousOverflow) {
  CommonInit();
  std::string error_msg;
  uintptr_t ptr = 0;
  ptr -= kPageSize;  // Now it's close to the top.
  MemMap map = MemMap::MapAnonymous("MapAnonymousOverflow",
                                    reinterpret_cast<uint8_t*>(ptr),
                                    2 * kPageSize,  // brings it over the top.
                                    PROT_READ | PROT_WRITE,
                                    /*low_4gb=*/ false,
                                    /*reuse=*/ false,
                                    /*reservation=*/ nullptr,
                                    &error_msg);
  ASSERT_FALSE(map.IsValid());
  ASSERT_FALSE(error_msg.empty());
}

#ifdef __LP64__
TEST_F(MemMapTest, MapAnonymousLow4GBExpectedTooHigh) {
  CommonInit();
  std::string error_msg;
  MemMap map =
      MemMap::MapAnonymous("MapAnonymousLow4GBExpectedTooHigh",
                           reinterpret_cast<uint8_t*>(UINT64_C(0x100000000)),
                           kPageSize,
                           PROT_READ | PROT_WRITE,
                           /*low_4gb=*/ true,
                           /*reuse=*/ false,
                           /*reservation=*/ nullptr,
                           &error_msg);
  ASSERT_FALSE(map.IsValid());
  ASSERT_FALSE(error_msg.empty());
}

TEST_F(MemMapTest, MapAnonymousLow4GBRangeTooHigh) {
  CommonInit();
  std::string error_msg;
  MemMap map = MemMap::MapAnonymous("MapAnonymousLow4GBRangeTooHigh",
                                    /*addr=*/ reinterpret_cast<uint8_t*>(0xF0000000),
                                    /*byte_count=*/ 0x20000000,
                                    PROT_READ | PROT_WRITE,
                                    /*low_4gb=*/ true,
                                    /*reuse=*/ false,
                                    /*reservation=*/ nullptr,
                                    &error_msg);
  ASSERT_FALSE(map.IsValid());
  ASSERT_FALSE(error_msg.empty());
}
#endif

TEST_F(MemMapTest, MapAnonymousReuse) {
  CommonInit();
  std::string error_msg;
  MemMap map = MemMap::MapAnonymous("MapAnonymousReserve",
                                    /*byte_count=*/ 0x20000,
                                    PROT_READ | PROT_WRITE,
                                    /*low_4gb=*/ false,
                                    &error_msg);
  ASSERT_TRUE(map.IsValid());
  ASSERT_TRUE(error_msg.empty());
  MemMap map2 = MemMap::MapAnonymous("MapAnonymousReused",
                                     /*addr=*/ reinterpret_cast<uint8_t*>(map.BaseBegin()),
                                     /*byte_count=*/ 0x10000,
                                     PROT_READ | PROT_WRITE,
                                     /*low_4gb=*/ false,
                                     /*reuse=*/ true,
                                     /*reservation=*/ nullptr,
                                     &error_msg);
  ASSERT_TRUE(map2.IsValid());
  ASSERT_TRUE(error_msg.empty());
}

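// Carve three adjacent single-page maps out of one reservation and verify that CheckNoGaps()
// reports contiguity, then reports a gap once the middle map is unmapped.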
TEST_F(MemMapTest, CheckNoGaps) {
  CommonInit();
  std::string error_msg;
  constexpr size_t kNumPages = 3;
  // Map a 3-page mem map.
  MemMap reservation = MemMap::MapAnonymous("MapAnonymous0",
                                            kPageSize * kNumPages,
                                            PROT_READ | PROT_WRITE,
                                            /*low_4gb=*/ false,
                                            &error_msg);
  ASSERT_TRUE(reservation.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  // Record the base address.
  uint8_t* map_base = reinterpret_cast<uint8_t*>(reservation.BaseBegin());

  // Map at the same address, taking pages from the `reservation`.
  MemMap map0 = MemMap::MapAnonymous("MapAnonymous0",
                                     kPageSize,
                                     PROT_READ | PROT_WRITE,
                                     /*low_4gb=*/ false,
                                     &reservation,
                                     &error_msg);
  ASSERT_TRUE(map0.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_EQ(map_base, map0.Begin());
  MemMap map1 = MemMap::MapAnonymous("MapAnonymous1",
                                     kPageSize,
                                     PROT_READ | PROT_WRITE,
                                     /*low_4gb=*/ false,
                                     &reservation,
                                     &error_msg);
  ASSERT_TRUE(map1.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_EQ(map_base + kPageSize, map1.Begin());
  MemMap map2 = MemMap::MapAnonymous("MapAnonymous2",
                                     kPageSize,
                                     PROT_READ | PROT_WRITE,
                                     /*low_4gb=*/ false,
                                     &reservation,
                                     &error_msg);
  ASSERT_TRUE(map2.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_EQ(map_base + 2 * kPageSize, map2.Begin());
  ASSERT_FALSE(reservation.IsValid());  // The entire reservation was used.

  // One-map cases.
  ASSERT_TRUE(MemMap::CheckNoGaps(map0, map0));
  ASSERT_TRUE(MemMap::CheckNoGaps(map1, map1));
  ASSERT_TRUE(MemMap::CheckNoGaps(map2, map2));

  // Two or three-map cases.
  ASSERT_TRUE(MemMap::CheckNoGaps(map0, map1));
  ASSERT_TRUE(MemMap::CheckNoGaps(map1, map2));
  ASSERT_TRUE(MemMap::CheckNoGaps(map0, map2));

  // Unmap the middle one.
  map1.Reset();

  // Should return false now that there's a gap in the middle.
  ASSERT_FALSE(MemMap::CheckNoGaps(map0, map2));
}

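// Split a 14-page mapping into four pieces with RemapAtEnd() and verify that AlignBy() trims
// each piece so that its begin and end are aligned to 2 * page_size.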
TEST_F(MemMapTest, AlignBy) {
  CommonInit();
  std::string error_msg;
  // Cast the page size to size_t.
  const size_t page_size = static_cast<size_t>(kPageSize);
  // Map a region.
  MemMap m0 = MemMap::MapAnonymous("MemMapTest_AlignByTest_map0",
                                   14 * page_size,
                                   PROT_READ | PROT_WRITE,
                                   /*low_4gb=*/ false,
                                   &error_msg);
  ASSERT_TRUE(m0.IsValid());
  uint8_t* base0 = m0.Begin();
  ASSERT_TRUE(base0 != nullptr) << error_msg;
  ASSERT_EQ(m0.Size(), 14 * page_size);
  ASSERT_EQ(m0.BaseBegin(), base0);
  ASSERT_EQ(m0.BaseSize(), m0.Size());

  // Break it into several regions by using RemapAtEnd.
  MemMap m1 = m0.RemapAtEnd(base0 + 3 * page_size,
                            "MemMapTest_AlignByTest_map1",
                            PROT_READ | PROT_WRITE,
                            &error_msg);
  uint8_t* base1 = m1.Begin();
  ASSERT_TRUE(base1 != nullptr) << error_msg;
  ASSERT_EQ(base1, base0 + 3 * page_size);
  ASSERT_EQ(m0.Size(), 3 * page_size);

  MemMap m2 = m1.RemapAtEnd(base1 + 4 * page_size,
                            "MemMapTest_AlignByTest_map2",
                            PROT_READ | PROT_WRITE,
                            &error_msg);
  uint8_t* base2 = m2.Begin();
  ASSERT_TRUE(base2 != nullptr) << error_msg;
  ASSERT_EQ(base2, base1 + 4 * page_size);
  ASSERT_EQ(m1.Size(), 4 * page_size);

  MemMap m3 = m2.RemapAtEnd(base2 + 3 * page_size,
                            "MemMapTest_AlignByTest_map1",
                            PROT_READ | PROT_WRITE,
                            &error_msg);
  uint8_t* base3 = m3.Begin();
  ASSERT_TRUE(base3 != nullptr) << error_msg;
  ASSERT_EQ(base3, base2 + 3 * page_size);
  ASSERT_EQ(m2.Size(), 3 * page_size);
  ASSERT_EQ(m3.Size(), 4 * page_size);

  uint8_t* end0 = base0 + m0.Size();
  uint8_t* end1 = base1 + m1.Size();
  uint8_t* end2 = base2 + m2.Size();
  uint8_t* end3 = base3 + m3.Size();

  ASSERT_EQ(static_cast<size_t>(end3 - base0), 14 * page_size);

  if (IsAlignedParam(base0, 2 * page_size)) {
    ASSERT_FALSE(IsAlignedParam(base1, 2 * page_size));
    ASSERT_FALSE(IsAlignedParam(base2, 2 * page_size));
    ASSERT_TRUE(IsAlignedParam(base3, 2 * page_size));
    ASSERT_TRUE(IsAlignedParam(end3, 2 * page_size));
  } else {
    ASSERT_TRUE(IsAlignedParam(base1, 2 * page_size));
    ASSERT_TRUE(IsAlignedParam(base2, 2 * page_size));
    ASSERT_FALSE(IsAlignedParam(base3, 2 * page_size));
    ASSERT_FALSE(IsAlignedParam(end3, 2 * page_size));
  }

  // Align by 2 * page_size.
  m0.AlignBy(2 * page_size);
  m1.AlignBy(2 * page_size);
  m2.AlignBy(2 * page_size);
  m3.AlignBy(2 * page_size);

  EXPECT_TRUE(IsAlignedParam(m0.Begin(), 2 * page_size));
  EXPECT_TRUE(IsAlignedParam(m1.Begin(), 2 * page_size));
  EXPECT_TRUE(IsAlignedParam(m2.Begin(), 2 * page_size));
  EXPECT_TRUE(IsAlignedParam(m3.Begin(), 2 * page_size));

  EXPECT_TRUE(IsAlignedParam(m0.Begin() + m0.Size(), 2 * page_size));
  EXPECT_TRUE(IsAlignedParam(m1.Begin() + m1.Size(), 2 * page_size));
  EXPECT_TRUE(IsAlignedParam(m2.Begin() + m2.Size(), 2 * page_size));
  EXPECT_TRUE(IsAlignedParam(m3.Begin() + m3.Size(), 2 * page_size));

  if (IsAlignedParam(base0, 2 * page_size)) {
    EXPECT_EQ(m0.Begin(), base0);
    EXPECT_EQ(m0.Begin() + m0.Size(), end0 - page_size);
    EXPECT_EQ(m1.Begin(), base1 + page_size);
    EXPECT_EQ(m1.Begin() + m1.Size(), end1 - page_size);
    EXPECT_EQ(m2.Begin(), base2 + page_size);
    EXPECT_EQ(m2.Begin() + m2.Size(), end2);
    EXPECT_EQ(m3.Begin(), base3);
    EXPECT_EQ(m3.Begin() + m3.Size(), end3);
  } else {
    EXPECT_EQ(m0.Begin(), base0 + page_size);
    EXPECT_EQ(m0.Begin() + m0.Size(), end0);
    EXPECT_EQ(m1.Begin(), base1);
    EXPECT_EQ(m1.Begin() + m1.Size(), end1);
    EXPECT_EQ(m2.Begin(), base2);
    EXPECT_EQ(m2.Begin() + m2.Size(), end2 - page_size);
    EXPECT_EQ(m3.Begin(), base3 + page_size);
    EXPECT_EQ(m3.Begin() + m3.Size(), end3 - page_size);
  }
}

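// Carve file-backed and anonymous chunks out of a single reservation, then split the middle
// chunk further with TakeReservedMemory().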
TEST_F(MemMapTest, Reservation) {
  CommonInit();
  std::string error_msg;
  ScratchFile scratch_file;
  constexpr size_t kMapSize = 5 * kPageSize;
  std::unique_ptr<uint8_t[]> data(new uint8_t[kMapSize]());
  ASSERT_TRUE(scratch_file.GetFile()->WriteFully(&data[0], kMapSize));

  MemMap reservation = MemMap::MapAnonymous("Test reservation",
                                            kMapSize,
                                            PROT_NONE,
                                            /*low_4gb=*/ false,
                                            &error_msg);
  ASSERT_TRUE(reservation.IsValid());
  ASSERT_TRUE(error_msg.empty());

  // Map first part of the reservation.
  constexpr size_t kChunk1Size = kPageSize - 1u;
  static_assert(kChunk1Size < kMapSize, "We want to split the reservation.");
  uint8_t* addr1 = reservation.Begin();
  MemMap map1 = MemMap::MapFileAtAddress(addr1,
                                         /*byte_count=*/ kChunk1Size,
                                         PROT_READ,
                                         MAP_PRIVATE,
                                         scratch_file.GetFd(),
                                         /*start=*/ 0,
                                         /*low_4gb=*/ false,
                                         scratch_file.GetFilename().c_str(),
                                         /*reuse=*/ false,
                                         &reservation,
                                         &error_msg);
  ASSERT_TRUE(map1.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_EQ(map1.Size(), kChunk1Size);
  ASSERT_EQ(addr1, map1.Begin());
  ASSERT_TRUE(reservation.IsValid());
  // Entire pages are taken from the `reservation`.
  ASSERT_LT(map1.End(), map1.BaseEnd());
  ASSERT_EQ(map1.BaseEnd(), reservation.Begin());

  // Map second part as an anonymous mapping.
  constexpr size_t kChunk2Size = 2 * kPageSize;
  DCHECK_LT(kChunk2Size, reservation.Size());  // We want to split the reservation.
  uint8_t* addr2 = reservation.Begin();
  MemMap map2 = MemMap::MapAnonymous("MiddleReservation",
                                     addr2,
                                     /*byte_count=*/ kChunk2Size,
                                     PROT_READ,
                                     /*low_4gb=*/ false,
                                     /*reuse=*/ false,
                                     &reservation,
                                     &error_msg);
  ASSERT_TRUE(map2.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_EQ(map2.Size(), kChunk2Size);
  ASSERT_EQ(addr2, map2.Begin());
  ASSERT_EQ(map2.End(), map2.BaseEnd());  // kChunk2Size is page aligned.
  ASSERT_EQ(map2.BaseEnd(), reservation.Begin());

  // Map the rest of the reservation except the last byte.
  const size_t kChunk3Size = reservation.Size() - 1u;
  uint8_t* addr3 = reservation.Begin();
  MemMap map3 = MemMap::MapFileAtAddress(addr3,
                                         /*byte_count=*/ kChunk3Size,
                                         PROT_READ,
                                         MAP_PRIVATE,
                                         scratch_file.GetFd(),
                                         /*start=*/ dchecked_integral_cast<size_t>(addr3 - addr1),
                                         /*low_4gb=*/ false,
                                         scratch_file.GetFilename().c_str(),
                                         /*reuse=*/ false,
                                         &reservation,
                                         &error_msg);
  ASSERT_TRUE(map3.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_EQ(map3.Size(), kChunk3Size);
  ASSERT_EQ(addr3, map3.Begin());
  // Entire pages are taken from the `reservation`, so it's now exhausted.
  ASSERT_FALSE(reservation.IsValid());

  // Now split the MiddleReservation.
  constexpr size_t kChunk2ASize = kPageSize - 1u;
  DCHECK_LT(kChunk2ASize, map2.Size());  // We want to split the reservation.
  MemMap map2a = map2.TakeReservedMemory(kChunk2ASize);
  ASSERT_TRUE(map2a.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_EQ(map2a.Size(), kChunk2ASize);
  ASSERT_EQ(addr2, map2a.Begin());
  ASSERT_TRUE(map2.IsValid());
  ASSERT_LT(map2a.End(), map2a.BaseEnd());
  ASSERT_EQ(map2a.BaseEnd(), map2.Begin());

  // And take the rest of the middle reservation.
  const size_t kChunk2BSize = map2.Size() - 1u;
  uint8_t* addr2b = map2.Begin();
  MemMap map2b = map2.TakeReservedMemory(kChunk2BSize);
  ASSERT_TRUE(map2b.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_EQ(map2b.Size(), kChunk2ASize);
  ASSERT_EQ(addr2b, map2b.Begin());
  ASSERT_FALSE(map2.IsValid());
}

}  // namespace art

namespace {

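// On a fatal test failure, dump /proc/self/maps to the log so that the state of the address
// space can be inspected.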
class DumpMapsOnFailListener : public testing::EmptyTestEventListener {
  void OnTestPartResult(const testing::TestPartResult& result) override {
    switch (result.type()) {
      case testing::TestPartResult::kFatalFailure:
        art::PrintFileToLog("/proc/self/maps", android::base::LogSeverity::ERROR);
        break;

      // TODO: Could consider logging on EXPECT failures.
      case testing::TestPartResult::kNonFatalFailure:
      case testing::TestPartResult::kSkip:
      case testing::TestPartResult::kSuccess:
        break;
    }
  }
};

}  // namespace

// Inject our listener into the test runner.
extern "C"
__attribute__((visibility("default"))) __attribute__((used))
void ArtTestGlobalInit() {
  testing::UnitTest::GetInstance()->listeners().Append(new DumpMapsOnFailListener());
}