/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "mem_map.h"

#include <sys/mman.h>

#include <memory>
#include <random>

#include "base/common_art_test.h"
#include "common_runtime_test.h"  // For TEST_DISABLED_FOR_MIPS
#include "memory_tool.h"
#include "unix_file/fd_file.h"

namespace art {

class MemMapTest : public CommonArtTest {
 public:
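  // Probe whether the page containing `addr` is currently mapped: msync() of a
  // single byte succeeds for mapped memory and fails with ENOMEM for unmapped
  // memory; any other errno is unexpected and fatal.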
  static bool IsAddressMapped(void* addr) {
    bool res = msync(addr, 1, MS_SYNC) == 0;
    if (!res && errno != ENOMEM) {
      PLOG(FATAL) << "Unexpected error occurred on msync";
    }
    return res;
  }

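  // Fill a buffer of `size` bytes with random data; the ReplaceMapping tests
  // use it to check that contents are preserved when a mapping is moved.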
  static std::vector<uint8_t> RandomData(size_t size) {
    std::random_device rd;
    // uint8_t is not a valid IntType for std::uniform_int_distribution, so
    // draw ints in [0, 255] and narrow.
    std::uniform_int_distribution<int> dist(0, 255);
    std::vector<uint8_t> res;
    res.resize(size);
    for (size_t i = 0; i < size; i++) {
      res[i] = static_cast<uint8_t>(dist(rd));
    }
    return res;
  }

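  // Map a throwaway region simply to learn an address that is currently free.
  // The region is released again before returning, so the address is only a
  // hint, which is sufficient for these tests.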
  static uint8_t* GetValidMapAddress(size_t size, bool low_4gb) {
    // Find a valid map address and unmap it before returning.
    std::string error_msg;
    MemMap map = MemMap::MapAnonymous("temp",
                                      /* addr= */ nullptr,
                                      size,
                                      PROT_READ,
                                      low_4gb,
                                      &error_msg);
    CHECK(map.IsValid());
    return map.Begin();
  }

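  // Map two pages, tag each with a distinct byte pattern, split the second
  // page off with RemapAtEnd(), and verify that both halves keep their
  // contents and that the tail remains mapped after the head is unmapped.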
  static void RemapAtEndTest(bool low_4gb) {
    std::string error_msg;
    // Cast the page size to size_t.
    const size_t page_size = static_cast<size_t>(kPageSize);
    // Map a two-page memory region.
    MemMap m0 = MemMap::MapAnonymous("MemMapTest_RemapAtEndTest_map0",
                                     /* addr= */ nullptr,
                                     2 * page_size,
                                     PROT_READ | PROT_WRITE,
                                     low_4gb,
                                     &error_msg);
    // Check its state and write to it.
    ASSERT_TRUE(m0.IsValid());
    uint8_t* base0 = m0.Begin();
    ASSERT_TRUE(base0 != nullptr) << error_msg;
    size_t size0 = m0.Size();
    EXPECT_EQ(m0.Size(), 2 * page_size);
    EXPECT_EQ(m0.BaseBegin(), base0);
    EXPECT_EQ(m0.BaseSize(), size0);
    memset(base0, 42, 2 * page_size);
    // Remap the latter half into a second MemMap.
    MemMap m1 = m0.RemapAtEnd(base0 + page_size,
                              "MemMapTest_RemapAtEndTest_map1",
                              PROT_READ | PROT_WRITE,
                              &error_msg);
    // Check the states of the two maps.
    EXPECT_EQ(m0.Begin(), base0) << error_msg;
    EXPECT_EQ(m0.Size(), page_size);
    EXPECT_EQ(m0.BaseBegin(), base0);
    EXPECT_EQ(m0.BaseSize(), page_size);
    uint8_t* base1 = m1.Begin();
    size_t size1 = m1.Size();
    EXPECT_EQ(base1, base0 + page_size);
    EXPECT_EQ(size1, page_size);
    EXPECT_EQ(m1.BaseBegin(), base1);
    EXPECT_EQ(m1.BaseSize(), size1);
    // Write to the second region.
    memset(base1, 43, page_size);
    // Check the contents of the two regions.
    for (size_t i = 0; i < page_size; ++i) {
      EXPECT_EQ(base0[i], 42);
    }
    for (size_t i = 0; i < page_size; ++i) {
      EXPECT_EQ(base1[i], 43);
    }
    // Unmap the first region.
    m0.Reset();
    // Make sure the second region is still accessible after the first
    // region is unmapped.
    for (size_t i = 0; i < page_size; ++i) {
      EXPECT_EQ(base1[i], 43);
    }
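    // Remap all of m1 into a new map: m1 is left empty and becomes invalid
    // while m2 takes over the region.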
    MemMap m2 = m1.RemapAtEnd(m1.Begin(),
                              "MemMapTest_RemapAtEndTest_map1",
                              PROT_READ | PROT_WRITE,
                              &error_msg);
    ASSERT_TRUE(m2.IsValid()) << error_msg;
    ASSERT_FALSE(m1.IsValid());
  }

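  // Every test calls CommonInit() first; MemMap::Init() has to run before any
  // of the mapping helpers above are used.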
  void CommonInit() {
    MemMap::Init();
  }

#if defined(__LP64__) && !defined(__x86_64__)
  static uintptr_t GetLinearScanPos() {
    return MemMap::next_mem_pos_;
  }
#endif
};

#if defined(__LP64__) && !defined(__x86_64__)

#ifdef __BIONIC__
extern uintptr_t CreateStartPos(uint64_t input);
#endif

TEST_F(MemMapTest, Start) {
  CommonInit();
  uintptr_t start = GetLinearScanPos();
  EXPECT_LE(64 * KB, start);
  EXPECT_LT(start, static_cast<uintptr_t>(ART_BASE_ADDRESS));
#ifdef __BIONIC__
  // Test a couple of values. Make sure they are different.
  uintptr_t last = 0;
  for (size_t i = 0; i < 100; ++i) {
    uintptr_t random_start = CreateStartPos(i * kPageSize);
    EXPECT_NE(last, random_start);
    last = random_start;
  }

  // Even on max, should be below ART_BASE_ADDRESS.
  EXPECT_LT(CreateStartPos(~0), static_cast<uintptr_t>(ART_BASE_ADDRESS));
#endif
  // End of test.
}
#endif

// We need mremap to be able to test ReplaceMapping at all.
#if HAVE_MREMAP_SYSCALL
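// The ReplaceMapping tests exercise MemMap::ReplaceWith(): on success the
// source mapping is moved over the destination address (the destination takes
// on the source's size and contents, and the source becomes invalid and its
// old address unmapped); on failure both mappings are left untouched.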
TEST_F(MemMapTest, ReplaceMapping_SameSize) {
  std::string error_msg;
  MemMap dest = MemMap::MapAnonymous("MapAnonymousEmpty-atomic-replace-dest",
                                     /* addr= */ nullptr,
                                     kPageSize,
                                     PROT_READ,
                                     /* low_4gb= */ false,
                                     &error_msg);
  ASSERT_TRUE(dest.IsValid());
  MemMap source = MemMap::MapAnonymous("MapAnonymous-atomic-replace-source",
                                       /* addr= */ nullptr,
                                       kPageSize,
                                       PROT_WRITE | PROT_READ,
                                       /* low_4gb= */ false,
                                       &error_msg);
  ASSERT_TRUE(source.IsValid());
  void* source_addr = source.Begin();
  void* dest_addr = dest.Begin();
  ASSERT_TRUE(IsAddressMapped(source_addr));
  ASSERT_TRUE(IsAddressMapped(dest_addr));

  std::vector<uint8_t> data = RandomData(kPageSize);
  memcpy(source.Begin(), data.data(), data.size());

  ASSERT_TRUE(dest.ReplaceWith(&source, &error_msg)) << error_msg;

  ASSERT_FALSE(IsAddressMapped(source_addr));
  ASSERT_TRUE(IsAddressMapped(dest_addr));
  ASSERT_FALSE(source.IsValid());

  ASSERT_EQ(dest.Size(), static_cast<size_t>(kPageSize));

  ASSERT_EQ(memcmp(dest.Begin(), data.data(), dest.Size()), 0);
}

TEST_F(MemMapTest, ReplaceMapping_MakeLarger) {
  std::string error_msg;
  MemMap dest = MemMap::MapAnonymous("MapAnonymousEmpty-atomic-replace-dest",
                                     /* addr= */ nullptr,
                                     5 * kPageSize,  // Need to make it larger
                                                     // initially so we know
                                                     // there won't be mappings
                                                     // in the way when we move
                                                     // source.
                                     PROT_READ,
                                     /* low_4gb= */ false,
                                     &error_msg);
  ASSERT_TRUE(dest.IsValid());
  MemMap source = MemMap::MapAnonymous("MapAnonymous-atomic-replace-source",
                                       /* addr= */ nullptr,
                                       3 * kPageSize,
                                       PROT_WRITE | PROT_READ,
                                       /* low_4gb= */ false,
                                       &error_msg);
  ASSERT_TRUE(source.IsValid());
  uint8_t* source_addr = source.Begin();
  uint8_t* dest_addr = dest.Begin();
  ASSERT_TRUE(IsAddressMapped(source_addr));

  // Fill the source with random data.
  std::vector<uint8_t> data = RandomData(3 * kPageSize);
  memcpy(source.Begin(), data.data(), data.size());

  // Make the dest smaller so that we know we'll have space.
  dest.SetSize(kPageSize);

  ASSERT_TRUE(IsAddressMapped(dest_addr));
  ASSERT_FALSE(IsAddressMapped(dest_addr + 2 * kPageSize));
  ASSERT_EQ(dest.Size(), static_cast<size_t>(kPageSize));

  ASSERT_TRUE(dest.ReplaceWith(&source, &error_msg)) << error_msg;

  ASSERT_FALSE(IsAddressMapped(source_addr));
  ASSERT_EQ(dest.Size(), static_cast<size_t>(3 * kPageSize));
  ASSERT_TRUE(IsAddressMapped(dest_addr));
  ASSERT_TRUE(IsAddressMapped(dest_addr + 2 * kPageSize));
  ASSERT_FALSE(source.IsValid());

  ASSERT_EQ(memcmp(dest.Begin(), data.data(), dest.Size()), 0);
}

TEST_F(MemMapTest, ReplaceMapping_MakeSmaller) {
  std::string error_msg;
  MemMap dest = MemMap::MapAnonymous("MapAnonymousEmpty-atomic-replace-dest",
                                     /* addr= */ nullptr,
                                     3 * kPageSize,
                                     PROT_READ,
                                     /* low_4gb= */ false,
                                     &error_msg);
  ASSERT_TRUE(dest.IsValid());
  MemMap source = MemMap::MapAnonymous("MapAnonymous-atomic-replace-source",
                                       /* addr= */ nullptr,
                                       kPageSize,
                                       PROT_WRITE | PROT_READ,
                                       /* low_4gb= */ false,
                                       &error_msg);
  ASSERT_TRUE(source.IsValid());
  uint8_t* source_addr = source.Begin();
  uint8_t* dest_addr = dest.Begin();
  ASSERT_TRUE(IsAddressMapped(source_addr));
  ASSERT_TRUE(IsAddressMapped(dest_addr));
  ASSERT_TRUE(IsAddressMapped(dest_addr + 2 * kPageSize));
  ASSERT_EQ(dest.Size(), static_cast<size_t>(3 * kPageSize));

  std::vector<uint8_t> data = RandomData(kPageSize);
  memcpy(source.Begin(), data.data(), kPageSize);

  ASSERT_TRUE(dest.ReplaceWith(&source, &error_msg)) << error_msg;

  ASSERT_FALSE(IsAddressMapped(source_addr));
  ASSERT_EQ(dest.Size(), static_cast<size_t>(kPageSize));
  ASSERT_TRUE(IsAddressMapped(dest_addr));
  ASSERT_FALSE(IsAddressMapped(dest_addr + 2 * kPageSize));
  ASSERT_FALSE(source.IsValid());

  ASSERT_EQ(memcmp(dest.Begin(), data.data(), dest.Size()), 0);
}

TEST_F(MemMapTest, ReplaceMapping_FailureOverlap) {
  std::string error_msg;
  MemMap dest =
      MemMap::MapAnonymous(
          "MapAnonymousEmpty-atomic-replace-dest",
          /* addr= */ nullptr,
          3 * kPageSize,  // Need to make it larger initially so we know there won't be mappings in
                          // the way when we move source.
          PROT_READ | PROT_WRITE,
          /* low_4gb= */ false,
          &error_msg);
  ASSERT_TRUE(dest.IsValid());
  // Resize down to 1 page so we can remap the rest.
  dest.SetSize(kPageSize);
  // Create source from the last 2 pages.
  MemMap source = MemMap::MapAnonymous("MapAnonymous-atomic-replace-source",
                                       dest.Begin() + kPageSize,
                                       2 * kPageSize,
                                       PROT_WRITE | PROT_READ,
                                       /* low_4gb= */ false,
                                       &error_msg);
  ASSERT_TRUE(source.IsValid());
  ASSERT_EQ(dest.Begin() + kPageSize, source.Begin());
  uint8_t* source_addr = source.Begin();
  uint8_t* dest_addr = dest.Begin();
  ASSERT_TRUE(IsAddressMapped(source_addr));

  // Fill the source and dest with random data.
  std::vector<uint8_t> data = RandomData(2 * kPageSize);
  memcpy(source.Begin(), data.data(), data.size());
  std::vector<uint8_t> dest_data = RandomData(kPageSize);
  memcpy(dest.Begin(), dest_data.data(), dest_data.size());

  ASSERT_TRUE(IsAddressMapped(dest_addr));
  ASSERT_EQ(dest.Size(), static_cast<size_t>(kPageSize));

  ASSERT_FALSE(dest.ReplaceWith(&source, &error_msg)) << error_msg;

  ASSERT_TRUE(IsAddressMapped(source_addr));
  ASSERT_TRUE(IsAddressMapped(dest_addr));
  ASSERT_EQ(source.Size(), data.size());
  ASSERT_EQ(dest.Size(), dest_data.size());

  ASSERT_EQ(memcmp(source.Begin(), data.data(), data.size()), 0);
  ASSERT_EQ(memcmp(dest.Begin(), dest_data.data(), dest_data.size()), 0);
}
#endif  // HAVE_MREMAP_SYSCALL

TEST_F(MemMapTest, MapAnonymousEmpty) {
  CommonInit();
  std::string error_msg;
  MemMap map = MemMap::MapAnonymous("MapAnonymousEmpty",
                                    /* addr= */ nullptr,
                                    0,
                                    PROT_READ,
                                    /* low_4gb= */ false,
                                    &error_msg);
  ASSERT_FALSE(map.IsValid()) << error_msg;
  ASSERT_FALSE(error_msg.empty());

  error_msg.clear();
  map = MemMap::MapAnonymous("MapAnonymousNonEmpty",
                             /* addr= */ nullptr,
                             kPageSize,
                             PROT_READ | PROT_WRITE,
                             /* low_4gb= */ false,
                             &error_msg);
  ASSERT_TRUE(map.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
}

TEST_F(MemMapTest, MapAnonymousFailNullError) {
  CommonInit();
  // Test that we don't crash with a null error_str when mapping at an invalid location.
  MemMap map = MemMap::MapAnonymous("MapAnonymousInvalid",
                                    reinterpret_cast<uint8_t*>(kPageSize),
                                    0x20000,
                                    PROT_READ | PROT_WRITE,
                                    /* low_4gb= */ false,
                                    nullptr);
  ASSERT_FALSE(map.IsValid());
}

#ifdef __LP64__
TEST_F(MemMapTest, MapAnonymousEmpty32bit) {
  CommonInit();
  std::string error_msg;
  MemMap map = MemMap::MapAnonymous("MapAnonymousEmpty",
                                    /* addr= */ nullptr,
                                    0,
                                    PROT_READ,
                                    /* low_4gb= */ true,
                                    &error_msg);
  ASSERT_FALSE(map.IsValid()) << error_msg;
  ASSERT_FALSE(error_msg.empty());

  error_msg.clear();
  map = MemMap::MapAnonymous("MapAnonymousNonEmpty",
                             /* addr= */ nullptr,
                             kPageSize,
                             PROT_READ | PROT_WRITE,
                             /* low_4gb= */ true,
                             &error_msg);
  ASSERT_TRUE(map.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_LT(reinterpret_cast<uintptr_t>(map.BaseBegin()), 1ULL << 32);
}

TEST_F(MemMapTest, MapFile32Bit) {
  CommonInit();
  std::string error_msg;
  ScratchFile scratch_file;
  constexpr size_t kMapSize = kPageSize;
  std::unique_ptr<uint8_t[]> data(new uint8_t[kMapSize]());
  ASSERT_TRUE(scratch_file.GetFile()->WriteFully(&data[0], kMapSize));
  MemMap map = MemMap::MapFile(/*byte_count=*/kMapSize,
                               PROT_READ,
                               MAP_PRIVATE,
                               scratch_file.GetFd(),
                               /*start=*/0,
                               /*low_4gb=*/true,
                               scratch_file.GetFilename().c_str(),
                               &error_msg);
  ASSERT_TRUE(map.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_EQ(map.Size(), kMapSize);
  ASSERT_LT(reinterpret_cast<uintptr_t>(map.BaseBegin()), 1ULL << 32);
}
#endif

TEST_F(MemMapTest, MapAnonymousExactAddr) {
  CommonInit();
  std::string error_msg;
  // Find a valid address.
  uint8_t* valid_address = GetValidMapAddress(kPageSize, /*low_4gb=*/false);
  // Map at an address that should work, which should succeed.
  MemMap map0 = MemMap::MapAnonymous("MapAnonymous0",
                                     valid_address,
                                     kPageSize,
                                     PROT_READ | PROT_WRITE,
                                     /* low_4gb= */ false,
                                     &error_msg);
  ASSERT_TRUE(map0.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_TRUE(map0.BaseBegin() == valid_address);
  // Map at an unspecified address, which should succeed.
  MemMap map1 = MemMap::MapAnonymous("MapAnonymous1",
                                     /* addr= */ nullptr,
                                     kPageSize,
                                     PROT_READ | PROT_WRITE,
                                     /* low_4gb= */ false,
                                     &error_msg);
  ASSERT_TRUE(map1.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_TRUE(map1.BaseBegin() != nullptr);
  // Attempt to map at the same address, which should fail.
  MemMap map2 = MemMap::MapAnonymous("MapAnonymous2",
                                     reinterpret_cast<uint8_t*>(map1.BaseBegin()),
                                     kPageSize,
                                     PROT_READ | PROT_WRITE,
                                     /* low_4gb= */ false,
                                     &error_msg);
  ASSERT_FALSE(map2.IsValid()) << error_msg;
  ASSERT_TRUE(!error_msg.empty());
}

TEST_F(MemMapTest, RemapAtEnd) {
  RemapAtEndTest(false);
}

#ifdef __LP64__
TEST_F(MemMapTest, RemapAtEnd32bit) {
  RemapAtEndTest(true);
}
#endif

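// Map a three-page file, then repeatedly split off the last page with the
// file-backed RemapAtEnd() overload and check that each tail sees exactly the
// bytes stored at the corresponding file offset.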
TEST_F(MemMapTest, RemapFileViewAtEnd) {
  CommonInit();
  std::string error_msg;
  ScratchFile scratch_file;

  // Create a scratch file 3 pages large, with a distinct pattern per page.
  constexpr size_t kMapSize = 3 * kPageSize;
  std::unique_ptr<uint8_t[]> data(new uint8_t[kMapSize]());
  memset(&data[0], 0x55, kPageSize);
  memset(&data[kPageSize], 0x5a, kPageSize);
  memset(&data[2 * kPageSize], 0xaa, kPageSize);
  ASSERT_TRUE(scratch_file.GetFile()->WriteFully(&data[0], kMapSize));

  MemMap map = MemMap::MapFile(/*byte_count=*/kMapSize,
                               PROT_READ,
                               MAP_PRIVATE,
                               scratch_file.GetFd(),
                               /*start=*/0,
                               /*low_4gb=*/true,
                               scratch_file.GetFilename().c_str(),
                               &error_msg);
  ASSERT_TRUE(map.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_EQ(map.Size(), kMapSize);
  ASSERT_LT(reinterpret_cast<uintptr_t>(map.BaseBegin()), 1ULL << 32);
  ASSERT_EQ(data[0], *map.Begin());
  ASSERT_EQ(data[kPageSize], *(map.Begin() + kPageSize));
  ASSERT_EQ(data[2 * kPageSize], *(map.Begin() + 2 * kPageSize));

  for (size_t offset = 2 * kPageSize; offset > 0; offset -= kPageSize) {
    MemMap tail = map.RemapAtEnd(map.Begin() + offset,
                                 "bad_offset_map",
                                 PROT_READ,
                                 MAP_PRIVATE | MAP_FIXED,
                                 scratch_file.GetFd(),
                                 offset,
                                 &error_msg);
    ASSERT_TRUE(tail.IsValid()) << error_msg;
    ASSERT_TRUE(error_msg.empty());
    ASSERT_EQ(offset, map.Size());
    ASSERT_EQ(static_cast<size_t>(kPageSize), tail.Size());
    ASSERT_EQ(tail.Begin(), map.Begin() + map.Size());
    ASSERT_EQ(data[offset], *tail.Begin());
  }
}

TEST_F(MemMapTest, MapAnonymousExactAddr32bitHighAddr) {
  // Some MIPS32 hardware (namely the Creator Ci20 development board)
  // cannot allocate in the 2GB-4GB region.
  TEST_DISABLED_FOR_MIPS();

  // This test does not work under AddressSanitizer.
  // Historical note: This test did not work under Valgrind either.
  TEST_DISABLED_FOR_MEMORY_TOOL();

  CommonInit();
  constexpr size_t size = 0x100000;
  // Try all addresses starting from 2GB to 4GB.
  size_t start_addr = 2 * GB;
  std::string error_msg;
  MemMap map;
  for (; start_addr <= std::numeric_limits<uint32_t>::max() - size; start_addr += size) {
    map = MemMap::MapAnonymous("MapAnonymousExactAddr32bitHighAddr",
                               reinterpret_cast<uint8_t*>(start_addr),
                               size,
                               PROT_READ | PROT_WRITE,
                               /*low_4gb=*/ true,
                               &error_msg);
    if (map.IsValid()) {
      break;
    }
  }
  ASSERT_TRUE(map.IsValid()) << error_msg;
  ASSERT_GE(reinterpret_cast<uintptr_t>(map.End()), 2u * GB);
  ASSERT_TRUE(error_msg.empty());
  ASSERT_EQ(map.BaseBegin(), reinterpret_cast<void*>(start_addr));
}

TEST_F(MemMapTest, MapAnonymousOverflow) {
  CommonInit();
  std::string error_msg;
  uintptr_t ptr = 0;
  ptr -= kPageSize;  // Now it's close to the top.
  MemMap map = MemMap::MapAnonymous("MapAnonymousOverflow",
                                    reinterpret_cast<uint8_t*>(ptr),
                                    2 * kPageSize,  // Brings it over the top.
                                    PROT_READ | PROT_WRITE,
                                    /* low_4gb= */ false,
                                    &error_msg);
  ASSERT_FALSE(map.IsValid());
  ASSERT_FALSE(error_msg.empty());
}

#ifdef __LP64__
TEST_F(MemMapTest, MapAnonymousLow4GBExpectedTooHigh) {
  CommonInit();
  std::string error_msg;
  MemMap map =
      MemMap::MapAnonymous("MapAnonymousLow4GBExpectedTooHigh",
                           reinterpret_cast<uint8_t*>(UINT64_C(0x100000000)),
                           kPageSize,
                           PROT_READ | PROT_WRITE,
                           /* low_4gb= */ true,
                           &error_msg);
  ASSERT_FALSE(map.IsValid());
  ASSERT_FALSE(error_msg.empty());
}

TEST_F(MemMapTest, MapAnonymousLow4GBRangeTooHigh) {
  CommonInit();
  std::string error_msg;
  MemMap map = MemMap::MapAnonymous("MapAnonymousLow4GBRangeTooHigh",
                                    reinterpret_cast<uint8_t*>(0xF0000000),
                                    0x20000000,
                                    PROT_READ | PROT_WRITE,
                                    /* low_4gb= */ true,
                                    &error_msg);
  ASSERT_FALSE(map.IsValid());
  ASSERT_FALSE(error_msg.empty());
}
#endif

TEST_F(MemMapTest, MapAnonymousReuse) {
  CommonInit();
  std::string error_msg;
  MemMap map = MemMap::MapAnonymous("MapAnonymousReserve",
                                    nullptr,
                                    0x20000,
                                    PROT_READ | PROT_WRITE,
                                    /* low_4gb= */ false,
                                    /* reuse= */ false,
                                    /* reservation= */ nullptr,
                                    &error_msg);
  ASSERT_TRUE(map.IsValid());
  ASSERT_TRUE(error_msg.empty());
  MemMap map2 = MemMap::MapAnonymous("MapAnonymousReused",
                                     reinterpret_cast<uint8_t*>(map.BaseBegin()),
                                     0x10000,
                                     PROT_READ | PROT_WRITE,
                                     /* low_4gb= */ false,
                                     /* reuse= */ true,
                                     /* reservation= */ nullptr,
                                     &error_msg);
  ASSERT_TRUE(map2.IsValid());
  ASSERT_TRUE(error_msg.empty());
}

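// CheckNoGaps(first, last) should report true only while the maps from `first`
// through `last` cover a contiguous address range with no holes.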
TEST_F(MemMapTest, CheckNoGaps) {
  CommonInit();
  std::string error_msg;
  constexpr size_t kNumPages = 3;
  // Map a 3-page mem map.
  MemMap map = MemMap::MapAnonymous("MapAnonymous0",
                                    /* addr= */ nullptr,
                                    kPageSize * kNumPages,
                                    PROT_READ | PROT_WRITE,
                                    /* low_4gb= */ false,
                                    &error_msg);
  ASSERT_TRUE(map.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  // Record the base address.
  uint8_t* map_base = reinterpret_cast<uint8_t*>(map.BaseBegin());
  // Unmap it.
  map.Reset();

  // Map at the same address, but in page-sized separate mem maps,
  // assuming the space at the address is still available.
  MemMap map0 = MemMap::MapAnonymous("MapAnonymous0",
                                     map_base,
                                     kPageSize,
                                     PROT_READ | PROT_WRITE,
                                     /* low_4gb= */ false,
                                     &error_msg);
  ASSERT_TRUE(map0.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  MemMap map1 = MemMap::MapAnonymous("MapAnonymous1",
                                     map_base + kPageSize,
                                     kPageSize,
                                     PROT_READ | PROT_WRITE,
                                     /* low_4gb= */ false,
                                     &error_msg);
  ASSERT_TRUE(map1.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  MemMap map2 = MemMap::MapAnonymous("MapAnonymous2",
                                     map_base + kPageSize * 2,
                                     kPageSize,
                                     PROT_READ | PROT_WRITE,
                                     /* low_4gb= */ false,
                                     &error_msg);
  ASSERT_TRUE(map2.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());

  // One-map cases.
  ASSERT_TRUE(MemMap::CheckNoGaps(map0, map0));
  ASSERT_TRUE(MemMap::CheckNoGaps(map1, map1));
  ASSERT_TRUE(MemMap::CheckNoGaps(map2, map2));

  // Two or three-map cases.
  ASSERT_TRUE(MemMap::CheckNoGaps(map0, map1));
  ASSERT_TRUE(MemMap::CheckNoGaps(map1, map2));
  ASSERT_TRUE(MemMap::CheckNoGaps(map0, map2));

  // Unmap the middle one.
  map1.Reset();

  // Should return false now that there's a gap in the middle.
  ASSERT_FALSE(MemMap::CheckNoGaps(map0, map2));
}

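// AlignBy(n) trims a map at both ends so that Begin() and End() become
// n-aligned. The test splits one 14-page region into four maps and checks
// which boundary pages each map gives up, depending on whether the original
// base happened to be 2-page aligned.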
TEST_F(MemMapTest, AlignBy) {
  CommonInit();
  std::string error_msg;
  // Cast the page size to size_t.
  const size_t page_size = static_cast<size_t>(kPageSize);
  // Map a region.
  MemMap m0 = MemMap::MapAnonymous("MemMapTest_AlignByTest_map0",
                                   /* addr= */ nullptr,
                                   14 * page_size,
                                   PROT_READ | PROT_WRITE,
                                   /* low_4gb= */ false,
                                   &error_msg);
  ASSERT_TRUE(m0.IsValid());
  uint8_t* base0 = m0.Begin();
  ASSERT_TRUE(base0 != nullptr) << error_msg;
  ASSERT_EQ(m0.Size(), 14 * page_size);
  ASSERT_EQ(m0.BaseBegin(), base0);
  ASSERT_EQ(m0.BaseSize(), m0.Size());

  // Break it into several regions by using RemapAtEnd.
  MemMap m1 = m0.RemapAtEnd(base0 + 3 * page_size,
                            "MemMapTest_AlignByTest_map1",
                            PROT_READ | PROT_WRITE,
                            &error_msg);
  uint8_t* base1 = m1.Begin();
  ASSERT_TRUE(base1 != nullptr) << error_msg;
  ASSERT_EQ(base1, base0 + 3 * page_size);
  ASSERT_EQ(m0.Size(), 3 * page_size);

  MemMap m2 = m1.RemapAtEnd(base1 + 4 * page_size,
                            "MemMapTest_AlignByTest_map2",
                            PROT_READ | PROT_WRITE,
                            &error_msg);
  uint8_t* base2 = m2.Begin();
  ASSERT_TRUE(base2 != nullptr) << error_msg;
  ASSERT_EQ(base2, base1 + 4 * page_size);
  ASSERT_EQ(m1.Size(), 4 * page_size);

  MemMap m3 = m2.RemapAtEnd(base2 + 3 * page_size,
                            "MemMapTest_AlignByTest_map3",
                            PROT_READ | PROT_WRITE,
                            &error_msg);
  uint8_t* base3 = m3.Begin();
  ASSERT_TRUE(base3 != nullptr) << error_msg;
  ASSERT_EQ(base3, base2 + 3 * page_size);
  ASSERT_EQ(m2.Size(), 3 * page_size);
  ASSERT_EQ(m3.Size(), 4 * page_size);

  uint8_t* end0 = base0 + m0.Size();
  uint8_t* end1 = base1 + m1.Size();
  uint8_t* end2 = base2 + m2.Size();
  uint8_t* end3 = base3 + m3.Size();

  ASSERT_EQ(static_cast<size_t>(end3 - base0), 14 * page_size);

  if (IsAlignedParam(base0, 2 * page_size)) {
    ASSERT_FALSE(IsAlignedParam(base1, 2 * page_size));
    ASSERT_FALSE(IsAlignedParam(base2, 2 * page_size));
    ASSERT_TRUE(IsAlignedParam(base3, 2 * page_size));
    ASSERT_TRUE(IsAlignedParam(end3, 2 * page_size));
  } else {
    ASSERT_TRUE(IsAlignedParam(base1, 2 * page_size));
    ASSERT_TRUE(IsAlignedParam(base2, 2 * page_size));
    ASSERT_FALSE(IsAlignedParam(base3, 2 * page_size));
    ASSERT_FALSE(IsAlignedParam(end3, 2 * page_size));
  }

  // Align by 2 * page_size.
  m0.AlignBy(2 * page_size);
  m1.AlignBy(2 * page_size);
  m2.AlignBy(2 * page_size);
  m3.AlignBy(2 * page_size);

  EXPECT_TRUE(IsAlignedParam(m0.Begin(), 2 * page_size));
  EXPECT_TRUE(IsAlignedParam(m1.Begin(), 2 * page_size));
  EXPECT_TRUE(IsAlignedParam(m2.Begin(), 2 * page_size));
  EXPECT_TRUE(IsAlignedParam(m3.Begin(), 2 * page_size));

  EXPECT_TRUE(IsAlignedParam(m0.Begin() + m0.Size(), 2 * page_size));
  EXPECT_TRUE(IsAlignedParam(m1.Begin() + m1.Size(), 2 * page_size));
  EXPECT_TRUE(IsAlignedParam(m2.Begin() + m2.Size(), 2 * page_size));
  EXPECT_TRUE(IsAlignedParam(m3.Begin() + m3.Size(), 2 * page_size));

  if (IsAlignedParam(base0, 2 * page_size)) {
    EXPECT_EQ(m0.Begin(), base0);
    EXPECT_EQ(m0.Begin() + m0.Size(), end0 - page_size);
    EXPECT_EQ(m1.Begin(), base1 + page_size);
    EXPECT_EQ(m1.Begin() + m1.Size(), end1 - page_size);
    EXPECT_EQ(m2.Begin(), base2 + page_size);
    EXPECT_EQ(m2.Begin() + m2.Size(), end2);
    EXPECT_EQ(m3.Begin(), base3);
    EXPECT_EQ(m3.Begin() + m3.Size(), end3);
  } else {
    EXPECT_EQ(m0.Begin(), base0 + page_size);
    EXPECT_EQ(m0.Begin() + m0.Size(), end0);
    EXPECT_EQ(m1.Begin(), base1);
    EXPECT_EQ(m1.Begin() + m1.Size(), end1);
    EXPECT_EQ(m2.Begin(), base2);
    EXPECT_EQ(m2.Begin() + m2.Size(), end2 - page_size);
    EXPECT_EQ(m3.Begin(), base3 + page_size);
    EXPECT_EQ(m3.Begin() + m3.Size(), end3 - page_size);
  }
}

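// A PROT_NONE reservation can be carved up piece by piece: MapFileAtAddress()
// and MapAnonymous() with a `reservation` argument take whole pages from its
// front, TakeReservedMemory() splits an already-mapped chunk, and the
// reservation becomes invalid once it is fully consumed.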
TEST_F(MemMapTest, Reservation) {
  CommonInit();
  std::string error_msg;
  ScratchFile scratch_file;
  constexpr size_t kMapSize = 5 * kPageSize;
  std::unique_ptr<uint8_t[]> data(new uint8_t[kMapSize]());
  ASSERT_TRUE(scratch_file.GetFile()->WriteFully(&data[0], kMapSize));

  MemMap reservation = MemMap::MapAnonymous("Test reservation",
                                            /* addr= */ nullptr,
                                            kMapSize,
                                            PROT_NONE,
                                            /* low_4gb= */ false,
                                            &error_msg);
  ASSERT_TRUE(reservation.IsValid());
  ASSERT_TRUE(error_msg.empty());

  // Map first part of the reservation.
  constexpr size_t kChunk1Size = kPageSize - 1u;
  static_assert(kChunk1Size < kMapSize, "We want to split the reservation.");
  uint8_t* addr1 = reservation.Begin();
  MemMap map1 = MemMap::MapFileAtAddress(addr1,
                                         /* byte_count= */ kChunk1Size,
                                         PROT_READ,
                                         MAP_PRIVATE,
                                         scratch_file.GetFd(),
                                         /* start= */ 0,
                                         /* low_4gb= */ false,
                                         scratch_file.GetFilename().c_str(),
                                         /* reuse= */ false,
                                         &reservation,
                                         &error_msg);
  ASSERT_TRUE(map1.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_EQ(map1.Size(), kChunk1Size);
  ASSERT_EQ(addr1, map1.Begin());
  ASSERT_TRUE(reservation.IsValid());
  // Entire pages are taken from the `reservation`.
  ASSERT_LT(map1.End(), map1.BaseEnd());
  ASSERT_EQ(map1.BaseEnd(), reservation.Begin());

  // Map second part as an anonymous mapping.
  constexpr size_t kChunk2Size = 2 * kPageSize;
  DCHECK_LT(kChunk2Size, reservation.Size());  // We want to split the reservation.
  uint8_t* addr2 = reservation.Begin();
  MemMap map2 = MemMap::MapAnonymous("MiddleReservation",
                                     addr2,
                                     /* byte_count= */ kChunk2Size,
                                     PROT_READ,
                                     /* low_4gb= */ false,
                                     /* reuse= */ false,
                                     &reservation,
                                     &error_msg);
  ASSERT_TRUE(map2.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_EQ(map2.Size(), kChunk2Size);
  ASSERT_EQ(addr2, map2.Begin());
  ASSERT_EQ(map2.End(), map2.BaseEnd());  // kChunk2Size is page aligned.
  ASSERT_EQ(map2.BaseEnd(), reservation.Begin());

  // Map the rest of the reservation except the last byte.
  const size_t kChunk3Size = reservation.Size() - 1u;
  uint8_t* addr3 = reservation.Begin();
  MemMap map3 = MemMap::MapFileAtAddress(addr3,
                                         /* byte_count= */ kChunk3Size,
                                         PROT_READ,
                                         MAP_PRIVATE,
                                         scratch_file.GetFd(),
                                         /* start= */ dchecked_integral_cast<size_t>(addr3 - addr1),
                                         /* low_4gb= */ false,
                                         scratch_file.GetFilename().c_str(),
                                         /* reuse= */ false,
                                         &reservation,
                                         &error_msg);
  ASSERT_TRUE(map3.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_EQ(map3.Size(), kChunk3Size);
  ASSERT_EQ(addr3, map3.Begin());
  // Entire pages are taken from the `reservation`, so it's now exhausted.
  ASSERT_FALSE(reservation.IsValid());

  // Now split the MiddleReservation.
  constexpr size_t kChunk2ASize = kPageSize - 1u;
  DCHECK_LT(kChunk2ASize, map2.Size());  // We want to split the reservation.
  MemMap map2a = map2.TakeReservedMemory(kChunk2ASize);
  ASSERT_TRUE(map2a.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_EQ(map2a.Size(), kChunk2ASize);
  ASSERT_EQ(addr2, map2a.Begin());
  ASSERT_TRUE(map2.IsValid());
  ASSERT_LT(map2a.End(), map2a.BaseEnd());
  ASSERT_EQ(map2a.BaseEnd(), map2.Begin());

  // And take the rest of the middle reservation.
  const size_t kChunk2BSize = map2.Size() - 1u;
  uint8_t* addr2b = map2.Begin();
  MemMap map2b = map2.TakeReservedMemory(kChunk2BSize);
  ASSERT_TRUE(map2b.IsValid()) << error_msg;
  ASSERT_TRUE(error_msg.empty());
  ASSERT_EQ(map2b.Size(), kChunk2BSize);
  ASSERT_EQ(addr2b, map2b.Begin());
  ASSERT_FALSE(map2.IsValid());
}

}  // namespace art