Improve the region space memory mapping.

Add a region space mmap fallback when the initial address isn't
available.
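
As a rough illustration only (this change itself goes through the
existing MemMap machinery; the helper name below is hypothetical), the
fallback amounts to retrying the mapping with a kernel-chosen address
when the preferred address cannot be obtained:

  #include <sys/mman.h>
  #include <cstddef>
  #include <cstdint>

  // Sketch: ask for the preferred address as a hint; if the kernel maps
  // elsewhere (or fails), unmap and retry with no address hint.
  void* MapRegionSpaceSketch(uint8_t* requested_begin, size_t capacity) {
    void* addr = mmap(requested_begin, capacity, PROT_READ | PROT_WRITE,
                      MAP_PRIVATE | MAP_ANONYMOUS, /*fd=*/ -1, /*offset=*/ 0);
    if (addr == MAP_FAILED ||
        (requested_begin != nullptr && addr != requested_begin)) {
      if (addr != MAP_FAILED) {
        munmap(addr, capacity);  // Mapped, but not at the requested address.
      }
      addr = mmap(nullptr, capacity, PROT_READ | PROT_WRITE,
                  MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    }
    return addr == MAP_FAILED ? nullptr : addr;
  }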

Clean up around the asan-specific base address.

Add MemMap::AlignBy to align the region space base address by the
region size, which is currently required by ReadBarrierTable.
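
A hypothetical usage sketch (the RegionSpace caller is not part of this
diff; kRegionSize stands in for the region size constant):

  // After creating the map, trim it so that both the base address and
  // the size become multiples of the region size.
  mem_map->AlignBy(kRegionSize);
  CHECK_ALIGNED(mem_map->Begin(), kRegionSize);
  CHECK_ALIGNED(mem_map->Size(), kRegionSize);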

Disable some read barriers in ZygoteCompactingCollector to avoid a
DCHECK failure in LockWord::SetMarkBitState: the unnecessary read
barriers performed in SizeOf/VisitReference could hit classes that are
still in the forwarding state.

Bug: 12687968
Test: test-art-host with CC and CMS.
Test: marlin-userdebug_asan_coverage boot.
Test: angler boots with CC and CMS.

Change-Id: I70f99779df6acc1b64cab6402f3ef7c73ce5b39b
diff --git a/runtime/mem_map.cc b/runtime/mem_map.cc
index 93c212b..40309b9 100644
--- a/runtime/mem_map.cc
+++ b/runtime/mem_map.cc
@@ -962,4 +962,53 @@
   }
 }
 
+void MemMap::AlignBy(size_t size) {
+  CHECK_EQ(begin_, base_begin_) << "Unsupported";
+  CHECK_EQ(size_, base_size_) << "Unsupported";
+  CHECK_GT(size, static_cast<size_t>(kPageSize));
+  CHECK_ALIGNED(size, kPageSize);
+  if (IsAlignedParam(reinterpret_cast<uintptr_t>(base_begin_), size) &&
+      IsAlignedParam(base_size_, size)) {
+    // Already aligned.
+    return;
+  }
+  uint8_t* base_begin = reinterpret_cast<uint8_t*>(base_begin_);
+  uint8_t* base_end = base_begin + base_size_;
+  uint8_t* aligned_base_begin = AlignUp(base_begin, size);
+  uint8_t* aligned_base_end = AlignDown(base_end, size);
+  CHECK_LE(base_begin, aligned_base_begin);
+  CHECK_LE(aligned_base_end, base_end);
+  size_t aligned_base_size = aligned_base_end - aligned_base_begin;
+  CHECK_LT(aligned_base_begin, aligned_base_end)
+      << "base_begin = " << reinterpret_cast<void*>(base_begin)
+      << " base_end = " << reinterpret_cast<void*>(base_end);
+  CHECK_GE(aligned_base_size, size);
+  // Unmap the unaligned parts.
+  if (base_begin < aligned_base_begin) {
+    MEMORY_TOOL_MAKE_UNDEFINED(base_begin, aligned_base_begin - base_begin);
+    CHECK_EQ(munmap(base_begin, aligned_base_begin - base_begin), 0)
+        << "base_begin=" << reinterpret_cast<void*>(base_begin)
+        << " aligned_base_begin=" << reinterpret_cast<void*>(aligned_base_begin);
+  }
+  if (aligned_base_end < base_end) {
+    MEMORY_TOOL_MAKE_UNDEFINED(aligned_base_end, base_end - aligned_base_end);
+    CHECK_EQ(munmap(aligned_base_end, base_end - aligned_base_end), 0)
+        << "base_end=" << reinterpret_cast<void*>(base_end)
+        << " aligned_base_end=" << reinterpret_cast<void*>(aligned_base_end);
+  }
+  std::lock_guard<std::mutex> mu(*mem_maps_lock_);
+  base_begin_ = aligned_base_begin;
+  base_size_ = aligned_base_size;
+  begin_ = aligned_base_begin;
+  size_ = aligned_base_size;
+  DCHECK(maps_ != nullptr);
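+  // The base address changed, so re-key this MemMap in the global maps_ multimap.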
+  if (base_begin < aligned_base_begin) {
+    auto it = maps_->find(base_begin);
+    CHECK(it != maps_->end()) << "MemMap not found";
+    maps_->erase(it);
+    maps_->insert(std::make_pair(base_begin_, this));
+  }
+}
+
 }  // namespace art