Remove unnecessary indirection from MemMap.
Avoid passing around plain MemMap pointers by making MemMap
moveable and returning MemMap objects by value.
Previously, a zero-size MemMap could still be valid; this is
now forbidden.
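A minimal sketch of the move-only, return-by-value pattern this
adopts (hypothetical names and members, not the real MemMap
interface): an invalid MemMap replaces both the null
std::unique_ptr<MemMap> and the old zero-size-but-valid state.

  #include <sys/mman.h>
  #include <cstddef>
  #include <utility>

  class MemMap {
   public:
    MemMap() : begin_(nullptr), size_(0u) {}  // Invalid; zero size is never valid.
    MemMap(void* begin, size_t size) : begin_(begin), size_(size) {}
    MemMap(MemMap&& other) noexcept
        : begin_(std::exchange(other.begin_, nullptr)),
          size_(std::exchange(other.size_, 0u)) {}
    MemMap& operator=(MemMap&& other) noexcept {
      Reset();
      begin_ = std::exchange(other.begin_, nullptr);
      size_ = std::exchange(other.size_, 0u);
      return *this;
    }
    MemMap(const MemMap&) = delete;             // Move-only: no aliasing
    MemMap& operator=(const MemMap&) = delete;  // copies of the mapping.
    ~MemMap() { Reset(); }

    bool IsValid() const { return size_ != 0u; }

   private:
    void Reset() {
      if (IsValid()) {
        munmap(begin_, size_);
      }
      begin_ = nullptr;
      size_ = 0u;
    }

    void* begin_;
    size_t size_;
  };

  // Factories return by value; failure yields an invalid MemMap
  // rather than a null pointer.
  MemMap MapAnonymousPage(size_t page_size) {
    void* addr = mmap(nullptr, page_size, PROT_NONE,
                      MAP_PRIVATE | MAP_ANONYMOUS, /*fd=*/ -1, /*offset=*/ 0);
    return (addr == MAP_FAILED) ? MemMap() : MemMap(addr, page_size);
  }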
MemMap::RemapAtEnd() is changed to avoid the explicit call
to munmap(); mmap() with MAP_FIXED automatically removes
old mappings for overlapping regions.
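The following is plain POSIX, not ART code: an illustration of
the mmap() semantics RemapAtEnd() now relies on. MAP_FIXED
atomically discards any existing mapping in the requested range,
so an explicit munmap() beforehand is unnecessary and would only
open a window in which another thread could claim the address
range.

  #include <sys/mman.h>
  #include <cassert>
  #include <cstddef>
  #include <cstdint>

  int main() {
    const size_t kPage = 4096;
    // One anonymous mapping covering two pages.
    void* raw = mmap(nullptr, 2 * kPage, PROT_READ | PROT_WRITE,
                     MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    assert(raw != MAP_FAILED);
    uint8_t* base = static_cast<uint8_t*>(raw);
    // Remap the second page in place: MAP_FIXED replaces the old
    // mapping of [base + kPage, base + 2 * kPage) in the same call.
    void* tail = mmap(base + kPage, kPage, PROT_READ,
                      MAP_FIXED | MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    assert(tail == base + kPage);
    return 0;
  }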
Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Test: Pixel 2 XL boots.
Test: m test-art-target-gtest
Test: testrunner.py --target --optimizing
Change-Id: I12bd453c26a396edc20eb141bfd4dad20923f170
diff --git a/runtime/runtime.h b/runtime/runtime.h
index a98e8a8..f98d7b9 100644
--- a/runtime/runtime.h
+++ b/runtime/runtime.h
@@ -29,6 +29,7 @@
#include "arch/instruction_set.h"
#include "base/macros.h"
+#include "base/mem_map.h"
#include "base/mutex.h"
#include "deoptimization_kind.h"
#include "dex/dex_file_types.h"
@@ -86,7 +87,6 @@
class IsMarkedVisitor;
class JavaVMExt;
class LinearAlloc;
-class MemMap;
class MonitorList;
class MonitorPool;
class NullPointerHandler;
@@ -1090,7 +1090,7 @@
std::atomic<uint32_t> deoptimization_counts_[
static_cast<uint32_t>(DeoptimizationKind::kLast) + 1];
- std::unique_ptr<MemMap> protected_fault_page_;
+ MemMap protected_fault_page_;
uint32_t verifier_logging_threshold_ms_;
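With the field held by value, call sites test validity instead of
null. Hypothetical usage following the sketch above; the real
accessor names may differ:

  // Before: "no page" was a null std::unique_ptr<MemMap>.
  if (protected_fault_page_ != nullptr) {
    /* use protected_fault_page_->Begin() ... */
  }

  // After: "no page" is an invalid, default-constructed MemMap.
  if (protected_fault_page_.IsValid()) {
    /* use protected_fault_page_.Begin() ... */
  }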