Revert^2 "Remove support for Valgrind in ART."
- Disable test configuration art-gtest-valgrind64
(art-gtest-valgrind32 was already disabled).
- Remove Makefile logic regarding testing with Valgrind.
- Remove occurrences of `TEST_DISABLED_FOR_MEMORY_TOOL_VALGRIND`.
- Replace occurrences of `TEST_DISABLED_FOR_MEMORY_TOOL_ASAN` with
`TEST_DISABLED_FOR_MEMORY_TOOL`.
- Replace the potentially dynamically evaluated
  `RUNNING_ON_MEMORY_TOOL` expression with the constant
  `kRunningOnMemoryTool`.
- Simplify and fold the logic of
`art::ArenaAllocatorMemoryToolCheckImpl` and
`art::ArenaAllocatorMemoryToolCheck` into
`art::ArenaAllocatorMemoryTool`.
- Adjust comments regarding memory tools.
- Remove Valgrind suppression files.
- Remove `--callgrind` option from tools/art.
This reverts commit 8b362a87d52a6668ffd2283ef6ffc274315f41c8.
Change-Id: I23c76845e6ccf766f19b22b58a0d5161f60842a9
Test: art/test.py
Test: art/test/testrunner/run_build_test_target.py art-asan
Bug: 77856586
Bug: 29282211
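
For context, a minimal sketch (not part of the patch) of the pattern this change settles on: a memory-tool flag fixed at build time, so a plain `if (kRunningOnMemoryTool)` check compiles away entirely in non-sanitized builds instead of being evaluated at run time the way Valgrind's `RUNNING_ON_VALGRIND` could be. The guard names mirror the header rewritten below; the caller function is hypothetical.

    #include <cstddef>
    #include <cstdio>

    #if !defined(__has_feature)
    # define __has_feature(x) 0
    #endif

    // Fixed at compile time: either the whole build runs under ASan or none of it
    // does, unlike Valgrind, which had to be detected while the process was running.
    #if __has_feature(address_sanitizer)
    constexpr bool kRunningOnMemoryTool = true;
    #else
    constexpr bool kRunningOnMemoryTool = false;
    #endif

    // Hypothetical caller: the condition is a compile-time constant, so in a
    // non-sanitized build the branch (and the printf) is dead-code-eliminated.
    void MaybeAnnotateAllocation(void* ptr, std::size_t size) {
      if (kRunningOnMemoryTool) {
        std::printf("annotating %zu bytes at %p for the memory tool\n", size, ptr);
      }
    }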
diff --git a/libartbase/base/arena_allocator.h b/libartbase/base/arena_allocator.h
index 4dccd03..a9ccae1 100644
--- a/libartbase/base/arena_allocator.h
+++ b/libartbase/base/arena_allocator.h
@@ -148,34 +148,9 @@
typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;
-template <bool kAvailable, bool kValgrind>
-class ArenaAllocatorMemoryToolCheckImpl {
- // This is the generic template but since there is a partial specialization
- // for kValgrind == false, this can be instantiated only for kValgrind == true.
- static_assert(kValgrind, "This template can be instantiated only for Valgrind.");
- static_assert(kAvailable, "Valgrind implies memory tool availability.");
-
+class ArenaAllocatorMemoryTool {
public:
- ArenaAllocatorMemoryToolCheckImpl() : is_running_on_valgrind_(RUNNING_ON_MEMORY_TOOL) { }
- bool IsRunningOnMemoryTool() { return is_running_on_valgrind_; }
-
- private:
- const bool is_running_on_valgrind_;
-};
-
-template <bool kAvailable>
-class ArenaAllocatorMemoryToolCheckImpl<kAvailable, false> {
- public:
- ArenaAllocatorMemoryToolCheckImpl() { }
- bool IsRunningOnMemoryTool() { return kAvailable; }
-};
-
-typedef ArenaAllocatorMemoryToolCheckImpl<kMemoryToolIsAvailable, kMemoryToolIsValgrind>
- ArenaAllocatorMemoryToolCheck;
-
-class ArenaAllocatorMemoryTool : private ArenaAllocatorMemoryToolCheck {
- public:
- using ArenaAllocatorMemoryToolCheck::IsRunningOnMemoryTool;
+ bool IsRunningOnMemoryTool() { return kMemoryToolIsAvailable; }
void MakeDefined(void* ptr, size_t size) {
if (UNLIKELY(IsRunningOnMemoryTool())) {
diff --git a/libartbase/base/arena_allocator_test.cc b/libartbase/base/arena_allocator_test.cc
index e358710..6323a2b 100644
--- a/libartbase/base/arena_allocator_test.cc
+++ b/libartbase/base/arena_allocator_test.cc
@@ -16,6 +16,7 @@
#include "arena_allocator-inl.h"
#include "arena_bit_vector.h"
+#include "base/common_art_test.h"
#include "gtest/gtest.h"
#include "malloc_arena_pool.h"
#include "memory_tool.h"
@@ -146,11 +147,8 @@
}
TEST_F(ArenaAllocatorTest, ReallocReuse) {
- // Realloc does not reuse arenas when running under sanitization. So we cannot do those
- if (RUNNING_ON_MEMORY_TOOL != 0) {
- printf("WARNING: TEST DISABLED FOR MEMORY_TOOL\n");
- return;
- }
+ // Realloc does not reuse arenas when running under sanitization.
+ TEST_DISABLED_FOR_MEMORY_TOOL();
{
// Case 1: small aligned allocation, aligned extend inside arena.
diff --git a/libartbase/base/common_art_test.h b/libartbase/base/common_art_test.h
index 3998be5..d9bea3d 100644
--- a/libartbase/base/common_art_test.h
+++ b/libartbase/base/common_art_test.h
@@ -210,23 +210,11 @@
}
#define TEST_DISABLED_FOR_MEMORY_TOOL() \
- if (RUNNING_ON_MEMORY_TOOL > 0) { \
+ if (kRunningOnMemoryTool) { \
printf("WARNING: TEST DISABLED FOR MEMORY TOOL\n"); \
return; \
}
-#define TEST_DISABLED_FOR_MEMORY_TOOL_VALGRIND() \
- if (RUNNING_ON_MEMORY_TOOL > 0 && kMemoryToolIsValgrind) { \
- printf("WARNING: TEST DISABLED FOR MEMORY TOOL VALGRIND\n"); \
- return; \
- }
-
-#define TEST_DISABLED_FOR_MEMORY_TOOL_ASAN() \
- if (RUNNING_ON_MEMORY_TOOL > 0 && !kMemoryToolIsValgrind) { \
- printf("WARNING: TEST DISABLED FOR MEMORY TOOL ASAN\n"); \
- return; \
- }
-
#define TEST_DISABLED_FOR_HEAP_POISONING() \
if (kPoisonHeapReferences) { \
printf("WARNING: TEST DISABLED FOR HEAP POISONING\n"); \
diff --git a/libartbase/base/malloc_arena_pool.cc b/libartbase/base/malloc_arena_pool.cc
index 144b06c..15a5d71 100644
--- a/libartbase/base/malloc_arena_pool.cc
+++ b/libartbase/base/malloc_arena_pool.cc
@@ -53,7 +53,7 @@
memory_ = unaligned_memory_;
} else {
memory_ = AlignUp(unaligned_memory_, ArenaAllocator::kArenaAlignment);
- if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
+ if (kRunningOnMemoryTool) {
size_t head = memory_ - unaligned_memory_;
size_t tail = overallocation - head;
MEMORY_TOOL_MAKE_NOACCESS(unaligned_memory_, head);
@@ -66,7 +66,7 @@
MallocArena::~MallocArena() {
constexpr size_t overallocation = RequiredOverallocation();
- if (overallocation != 0u && UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
+ if (overallocation != 0u && kRunningOnMemoryTool) {
size_t head = memory_ - unaligned_memory_;
size_t tail = overallocation - head;
MEMORY_TOOL_MAKE_UNDEFINED(unaligned_memory_, head);
@@ -132,7 +132,7 @@
}
void MallocArenaPool::FreeArenaChain(Arena* first) {
- if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
+ if (kRunningOnMemoryTool) {
for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
}
diff --git a/libartbase/base/mem_map.cc b/libartbase/base/mem_map.cc
index c455fed..9ba1d6c 100644
--- a/libartbase/base/mem_map.cc
+++ b/libartbase/base/mem_map.cc
@@ -460,7 +460,7 @@
(expected_ptr == nullptr) ? nullptr : (expected_ptr - page_offset);
size_t redzone_size = 0;
- if (RUNNING_ON_MEMORY_TOOL && kMemoryToolAddsRedzones && expected_ptr == nullptr) {
+ if (kRunningOnMemoryTool && kMemoryToolAddsRedzones && expected_ptr == nullptr) {
redzone_size = kPageSize;
page_aligned_byte_count += redzone_size;
}
@@ -649,9 +649,11 @@
bool MemMap::Sync() {
bool result;
if (redzone_size_ != 0) {
- // To avoid valgrind errors, temporarily lift the lower-end noaccess protection before passing
- // it to msync() as it only accepts page-aligned base address, and exclude the higher-end
- // noaccess protection from the msync range. b/27552451.
+ // To avoid errors when running on a memory tool, temporarily lift the lower-end noaccess
+ // protection before passing it to msync() as it only accepts page-aligned base address,
+ // and exclude the higher-end noaccess protection from the msync range. b/27552451.
+ // TODO: Valgrind is no longer supported, but Address Sanitizer is:
+ // check whether this special case is needed for ASan.
uint8_t* base_begin = reinterpret_cast<uint8_t*>(base_begin_);
MEMORY_TOOL_MAKE_DEFINED(base_begin, begin_ - base_begin);
result = msync(BaseBegin(), End() - base_begin, MS_SYNC) == 0;
diff --git a/libartbase/base/mem_map_test.cc b/libartbase/base/mem_map_test.cc
index d956126..4a78bdc 100644
--- a/libartbase/base/mem_map_test.cc
+++ b/libartbase/base/mem_map_test.cc
@@ -471,31 +471,33 @@
// cannot allocate in the 2GB-4GB region.
TEST_DISABLED_FOR_MIPS();
+ // This test may not work under Valgrind.
+ // TODO: Valgrind is no longer supported, but Address Sanitizer is:
+ // check whether this test works with ASan.
+ TEST_DISABLED_FOR_MEMORY_TOOL();
+
CommonInit();
- // This test may not work under valgrind.
- if (RUNNING_ON_MEMORY_TOOL == 0) {
- constexpr size_t size = 0x100000;
- // Try all addresses starting from 2GB to 4GB.
- size_t start_addr = 2 * GB;
- std::string error_msg;
- std::unique_ptr<MemMap> map;
- for (; start_addr <= std::numeric_limits<uint32_t>::max() - size; start_addr += size) {
- map.reset(MemMap::MapAnonymous("MapAnonymousExactAddr32bitHighAddr",
- reinterpret_cast<uint8_t*>(start_addr),
- size,
- PROT_READ | PROT_WRITE,
- /*low_4gb*/true,
- false,
- &error_msg));
- if (map != nullptr) {
- break;
- }
+ constexpr size_t size = 0x100000;
+ // Try all addresses starting from 2GB to 4GB.
+ size_t start_addr = 2 * GB;
+ std::string error_msg;
+ std::unique_ptr<MemMap> map;
+ for (; start_addr <= std::numeric_limits<uint32_t>::max() - size; start_addr += size) {
+ map.reset(MemMap::MapAnonymous("MapAnonymousExactAddr32bitHighAddr",
+ reinterpret_cast<uint8_t*>(start_addr),
+ size,
+ PROT_READ | PROT_WRITE,
+ /*low_4gb*/true,
+ false,
+ &error_msg));
+ if (map != nullptr) {
+ break;
}
- ASSERT_TRUE(map.get() != nullptr) << error_msg;
- ASSERT_GE(reinterpret_cast<uintptr_t>(map->End()), 2u * GB);
- ASSERT_TRUE(error_msg.empty());
- ASSERT_EQ(BaseBegin(map.get()), reinterpret_cast<void*>(start_addr));
}
+ ASSERT_TRUE(map.get() != nullptr) << error_msg;
+ ASSERT_GE(reinterpret_cast<uintptr_t>(map->End()), 2u * GB);
+ ASSERT_TRUE(error_msg.empty());
+ ASSERT_EQ(BaseBegin(map.get()), reinterpret_cast<void*>(start_addr));
}
TEST_F(MemMapTest, MapAnonymousOverflow) {
diff --git a/libartbase/base/memory_tool.h b/libartbase/base/memory_tool.h
index e1df99f..d381f01 100644
--- a/libartbase/base/memory_tool.h
+++ b/libartbase/base/memory_tool.h
@@ -19,53 +19,53 @@
#include <stddef.h>
+namespace art {
+
#if !defined(__has_feature)
-#define __has_feature(x) 0
+# define __has_feature(x) 0
#endif
#if __has_feature(address_sanitizer)
-#include <sanitizer/asan_interface.h>
-#define ADDRESS_SANITIZER
+# include <sanitizer/asan_interface.h>
+# define ADDRESS_SANITIZER
-#ifdef ART_ENABLE_ADDRESS_SANITIZER
-#define MEMORY_TOOL_MAKE_NOACCESS(p, s) __asan_poison_memory_region(p, s)
-#define MEMORY_TOOL_MAKE_UNDEFINED(p, s) __asan_unpoison_memory_region(p, s)
-#define MEMORY_TOOL_MAKE_DEFINED(p, s) __asan_unpoison_memory_region(p, s)
+# ifdef ART_ENABLE_ADDRESS_SANITIZER
+# define MEMORY_TOOL_MAKE_NOACCESS(p, s) __asan_poison_memory_region(p, s)
+# define MEMORY_TOOL_MAKE_UNDEFINED(p, s) __asan_unpoison_memory_region(p, s)
+# define MEMORY_TOOL_MAKE_DEFINED(p, s) __asan_unpoison_memory_region(p, s)
constexpr bool kMemoryToolIsAvailable = true;
-#else
-#define MEMORY_TOOL_MAKE_NOACCESS(p, s) do { (void)(p); (void)(s); } while (0)
-#define MEMORY_TOOL_MAKE_UNDEFINED(p, s) do { (void)(p); (void)(s); } while (0)
-#define MEMORY_TOOL_MAKE_DEFINED(p, s) do { (void)(p); (void)(s); } while (0)
+# else
+# define MEMORY_TOOL_MAKE_NOACCESS(p, s) do { (void)(p); (void)(s); } while (0)
+# define MEMORY_TOOL_MAKE_UNDEFINED(p, s) do { (void)(p); (void)(s); } while (0)
+# define MEMORY_TOOL_MAKE_DEFINED(p, s) do { (void)(p); (void)(s); } while (0)
constexpr bool kMemoryToolIsAvailable = false;
-#endif
+# endif
extern "C" void __asan_handle_no_return();
-#define ATTRIBUTE_NO_SANITIZE_ADDRESS __attribute__((no_sanitize_address))
-#define MEMORY_TOOL_HANDLE_NO_RETURN __asan_handle_no_return()
-#define RUNNING_ON_MEMORY_TOOL 1U
-constexpr bool kMemoryToolIsValgrind = false;
+# define ATTRIBUTE_NO_SANITIZE_ADDRESS __attribute__((no_sanitize_address))
+# define MEMORY_TOOL_HANDLE_NO_RETURN __asan_handle_no_return()
+constexpr bool kRunningOnMemoryTool = true;
constexpr bool kMemoryToolDetectsLeaks = true;
constexpr bool kMemoryToolAddsRedzones = true;
constexpr size_t kMemoryToolStackGuardSizeScale = 2;
#else
-#include <memcheck/memcheck.h>
-#include <valgrind.h>
-#define MEMORY_TOOL_MAKE_NOACCESS(p, s) VALGRIND_MAKE_MEM_NOACCESS(p, s)
-#define MEMORY_TOOL_MAKE_UNDEFINED(p, s) VALGRIND_MAKE_MEM_UNDEFINED(p, s)
-#define MEMORY_TOOL_MAKE_DEFINED(p, s) VALGRIND_MAKE_MEM_DEFINED(p, s)
-#define ATTRIBUTE_NO_SANITIZE_ADDRESS
-#define MEMORY_TOOL_HANDLE_NO_RETURN do { } while (0)
-#define RUNNING_ON_MEMORY_TOOL RUNNING_ON_VALGRIND
-constexpr bool kMemoryToolIsAvailable = true;
-constexpr bool kMemoryToolIsValgrind = true;
-constexpr bool kMemoryToolDetectsLeaks = true;
-constexpr bool kMemoryToolAddsRedzones = true;
+# define MEMORY_TOOL_MAKE_NOACCESS(p, s) do { (void)(p); (void)(s); } while (0)
+# define MEMORY_TOOL_MAKE_UNDEFINED(p, s) do { (void)(p); (void)(s); } while (0)
+# define MEMORY_TOOL_MAKE_DEFINED(p, s) do { (void)(p); (void)(s); } while (0)
+# define ATTRIBUTE_NO_SANITIZE_ADDRESS
+# define MEMORY_TOOL_HANDLE_NO_RETURN do { } while (0)
+constexpr bool kRunningOnMemoryTool = false;
+constexpr bool kMemoryToolIsAvailable = false;
+constexpr bool kMemoryToolDetectsLeaks = false;
+constexpr bool kMemoryToolAddsRedzones = false;
constexpr size_t kMemoryToolStackGuardSizeScale = 1;
#endif
+} // namespace art
+
#endif // ART_LIBARTBASE_BASE_MEMORY_TOOL_H_
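
To illustrate how the rewritten header is consumed, here is a hedged sketch (a hypothetical GuardedBuffer type, not code from the patch; the "base/memory_tool.h" include path is assumed from the file's location) of the red-zone pattern the MallocArena hunks above follow: poison the over-allocated head and tail with MEMORY_TOOL_MAKE_NOACCESS, then lift the protection with MEMORY_TOOL_MAKE_UNDEFINED before the memory is released. In a non-ASan build both macros expand to no-ops, so the same code compiles unchanged.

    #include <cstddef>
    #include <cstdlib>

    #include "base/memory_tool.h"  // assumed include path for the macros above

    // Hypothetical example: a payload with a small guard region on each side.
    struct GuardedBuffer {
      static constexpr std::size_t kGuard = 64;

      explicit GuardedBuffer(std::size_t payload_size)
          : raw_(static_cast<char*>(std::malloc(payload_size + 2 * kGuard))),
            payload_(raw_ + kGuard),
            payload_size_(payload_size) {
        // Make the guard regions trap under ASan; no-ops otherwise.
        MEMORY_TOOL_MAKE_NOACCESS(raw_, kGuard);
        MEMORY_TOOL_MAKE_NOACCESS(payload_ + payload_size_, kGuard);
      }

      ~GuardedBuffer() {
        // Lift the poisoning before the allocator reclaims the whole block.
        MEMORY_TOOL_MAKE_UNDEFINED(raw_, kGuard);
        MEMORY_TOOL_MAKE_UNDEFINED(payload_ + payload_size_, kGuard);
        std::free(raw_);
      }

      char* raw_;
      char* payload_;
      std::size_t payload_size_;
    };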
diff --git a/libartbase/base/scoped_arena_containers.h b/libartbase/base/scoped_arena_containers.h
index 44d7ebb..6c78bad 100644
--- a/libartbase/base/scoped_arena_containers.h
+++ b/libartbase/base/scoped_arena_containers.h
@@ -236,7 +236,7 @@
protected:
// Used for variable sized objects such as RegisterLine.
ALWAYS_INLINE void ProtectMemory(T* ptr, size_t size) const {
- if (RUNNING_ON_MEMORY_TOOL > 0) {
+ if (kRunningOnMemoryTool) {
// Writing to the memory will fail if we already destroyed the pointer with
// DestroyOnlyDelete since we make it no access.
memset(ptr, kMagicFill, size);