author     Vladimir Marko <vmarko@google.com>  2016-04-22 18:07:13 +0100
committer  Vladimir Marko <vmarko@google.com>  2016-04-26 09:51:44 +0100
commit     46ea0147b49e3539492be160e1631e73f58d2c3c (patch)
tree       46956cf23fa38ab2d4727508341869f01bf00156 /runtime/base/arena_allocator_test.cc
parent     ff2d53a16d844054874e41a98e2984e2818ee210 (diff)
Reduce memory lost by ArenaAllocator for large allocations.
When allocating from a new arena, check if the old arena has more remaining
space than the new one after the current allocation. If so, keep using the
old arena to reduce the amount of "lost" arena memory. This can happen when
we try to allocate more than half the default arena size. If the allocation
exceeds the default arena size, it's very likely to happen even though the
ArenaPool could still provide some much larger previously allocated arena.

Also avoid arithmetic overflow when checking if the request can be satisfied
from the current arena, and abort immediately if calloc() fails.

Bug: 28173563
Bug: 28256882

In addition to the initial CL
(cherry picked from commit 3e0e7173c0cdfc57dba39fe781e30d187d50fa9c)
this contains a squashed subsequent fix:

Fix valgrind tests: mark allocated space as defined.
(cherry picked from commit 3f84f2cb3cadc25d75e1e3e2c1bc26c1a671f336)

Change-Id: Id80d5601874e8e28d930c0dd47a51c73c4810094
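The allocator change itself lives in runtime/base/arena_allocator.cc and is not
part of this diff, which only adds tests. The standalone sketch below merely
illustrates the decision described in the message; the names (Arena,
RemainingSpace(), MakeArena(), ChooseArenaToKeepUsing()) are hypothetical and
do not match the actual ART implementation.

#include <cstddef>
#include <cstdint>
#include <cstdlib>

// Illustrative arena: a contiguous block plus a count of bytes handed out.
struct Arena {
  uint8_t* memory;
  size_t size;
  size_t bytes_allocated;

  size_t RemainingSpace() const { return size - bytes_allocated; }
};

// Overflow-safe fit check: compare the request against the remaining space
// instead of computing bytes_allocated + bytes, which could wrap around.
inline bool FitsInArena(const Arena& arena, size_t bytes) {
  return bytes <= arena.RemainingSpace();
}

// Hypothetical helper for obtaining backing memory; aborts immediately if
// calloc() fails, as the commit message describes.
inline Arena MakeArena(size_t size) {
  void* memory = calloc(1, size);
  if (memory == nullptr) {
    abort();
  }
  return Arena{static_cast<uint8_t*>(memory), size, 0u};
}

// After serving an oversized request from a fresh arena (assumed to be at
// least `request` bytes), keep allocating from whichever arena, old or new,
// has more space left, so less arena memory is "lost".
inline Arena* ChooseArenaToKeepUsing(Arena* old_arena, Arena* new_arena, size_t request) {
  size_t new_remaining = new_arena->size - request;
  if (old_arena != nullptr && old_arena->RemainingSpace() > new_remaining) {
    return old_arena;
  }
  return new_arena;
}

The LargeAllocations test added below exercises exactly this behavior by
counting arenas via NumberOfArenas() after oversized allocations.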
Diffstat (limited to 'runtime/base/arena_allocator_test.cc')
-rw-r--r--  runtime/base/arena_allocator_test.cc | 127
1 file changed, 127 insertions, 0 deletions
diff --git a/runtime/base/arena_allocator_test.cc b/runtime/base/arena_allocator_test.cc
new file mode 100644
index 00000000000..9de3cc43128
--- /dev/null
+++ b/runtime/base/arena_allocator_test.cc
@@ -0,0 +1,127 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "base/arena_allocator.h"
+#include "base/arena_bit_vector.h"
+#include "gtest/gtest.h"
+
+namespace art {
+
+class ArenaAllocatorTest : public testing::Test {
+ protected:
+  size_t NumberOfArenas(ArenaAllocator* arena) {
+    size_t result = 0u;
+    for (Arena* a = arena->arena_head_; a != nullptr; a = a->next_) {
+      ++result;
+    }
+    return result;
+  }
+};
+
+TEST_F(ArenaAllocatorTest, Test) {
+  ArenaPool pool;
+  ArenaAllocator arena(&pool);
+  ArenaBitVector bv(&arena, 10, true);
+  bv.SetBit(5);
+  EXPECT_EQ(1U, bv.GetStorageSize());
+  bv.SetBit(35);
+  EXPECT_EQ(2U, bv.GetStorageSize());
+}
+
+TEST_F(ArenaAllocatorTest, MakeDefined) {
+  // Regression test to make sure we mark the allocated area defined.
+  ArenaPool pool;
+  static constexpr size_t kSmallArraySize = 10;
+  static constexpr size_t kLargeArraySize = 50;
+  uint32_t* small_array;
+  {
+    // Allocate a small array from an arena and release it.
+    ArenaAllocator arena(&pool);
+    small_array = arena.AllocArray<uint32_t>(kSmallArraySize);
+    ASSERT_EQ(0u, small_array[kSmallArraySize - 1u]);
+  }
+  {
+    // Reuse the previous arena and allocate more than the previous allocation, including the red zone.
+    ArenaAllocator arena(&pool);
+    uint32_t* large_array = arena.AllocArray<uint32_t>(kLargeArraySize);
+    ASSERT_EQ(0u, large_array[kLargeArraySize - 1u]);
+    // Verify that the allocation was made on the same arena.
+    ASSERT_EQ(small_array, large_array);
+  }
+}
+
+TEST_F(ArenaAllocatorTest, LargeAllocations) {
+  {
+    ArenaPool pool;
+    ArenaAllocator arena(&pool);
+    // Note: Leaving some space for memory tool red zones.
+    void* alloc1 = arena.Alloc(Arena::kDefaultSize * 5 / 8);
+    void* alloc2 = arena.Alloc(Arena::kDefaultSize * 2 / 8);
+    ASSERT_NE(alloc1, alloc2);
+    ASSERT_EQ(1u, NumberOfArenas(&arena));
+  }
+  {
+    ArenaPool pool;
+    ArenaAllocator arena(&pool);
+    void* alloc1 = arena.Alloc(Arena::kDefaultSize * 13 / 16);
+    void* alloc2 = arena.Alloc(Arena::kDefaultSize * 11 / 16);
+    ASSERT_NE(alloc1, alloc2);
+    ASSERT_EQ(2u, NumberOfArenas(&arena));
+    void* alloc3 = arena.Alloc(Arena::kDefaultSize * 7 / 16);
+    ASSERT_NE(alloc1, alloc3);
+    ASSERT_NE(alloc2, alloc3);
+    ASSERT_EQ(3u, NumberOfArenas(&arena));
+  }
+  {
+    ArenaPool pool;
+    ArenaAllocator arena(&pool);
+    void* alloc1 = arena.Alloc(Arena::kDefaultSize * 13 / 16);
+    void* alloc2 = arena.Alloc(Arena::kDefaultSize * 9 / 16);
+    ASSERT_NE(alloc1, alloc2);
+    ASSERT_EQ(2u, NumberOfArenas(&arena));
+    // Note: Leaving some space for memory tool red zones.
+    void* alloc3 = arena.Alloc(Arena::kDefaultSize * 5 / 16);
+    ASSERT_NE(alloc1, alloc3);
+    ASSERT_NE(alloc2, alloc3);
+    ASSERT_EQ(2u, NumberOfArenas(&arena));
+  }
+  {
+    ArenaPool pool;
+    ArenaAllocator arena(&pool);
+    void* alloc1 = arena.Alloc(Arena::kDefaultSize * 9 / 16);
+    void* alloc2 = arena.Alloc(Arena::kDefaultSize * 13 / 16);
+    ASSERT_NE(alloc1, alloc2);
+    ASSERT_EQ(2u, NumberOfArenas(&arena));
+    // Note: Leaving some space for memory tool red zones.
+    void* alloc3 = arena.Alloc(Arena::kDefaultSize * 5 / 16);
+    ASSERT_NE(alloc1, alloc3);
+    ASSERT_NE(alloc2, alloc3);
+    ASSERT_EQ(2u, NumberOfArenas(&arena));
+  }
+  {
+    ArenaPool pool;
+    ArenaAllocator arena(&pool);
+    // Note: Leaving some space for memory tool red zones.
+    for (size_t i = 0; i != 15; ++i) {
+      arena.Alloc(Arena::kDefaultSize * 1 / 16);    // Allocate 15 times from the same arena.
+      ASSERT_EQ(i + 1u, NumberOfArenas(&arena));
+      arena.Alloc(Arena::kDefaultSize * 17 / 16);   // Allocate a separate arena.
+      ASSERT_EQ(i + 2u, NumberOfArenas(&arena));
+    }
+  }
+}
+
+} // namespace art
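
The MakeDefined test above guards the squashed valgrind fix ("mark allocated
space as defined"). As a rough illustration only, assuming a plain
memcheck-instrumented allocator rather than the actual ART memory-tool
plumbing, marking the returned bytes defined while keeping a red zone
poisoned could look like this (AllocWithRedZone and its parameters are
hypothetical names):

#include <valgrind/memcheck.h>
#include <cstddef>
#include <cstdint>

// Sketch: hand out `bytes` starting at `cursor`, leaving `red_zone_bytes`
// of poisoned padding after the allocation.
inline uint8_t* AllocWithRedZone(uint8_t*& cursor, size_t bytes, size_t red_zone_bytes) {
  uint8_t* result = cursor;
  cursor += bytes + red_zone_bytes;
  // The caller-visible bytes become addressable and defined; they were
  // zero-initialized when the arena memory was obtained via calloc().
  VALGRIND_MAKE_MEM_DEFINED(result, bytes);
  // The red zone stays inaccessible so overruns past `bytes` are reported.
  VALGRIND_MAKE_MEM_NOACCESS(result + bytes, red_zone_bytes);
  return result;
}

Without the "make defined" step, reading small_array[kSmallArraySize - 1u] in
the MakeDefined test would be reported by memcheck as a use of undefined
memory, which is exactly the regression the test exists to catch.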