-rw-r--r--  build/Android.gtest.mk                  |   2
-rw-r--r--  compiler/utils/arena_allocator_test.cc  |  33
-rw-r--r--  runtime/base/arena_allocator.cc         |  58
-rw-r--r--  runtime/base/arena_allocator.h          |  21
-rw-r--r--  runtime/base/arena_allocator_test.cc    | 127
5 files changed, 180 insertions, 61 deletions
diff --git a/build/Android.gtest.mk b/build/Android.gtest.mk
index e9b6a6d7f6..cd463ecc7c 100644
--- a/build/Android.gtest.mk
+++ b/build/Android.gtest.mk
@@ -172,6 +172,7 @@ RUNTIME_GTEST_COMMON_SRC_FILES := \
   runtime/arch/x86/instruction_set_features_x86_test.cc \
   runtime/arch/x86_64/instruction_set_features_x86_64_test.cc \
   runtime/barrier_test.cc \
+  runtime/base/arena_allocator_test.cc \
   runtime/base/bit_field_test.cc \
   runtime/base/bit_utils_test.cc \
   runtime/base/bit_vector_test.cc \
@@ -274,7 +275,6 @@ COMPILER_GTEST_COMMON_SRC_FILES := \
   compiler/optimizing/ssa_test.cc \
   compiler/optimizing/stack_map_test.cc \
   compiler/optimizing/suspend_check_test.cc \
-  compiler/utils/arena_allocator_test.cc \
   compiler/utils/dedupe_set_test.cc \
   compiler/utils/intrusive_forward_list_test.cc \
   compiler/utils/swap_space_test.cc \
diff --git a/compiler/utils/arena_allocator_test.cc b/compiler/utils/arena_allocator_test.cc
deleted file mode 100644
index 7f67ef14bd..0000000000
--- a/compiler/utils/arena_allocator_test.cc
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "base/arena_allocator.h"
-#include "base/arena_bit_vector.h"
-#include "gtest/gtest.h"
-
-namespace art {
-
-TEST(ArenaAllocator, Test) {
-  ArenaPool pool;
-  ArenaAllocator arena(&pool);
-  ArenaBitVector bv(&arena, 10, true);
-  bv.SetBit(5);
-  EXPECT_EQ(1U, bv.GetStorageSize());
-  bv.SetBit(35);
-  EXPECT_EQ(2U, bv.GetStorageSize());
-}
-
-}  // namespace art
diff --git a/runtime/base/arena_allocator.cc b/runtime/base/arena_allocator.cc
index d951089bb2..b84e29f7ce 100644
--- a/runtime/base/arena_allocator.cc
+++ b/runtime/base/arena_allocator.cc
@@ -162,6 +162,7 @@ Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
 }
 
 MallocArena::MallocArena(size_t size) {
   memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
+  CHECK(memory_ != nullptr);  // Abort on OOM.
   size_ = size;
 }
@@ -319,15 +320,27 @@ void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
   // mark only the actually allocated memory as defined. That leaves red zones
   // and padding between allocations marked as inaccessible.
   size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
-  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
-    // Obtain a new block.
-    ObtainNewArenaForAllocation(rounded_bytes);
-    CHECK(ptr_ != nullptr);
-    MEMORY_TOOL_MAKE_NOACCESS(ptr_, end_ - ptr_);
-  }
   ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
-  uint8_t* ret = ptr_;
-  ptr_ += rounded_bytes;
+  uint8_t* ret;
+  if (UNLIKELY(rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
+    ret = AllocFromNewArena(rounded_bytes);
+    uint8_t* noaccess_begin = ret + bytes;
+    uint8_t* noaccess_end;
+    if (ret == arena_head_->Begin()) {
+      DCHECK(ptr_ - rounded_bytes == ret);
+      noaccess_end = end_;
+    } else {
+      // We're still using the old arena but `ret` comes from a new one just after it.
+      DCHECK(arena_head_->next_ != nullptr);
+      DCHECK(ret == arena_head_->next_->Begin());
+      DCHECK_EQ(rounded_bytes, arena_head_->next_->GetBytesAllocated());
+      noaccess_end = arena_head_->next_->End();
+    }
+    MEMORY_TOOL_MAKE_NOACCESS(noaccess_begin, noaccess_end - noaccess_begin);
+  } else {
+    ret = ptr_;
+    ptr_ += rounded_bytes;
+  }
   MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
   // Check that the memory is already zeroed out.
   DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
@@ -340,14 +353,27 @@ ArenaAllocator::~ArenaAllocator() {
   pool_->FreeArenaChain(arena_head_);
 }
 
-void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
-  UpdateBytesAllocated();
-  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
-  new_arena->next_ = arena_head_;
-  arena_head_ = new_arena;
-  // Update our internal data structures.
-  ptr_ = begin_ = new_arena->Begin();
-  end_ = new_arena->End();
+uint8_t* ArenaAllocator::AllocFromNewArena(size_t bytes) {
+  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, bytes));
+  DCHECK(new_arena != nullptr);
+  DCHECK_LE(bytes, new_arena->Size());
+  if (static_cast<size_t>(end_ - ptr_) > new_arena->Size() - bytes) {
+    // The old arena has more space remaining than the new one, so keep using it.
+    // This can happen when the requested size is over half of the default size.
+    DCHECK(arena_head_ != nullptr);
+    new_arena->bytes_allocated_ = bytes;  // UpdateBytesAllocated() on the new_arena.
+    new_arena->next_ = arena_head_->next_;
+    arena_head_->next_ = new_arena;
+  } else {
+    UpdateBytesAllocated();
+    new_arena->next_ = arena_head_;
+    arena_head_ = new_arena;
+    // Update our internal data structures.
+    begin_ = new_arena->Begin();
+    ptr_ = begin_ + bytes;
+    end_ = new_arena->End();
+  }
+  return new_arena->Begin();
 }
 
 bool ArenaAllocator::Contains(const void* ptr) const {
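Reviewer note on the allocator change above: an oversized request no longer always retires the current arena. AllocFromNewArena() compares the free space left in the old arena with the space the new arena would have left after serving the request; whichever arena has more room stays (or becomes) the head for future bump allocations, and the other is spliced into the chain behind it with its byte count recorded up front. The sketch below is a minimal standalone model of that decision, not the real classes; kDefaultSize = 128 KiB is an assumption standing in for Arena::kDefaultSize.

#include <algorithm>
#include <cstddef>
#include <cstdio>

// Hypothetical stand-in for Arena::kDefaultSize; illustration only.
constexpr std::size_t kDefaultSize = 128 * 1024;

// Mirrors the splice-or-switch test in AllocFromNewArena(): keep the old head
// arena when it still has more free space than the new arena would retain
// after serving the request.
bool KeepOldArenaAsHead(std::size_t old_remaining, std::size_t request) {
  std::size_t new_arena_size = std::max(kDefaultSize, request);
  return old_remaining > new_arena_size - request;
}

int main() {
  // 48 KiB free, 96 KiB requested: the new arena would retain only 32 KiB,
  // so the old head stays current and the new arena is linked in behind it.
  std::printf("%d\n", KeepOldArenaAsHead(48 * 1024, 96 * 1024));  // prints 1
  // 8 KiB free, 64 KiB requested: the new arena retains 64 KiB, so it
  // becomes the new head and future allocations bump-allocate from it.
  std::printf("%d\n", KeepOldArenaAsHead(8 * 1024, 64 * 1024));   // prints 0
}

This splicing is also why AllocWithMemoryTool() now distinguishes two cases when poisoning red zones: if `ret` came from a spliced-in arena rather than the head, the inaccessible region runs to that arena's End() instead of the allocator's current end_.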
diff --git a/runtime/base/arena_allocator.h b/runtime/base/arena_allocator.h
index 52a1002e05..6c1a8984cd 100644
--- a/runtime/base/arena_allocator.h
+++ b/runtime/base/arena_allocator.h
@@ -234,6 +234,8 @@ class Arena {
   friend class ScopedArenaAllocator;
   template <bool kCount> friend class ArenaAllocatorStatsImpl;
 
+  friend class ArenaAllocatorTest;
+
  private:
   DISALLOW_COPY_AND_ASSIGN(Arena);
 };
@@ -303,14 +305,10 @@ class ArenaAllocator
       return AllocWithMemoryTool(bytes, kind);
     }
     bytes = RoundUp(bytes, kAlignment);
-    if (UNLIKELY(ptr_ + bytes > end_)) {
-      // Obtain a new block.
-      ObtainNewArenaForAllocation(bytes);
-      if (UNLIKELY(ptr_ == nullptr)) {
-        return nullptr;
-      }
-    }
     ArenaAllocatorStats::RecordAlloc(bytes, kind);
+    if (UNLIKELY(bytes > static_cast<size_t>(end_ - ptr_))) {
+      return AllocFromNewArena(bytes);
+    }
     uint8_t* ret = ptr_;
     ptr_ += bytes;
     return ret;
@@ -350,10 +348,6 @@ class ArenaAllocator
     return static_cast<T*>(Alloc(length * sizeof(T), kind));
   }
 
-  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
-
-  void ObtainNewArenaForAllocation(size_t allocation_size);
-
   size_t BytesAllocated() const;
 
   MemStats GetMemStats() const;
@@ -369,6 +363,9 @@ class ArenaAllocator
   bool Contains(const void* ptr) const;
 
  private:
+  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
+  uint8_t* AllocFromNewArena(size_t bytes);
+
   static constexpr size_t kAlignment = 8;
 
   void UpdateBytesAllocated();
@@ -382,6 +379,8 @@ class ArenaAllocator
   template <typename U>
   friend class ArenaAllocatorAdapter;
 
+  friend class ArenaAllocatorTest;
+
   DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
 };  // ArenaAllocator
 
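In the header, the fast path's capacity test switches from pointer arithmetic to a size comparison: `ptr_ + bytes` is undefined behavior once it points past the arena, and can wrap in practice for a huge `bytes`, whereas `end_ - ptr_` is always a valid, non-negative difference here. RecordAlloc() also moves above the test because the slow path now returns directly, and the nullptr check disappears since MallocArena aborts on OOM. A hedged side-by-side sketch with stand-in names, not the real class:

#include <cstddef>
#include <cstdint>

// Old-style check: computes a pointer that may lie far past the arena, which
// is undefined behavior and can falsely report a fit if it wraps around.
bool FitsUnsafe(const std::uint8_t* ptr, const std::uint8_t* end, std::size_t bytes) {
  return ptr + bytes <= end;  // Avoid: `ptr + bytes` may overflow.
}

// New-style check: compares the request against the remaining capacity using
// only well-defined size arithmetic (`end >= ptr` always holds here).
bool FitsSafe(const std::uint8_t* ptr, const std::uint8_t* end, std::size_t bytes) {
  return bytes <= static_cast<std::size_t>(end - ptr);
}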
diff --git a/runtime/base/arena_allocator_test.cc b/runtime/base/arena_allocator_test.cc
new file mode 100644
index 0000000000..9de3cc4312
--- /dev/null
+++ b/runtime/base/arena_allocator_test.cc
@@ -0,0 +1,127 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "base/arena_allocator.h"
+#include "base/arena_bit_vector.h"
+#include "gtest/gtest.h"
+
+namespace art {
+
+class ArenaAllocatorTest : public testing::Test {
+ protected:
+  size_t NumberOfArenas(ArenaAllocator* arena) {
+    size_t result = 0u;
+    for (Arena* a = arena->arena_head_; a != nullptr; a = a->next_) {
+      ++result;
+    }
+    return result;
+  }
+};
+
+TEST_F(ArenaAllocatorTest, Test) {
+  ArenaPool pool;
+  ArenaAllocator arena(&pool);
+  ArenaBitVector bv(&arena, 10, true);
+  bv.SetBit(5);
+  EXPECT_EQ(1U, bv.GetStorageSize());
+  bv.SetBit(35);
+  EXPECT_EQ(2U, bv.GetStorageSize());
+}
+
+TEST_F(ArenaAllocatorTest, MakeDefined) {
+  // Regression test to make sure we mark the allocated area defined.
+  ArenaPool pool;
+  static constexpr size_t kSmallArraySize = 10;
+  static constexpr size_t kLargeArraySize = 50;
+  uint32_t* small_array;
+  {
+    // Allocate a small array from an arena and release it.
+    ArenaAllocator arena(&pool);
+    small_array = arena.AllocArray<uint32_t>(kSmallArraySize);
+    ASSERT_EQ(0u, small_array[kSmallArraySize - 1u]);
+  }
+  {
+    // Reuse the previous arena and allocate more than previous allocation including red zone.
+    ArenaAllocator arena(&pool);
+    uint32_t* large_array = arena.AllocArray<uint32_t>(kLargeArraySize);
+    ASSERT_EQ(0u, large_array[kLargeArraySize - 1u]);
+    // Verify that the allocation was made on the same arena.
+    ASSERT_EQ(small_array, large_array);
+  }
+}
+
+TEST_F(ArenaAllocatorTest, LargeAllocations) {
+  {
+    ArenaPool pool;
+    ArenaAllocator arena(&pool);
+    // Note: Leaving some space for memory tool red zones.
+    void* alloc1 = arena.Alloc(Arena::kDefaultSize * 5 / 8);
+    void* alloc2 = arena.Alloc(Arena::kDefaultSize * 2 / 8);
+    ASSERT_NE(alloc1, alloc2);
+    ASSERT_EQ(1u, NumberOfArenas(&arena));
+  }
+  {
+    ArenaPool pool;
+    ArenaAllocator arena(&pool);
+    void* alloc1 = arena.Alloc(Arena::kDefaultSize * 13 / 16);
+    void* alloc2 = arena.Alloc(Arena::kDefaultSize * 11 / 16);
+    ASSERT_NE(alloc1, alloc2);
+    ASSERT_EQ(2u, NumberOfArenas(&arena));
+    void* alloc3 = arena.Alloc(Arena::kDefaultSize * 7 / 16);
+    ASSERT_NE(alloc1, alloc3);
+    ASSERT_NE(alloc2, alloc3);
+    ASSERT_EQ(3u, NumberOfArenas(&arena));
+  }
+  {
+    ArenaPool pool;
+    ArenaAllocator arena(&pool);
+    void* alloc1 = arena.Alloc(Arena::kDefaultSize * 13 / 16);
+    void* alloc2 = arena.Alloc(Arena::kDefaultSize * 9 / 16);
+    ASSERT_NE(alloc1, alloc2);
+    ASSERT_EQ(2u, NumberOfArenas(&arena));
+    // Note: Leaving some space for memory tool red zones.
+    void* alloc3 = arena.Alloc(Arena::kDefaultSize * 5 / 16);
+    ASSERT_NE(alloc1, alloc3);
+    ASSERT_NE(alloc2, alloc3);
+    ASSERT_EQ(2u, NumberOfArenas(&arena));
+  }
+  {
+    ArenaPool pool;
+    ArenaAllocator arena(&pool);
+    void* alloc1 = arena.Alloc(Arena::kDefaultSize * 9 / 16);
+    void* alloc2 = arena.Alloc(Arena::kDefaultSize * 13 / 16);
+    ASSERT_NE(alloc1, alloc2);
+    ASSERT_EQ(2u, NumberOfArenas(&arena));
+    // Note: Leaving some space for memory tool red zones.
+    void* alloc3 = arena.Alloc(Arena::kDefaultSize * 5 / 16);
+    ASSERT_NE(alloc1, alloc3);
+    ASSERT_NE(alloc2, alloc3);
+    ASSERT_EQ(2u, NumberOfArenas(&arena));
+  }
+  {
+    ArenaPool pool;
+    ArenaAllocator arena(&pool);
+    // Note: Leaving some space for memory tool red zones.
+    for (size_t i = 0; i != 15; ++i) {
+      arena.Alloc(Arena::kDefaultSize * 1 / 16);  // Allocate 15 times from the same arena.
+      ASSERT_EQ(i + 1u, NumberOfArenas(&arena));
+      arena.Alloc(Arena::kDefaultSize * 17 / 16);  // Allocate a separate arena.
+      ASSERT_EQ(i + 2u, NumberOfArenas(&arena));
+    }
+  }
+}
+
+}  // namespace art
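The tests count arenas by walking arena_head_/next_ directly, which is what the new `friend class ArenaAllocatorTest` declarations in both Arena and ArenaAllocator enable. In the final LargeAllocations block, each kDefaultSize * 17/16 request exceeds a full default arena and therefore gets a dedicated arena spliced in behind the head, while the fifteen kDefaultSize/16 requests keep coming from the original head arena; that is where the i + 1 and i + 2 counts come from. For orientation, a minimal usage sketch of the allocator as these tests exercise it (assuming the ART source tree for the headers; `Example` is a hypothetical function, not part of the commit):

#include <cstdint>

#include "base/arena_allocator.h"

// Allocations are bump-pointer fast, 8-byte aligned, and zero-initialized;
// everything is freed at once when the allocator returns its arena chain to
// the pool on destruction.
void Example() {
  art::ArenaPool pool;
  art::ArenaAllocator arena(&pool);
  uint32_t* counts = arena.AllocArray<uint32_t>(64);  // zeroed storage
  counts[0] = 42u;
}  // ~ArenaAllocator() hands the whole arena chain back to `pool` for reuse.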