summaryrefslogtreecommitdiff
path: root/tests/bionic_allocator_test.cpp
diff options
context:
space:
mode:
authorRyan Prichard <rprichard@google.com>2019-01-24 15:22:50 -0800
committerRyan Prichard <rprichard@google.com>2019-01-25 15:31:35 -0800
commit96773a2daf8d11f2d95801935e26f71eb0265559 (patch)
tree44a8da51e6b6acfb7054830a44e23b00d82afefc /tests/bionic_allocator_test.cpp
parent083d850b30c5eb29c16500bca934d7f6682e4aaa (diff)
Add BionicAllocator::memalign
Bionic needs this functionality to allocate a TLS segment with greater than 16-byte alignment. For simplicity, this allocator only supports up to one page of alignment. The memory layout changes slightly when allocating an object of exactly PAGE_SIZE alignment. Instead of allocating the page_info header at the start of the page containing the pointer, it is allocated at the start of the preceding page. Bug: http://b/78026329 Test: linker-unit-tests{32,64} Change-Id: I1c8d1cd7ca72d113bced5ee15ba8d831426b0081
Diffstat (limited to 'tests/bionic_allocator_test.cpp')
-rw-r--r--tests/bionic_allocator_test.cpp45
1 file changed, 45 insertions, 0 deletions
diff --git a/tests/bionic_allocator_test.cpp b/tests/bionic_allocator_test.cpp
index d0ca8ec5f..f71090704 100644
--- a/tests/bionic_allocator_test.cpp
+++ b/tests/bionic_allocator_test.cpp
@@ -212,4 +212,49 @@ TEST(bionic_allocator, test_large) {
allocator.free(ptr_to_free);
}
+TEST(bionic_allocator, test_memalign_small) {
+  BionicAllocator allocator;
+  void* ptr;
+
+  // Simple case: a small allocation whose size equals its alignment (0x100).
+  ptr = allocator.memalign(0x100, 0x100);
+  ASSERT_TRUE(ptr != nullptr);
+  ASSERT_EQ(0U, reinterpret_cast<uintptr_t>(ptr) % 0x100);
+  allocator.free(ptr);
+
+  // Small objects are automatically aligned to their size, even via alloc().
+  ptr = allocator.alloc(0x200);
+  ASSERT_TRUE(ptr != nullptr);
+  ASSERT_EQ(0U, reinterpret_cast<uintptr_t>(ptr) % 0x200);
+  allocator.free(ptr);
+
+  // The size (0x10) is bumped up to the alignment (0x100).
+  ptr = allocator.memalign(0x100, 0x10);
+  ASSERT_TRUE(ptr != nullptr);
+  ASSERT_EQ(0U, reinterpret_cast<uintptr_t>(ptr) % 0x100);
+  allocator.free(ptr);
+}
+TEST(bionic_allocator, test_memalign_large) {
+  BionicAllocator allocator;
+  void* ptr;
+
+  // A large (multi-page) object with alignment < PAGE_SIZE.
+  ptr = allocator.memalign(0x100, 0x2000);
+  ASSERT_TRUE(ptr != nullptr);
+  ASSERT_EQ(0U, reinterpret_cast<uintptr_t>(ptr) % 0x100);
+  allocator.free(ptr);
+
+  // A large object with alignment == PAGE_SIZE (0x1000).
+  ptr = allocator.memalign(0x1000, 0x2000);
+  ASSERT_TRUE(ptr != nullptr);
+  ASSERT_EQ(0U, reinterpret_cast<uintptr_t>(ptr) % 0x1000);
+  allocator.free(ptr);
+
+  // A large object with alignment > PAGE_SIZE is only guaranteed to have page
+  // alignment.
+  ptr = allocator.memalign(0x2000, 0x4000);
+  ASSERT_TRUE(ptr != nullptr);
+  ASSERT_EQ(0U, reinterpret_cast<uintptr_t>(ptr) % 0x1000);
+  allocator.free(ptr);
+}