diff options
Diffstat (limited to 'arch/x86_64/src/memory/heap')
| -rw-r--r-- | arch/x86_64/src/memory/heap/global_heap_allocator.cpp | 20 | ||||
| -rw-r--r-- | arch/x86_64/src/memory/heap/user_heap_allocator.cpp | 55 |
2 files changed, 64 insertions, 11 deletions
diff --git a/arch/x86_64/src/memory/heap/global_heap_allocator.cpp b/arch/x86_64/src/memory/heap/global_heap_allocator.cpp index 51f6261..acba02d 100644 --- a/arch/x86_64/src/memory/heap/global_heap_allocator.cpp +++ b/arch/x86_64/src/memory/heap/global_heap_allocator.cpp @@ -3,11 +3,12 @@ #include "arch/exception_handling/assert.hpp" #include "arch/memory/heap/bump_allocator.hpp" #include "arch/memory/heap/linked_list_allocator.hpp" +#include "arch/memory/heap/user_heap_allocator.hpp" namespace teachos::arch::memory::heap { heap_allocator * global_heap_allocator::kernel_allocator_instance = nullptr; - heap_allocator * global_heap_allocator::user_allocator_instance = nullptr; + user_heap_allocator * global_heap_allocator::user_allocator_instance = nullptr; auto global_heap_allocator::kmalloc(std::size_t size) -> void * { return kernel().allocate(size); } @@ -30,20 +31,17 @@ namespace teachos::arch::memory::heap case heap_allocator_type::BUMP: { static bump_allocator kernel_allocator{KERNEL_HEAP_START, KERNEL_HEAP_START + KERNEL_HEAP_SIZE}; kernel_allocator_instance = &kernel_allocator; - - static bump_allocator user_allocator{USER_HEAP_START, USER_HEAP_START + USER_HEAP_SIZE}; - user_allocator_instance = &user_allocator; break; } case heap_allocator_type::LINKED_LIST: { static linked_list_allocator kernel_allocator{KERNEL_HEAP_START, KERNEL_HEAP_START + KERNEL_HEAP_SIZE}; kernel_allocator_instance = &kernel_allocator; - - static linked_list_allocator user_allocator{USER_HEAP_START, USER_HEAP_START + USER_HEAP_SIZE}; - user_allocator_instance = &user_allocator; break; } } + + static user_heap_allocator user_allocator{USER_HEAP_START, USER_HEAP_START + USER_HEAP_SIZE}; + user_allocator_instance = &user_allocator; } auto global_heap_allocator::kernel() -> heap_allocator & @@ -55,11 +53,11 @@ namespace teachos::arch::memory::heap return *kernel_allocator_instance; } - auto global_heap_allocator::user() -> heap_allocator & + auto global_heap_allocator::user() -> 
user_heap_allocator & { - exception_handling::assert(user_allocator_instance != nullptr, - "Attempted to allocate or deallocate using the global_heap_allocator before " - "register_heap_allocation_type was called."); + // exception_handling::assert(user_allocator_instance != nullptr, + // "Attempted to allocate or deallocate using the global_heap_allocator before " + // "register_heap_allocation_type was called."); return *user_allocator_instance; } diff --git a/arch/x86_64/src/memory/heap/user_heap_allocator.cpp b/arch/x86_64/src/memory/heap/user_heap_allocator.cpp new file mode 100644 index 0000000..f09811d --- /dev/null +++ b/arch/x86_64/src/memory/heap/user_heap_allocator.cpp @@ -0,0 +1,55 @@ +#include "arch/memory/heap/user_heap_allocator.hpp" + +#include "arch/exception_handling/assert.hpp" + +#include <limits> +#include <type_traits> + +namespace teachos::arch::memory::heap +{ + namespace + { + template<typename T> + [[gnu::section(".user_text")]] + auto saturating_add(T x, T y) -> T + requires std::is_unsigned_v<T> + { + if (x > std::numeric_limits<T>::max() - y) + { + return std::numeric_limits<T>::max(); + } + T result = x + y; + return result; + } + } // namespace + + auto user_heap_allocator::allocate(std::size_t size) -> void * + { + // Reading the value only has to be done once, because compare_exchange_weak updates the value as well if the + // exchange failed, because the value was not the expected one. + auto alloc_start = next.load(std::memory_order::relaxed); + // Repeat allocation until it succeeds, has to be done, because another allocator could overtake it at any time + // causing the value to differ and the calculation to have to be redone. 
+ for (;;) + { + auto const alloc_end = saturating_add(alloc_start, size); + arch::exception_handling::assert(alloc_end <= heap_end, "[Heap Allocator] Out of memory"); + // Check if the atomic value is still the one initially loaded, if it isn't, we have been overtaken by another + // thread and need to redo the calculation. Spurious failure by weak can be ignored, because the whole allocation + // is wrapped in an infinite for loop so a failure that wasn't actually one will simply be retried until it works. + auto const updated = next.compare_exchange_weak(alloc_start, alloc_end, std::memory_order::relaxed); + if (updated) + { + return reinterpret_cast<void *>(alloc_start); + } + } + } + + auto user_heap_allocator::deallocate(void * pointer) noexcept -> void + { + if (pointer) + { + } + } + +} // namespace teachos::arch::memory::heap |
