#ifndef MEM_ALLOC_ARENA_HPP
#define MEM_ALLOC_ARENA_HPP

#include "kprint"
#include <cstdint>
#include <cstddef>
#include <list>
#include <algorithm>

namespace os::mem {

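// A contiguous span of address space: [addr, addr + size).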
struct Region { uintptr_t addr; size_t size; };

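// Page-granular first-fit allocator over a fixed window of address space.
// Free space is kept as an address-ordered list of regions; adjacent free
// regions are coalesced on deallocation.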
class Arena {
public:
  static constexpr size_t PAGE_SZ = 4096;

  Arena(uintptr_t base, size_t size)
    : base_{align_up(base, PAGE_SZ)} {
    // Clamp to the whole pages that fit inside [base, base + size).
    const uintptr_t end = align_down(base + size, PAGE_SZ);
    size_ = (end > base_) ? end - base_ : 0;
    if (size_) free_.push_back({base_, size_});
  }

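  // Allocate `len` bytes, rounded up to whole pages, at page alignment.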
  void* allocate(size_t len) noexcept {
    return allocate_aligned(PAGE_SZ, len);
  }

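  // First-fit allocation with a caller-chosen alignment (assumed to be a
  // power of two); alignment and length are rounded up to page granularity.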
  void* allocate_aligned(size_t alignment, size_t len) noexcept {
    if (len == 0) return nullptr;  // refuse zero-length requests
    alignment = std::max(alignment, PAGE_SZ);
    len = align_up(len, PAGE_SZ);

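    // Walk the address-ordered free list for the first region that can hold the request.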
    for (auto it = free_.begin(); it != free_.end(); ++it) {
      const uintptr_t a = align_up(it->addr, alignment);
      if (a + len > it->addr + it->size) continue;

      // Split left: give back the gap created by aligning the start upwards.
      if (a > it->addr) {
        Region left{it->addr, a - it->addr};
        it->addr = a;
        it->size -= left.size;
        free_.insert(std::upper_bound(free_.begin(), free_.end(), left, cmp), left);
      }

      // Split right: give back whatever remains past the end of the allocation.
      const uintptr_t end = it->addr + len;
      const uintptr_t it_end = it->addr + it->size;
      if (end < it_end) {
        Region right{end, it_end - end};
        it->size = len;
        free_.insert(std::upper_bound(free_.begin(), free_.end(), right, cmp), right);
      }

      void* ret = reinterpret_cast<void*>(it->addr);
      free_.erase(it);
      return ret;
    }
    return nullptr;
  }

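  // Claim a specific address range. Succeeds only if the whole range lies
  // inside a single free region; otherwise returns nullptr.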
  void* allocate_at(void* addr, size_t len) noexcept {
    kprintf("[arena] allocating at %p, %zu bytes\n", addr, len);

    const auto a = reinterpret_cast<uintptr_t>(addr);
    len = align_up(len, PAGE_SZ);
    if (a < base_ || a + len > base_ + size_) return nullptr;

    for (auto it = free_.begin(); it != free_.end(); ++it) {
      const uintptr_t start = it->addr;
      const uintptr_t end = start + it->size;
      if (a >= start && a + len <= end) {
        // Keep whatever is left of the free region on either side of the request.
        Region left{start, a - start};
        Region right{a + len, end - (a + len)};
        free_.erase(it);
        if (left.size)
          free_.insert(std::upper_bound(free_.begin(), free_.end(), left, cmp), left);
        if (right.size)
          free_.insert(std::upper_bound(free_.begin(), free_.end(), right, cmp), right);
        return addr;
      }
    }
    return nullptr;
  }

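  // Return a range to the free list, merging it with adjacent free regions
  // so the list never holds two touching entries.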
  void deallocate(void* addr, size_t len) noexcept {
    if (!addr || !len) return;
    Region r{align_down(reinterpret_cast<uintptr_t>(addr), PAGE_SZ), align_up(len, PAGE_SZ)};

    auto it = std::upper_bound(free_.begin(), free_.end(), r, cmp);
    if (it != free_.begin()) {
      auto prev = std::prev(it);
      if (prev->addr + prev->size == r.addr) {
        prev->size += r.size;
        maybe_merge_with_next(prev);
        return;
      }
    }
    it = free_.insert(it, r);
    maybe_merge_with_next(it);
  }

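  // True when [a, a + len), rounded up to whole pages, fits inside a single free region.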
  bool is_range_free(uintptr_t a, size_t len) const noexcept {
    len = align_up(len, PAGE_SZ);
    for (auto& r : free_) {
      if (a >= r.addr && a + len <= r.addr + r.size) {
        return true;
      }
      if (a + len <= r.addr) {
        break; // free_ is address-ordered, so no later region can contain the range
      }
    }
    return false;
  }

  size_t bytes_free() const noexcept {
    size_t s = 0;
    for (auto& r : free_) s += r.size;
    return s;
  }

  size_t bytes_used() const noexcept {
    return size_ - bytes_free();
  }

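  // One past the highest allocated address: base_ + size_ unless the tail of
  // the arena is free, in which case allocations end where the last free region starts.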
  uintptr_t allocation_end() const noexcept {
    if (free_.empty()) return base_ + size_;
    auto last = std::prev(free_.end());
    if (last->addr + last->size == base_ + size_) return last->addr;
    return base_ + size_;
  }

  void reset() {
    free_.clear();
    if (size_) free_.push_back({base_, size_});
  }

  // Alignment helpers; `a` must be a power of two.
  template<typename T>
  static constexpr T align_up(T x, size_t a) noexcept {
    return (x + a - 1) & ~static_cast<T>(a - 1);
  }
  template<typename T>
  static constexpr T align_down(T x, size_t a) noexcept {
    return x & ~static_cast<T>(a - 1);
  }

private:
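  // free_ is kept sorted by start address; this comparator drives the
  // upper_bound insertions above.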
  static bool cmp(const Region& a, const Region& b) noexcept {
    return a.addr < b.addr;
  }

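  // Fold the following region into `it` when the two are contiguous.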
  void maybe_merge_with_next(std::list<Region>::iterator it) {
    auto nx = std::next(it);
    if (nx != free_.end() && it->addr + it->size == nx->addr) {
      it->size += nx->size;
      free_.erase(nx);
    }
  }

  uintptr_t base_{0};
  size_t size_{0};
  std::list<Region> free_;
};

} // namespace os::mem

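// Minimal usage sketch. The base address and window size below are
// illustrative assumptions, not values taken from the rest of the codebase:
//
//   os::mem::Arena arena{0x200000, 1 << 20};               // 1 MiB window
//   void* p = arena.allocate(2 * os::mem::Arena::PAGE_SZ);  // two pages
//   void* q = arena.allocate_aligned(1 << 16, 4096);        // one page, 64 KiB-aligned
//   arena.deallocate(p, 2 * os::mem::Arena::PAGE_SZ);       // coalesced back into the free list
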
#endif // MEM_ALLOC_ARENA_HPP