37 explicit Arena(
size_t block_size = 64 * 1024)
38 : block_size_(block_size) {}
52 void*
allocate(
size_t size,
size_t alignment = 8) {
53 if (size == 0)
return nullptr;
56 if (!blocks_.empty()) {
57 Block& current = blocks_.back();
58 void* ptr = try_allocate_from(current, size, alignment);
66 if (size > SIZE_MAX - alignment)
return nullptr;
67 size_t new_block_size = (size + alignment > block_size_)
70 allocate_block(new_block_size);
72 Block& fresh = blocks_.back();
73 void* ptr = try_allocate_from(fresh, size, alignment);
92 void*
allocate_zeroed(
size_t size,
size_t alignment =
alignof(std::max_align_t)) {
93 void* ptr =
allocate(size, alignment);
94 if (ptr) std::memset(ptr, 0, size);
106 template <
typename T>
108 if (count == 0)
return nullptr;
109 if (count > SIZE_MAX /
sizeof(T))
return nullptr;
110 void* raw =
allocate(count *
sizeof(T),
alignof(T));
111 return static_cast<T*
>(raw);
119 const uint8_t*
copy(
const uint8_t* data,
size_t size) {
120 if (size == 0 || data ==
nullptr)
return nullptr;
122 if (!dst)
return nullptr;
123 std::memcpy(dst, data, size);
124 return static_cast<const uint8_t*
>(dst);
132 size_t len = str.size() + 1;
134 if (!dst)
return nullptr;
135 std::memcpy(dst, str.c_str(), len);
136 return static_cast<const char*
>(dst);
141 [[nodiscard]]
size_t bytes_used()
const {
return total_used_; }
148 for (
auto& block : blocks_) {
/// One contiguous slab of arena memory.
/// NOTE(review): the size/used fields and block_size_ were missing from the
/// visible fragment; reconstructed from their uses in try_allocate_from,
/// allocate, and the constructor.
struct Block {
  std::unique_ptr<uint8_t[]> data;  // owning storage (null for the OOM placeholder)
  size_t size = 0;                  // capacity of `data` in bytes
  size_t used = 0;                  // bump offset of the next free byte
};

std::vector<Block> blocks_;  // all slabs, newest last
size_t block_size_;          // default slab size chosen at construction
size_t total_used_ = 0;      // bytes handed to callers (no padding)
173 static void* try_allocate_from(Block& block,
size_t size,
size_t alignment) {
175 size_t aligned_offset = align_up(block.used, alignment);
176 if (aligned_offset + size > block.size) {
179 void* ptr = block.data.get() + aligned_offset;
180 block.used = aligned_offset + size;
186 void allocate_block(
size_t size) {
189 block.data = std::make_unique<uint8_t[]>(size);
192 blocks_.push_back(std::move(block));
193 }
catch (
const std::bad_alloc&) {
198 blocks_.push_back(std::move(empty));
// Round `offset` up to a multiple of `alignment` (a power of two).
// Returns SIZE_MAX when rounding would overflow, so the caller's fit check
// fails instead of wrapping around.
static size_t align_up(size_t offset, size_t alignment) {
  // Degenerate alignments (0 or non-power-of-two) would corrupt the mask
  // math below (alignment 0 makes mask == SIZE_MAX); treat them as "no
  // alignment required".
  if (alignment == 0 || (alignment & (alignment - 1)) != 0) return offset;
  size_t mask = alignment - 1;
  if (offset > SIZE_MAX - mask) return SIZE_MAX;  // clamp on overflow
  return (offset + mask) & ~mask;
}
Bump-pointer arena allocator for batch Parquet reads.
Arena& operator=(const Arena&) = delete
Non-copyable.
void reset()
Reset the arena, reusing all memory blocks without freeing them.
Arena(const Arena &)=delete
Non-copyable.
Arena(Arena &&) noexcept=default
Move-constructible.
T * allocate_array(size_t count)
Allocate a typed array of count elements from the arena.
const char * copy_string(const std::string &str)
Copy a string into the arena (including null terminator).
size_t bytes_used() const
Total bytes allocated (excluding alignment padding).
void * allocate_zeroed(size_t size, size_t alignment=alignof(std::max_align_t))
Allocate zero-initialized memory from the arena.
Arena(size_t block_size = 64 * 1024)
Construct an arena with the given default block size.
void * allocate(size_t size, size_t alignment=8)
Allocate aligned memory from the arena.
const uint8_t * copy(const uint8_t *data, size_t size)
Copy raw bytes into the arena and return a pointer to the copy.