// ArenaArray — arena-backed dynamic array. (~294 lines, 8.6 KiB, C++)
#pragma once
|
|
|
|
// Byte offset from the arena base at which element storage begins; the space
// below it holds the Arena / ArenaArray headers (see max_array_size).
constexpr s64 ARRAY_ARENA_START_OFFSET = 64;
|
|
|
|
template <typename T>
|
|
struct ArenaArray { // #downcasts to an ArrayView.
|
|
using ValueType = T;
|
|
s64 count;
|
|
T* data;
|
|
s64 allocated;
|
|
Arena* arena;
|
|
|
|
ArenaArray() {
|
|
memset(this, 0, sizeof(*this));
|
|
}
|
|
|
|
T& operator[] (s64 index) {
|
|
#if ARRAY_ENABLE_BOUNDS_CHECKING
|
|
if (index < 0 || index >= count) { debug_break(); } // index out of bounds
|
|
#endif
|
|
|
|
return static_cast<T*>(data)[index];
|
|
}
|
|
};
|
|
|
|
// #NOTE: I am not defining arena_array_init (ArenaArray<T>*), because I do not want to
// encourage its usage!
// Use arena_array_free to reset.
|
|
template <typename T>
|
|
ArenaArray<T>* arena_array_new (s64 preallocate_count, Arena_Reserve reserve_size) {
|
|
Arena* arena = next_arena(reserve_size);
|
|
push_arena(arena);
|
|
push_alignment(arena, 1);
|
|
ArenaArray<T>* array = New<ArenaArray<T>>(true);
|
|
array->arena = arena;
|
|
|
|
s64 commit_size_bytes = preallocate_count * sizeof(T);
|
|
if (arena_commit_first_pages(array->arena, commit_size_bytes, ARRAY_ARENA_START_OFFSET)) {
|
|
array->allocated = preallocate_count;
|
|
}
|
|
|
|
array->count = 0;
|
|
array->arena = arena;
|
|
array->data = array_start<T>(*array);
|
|
|
|
return array;
|
|
}
|
|
|
|
// Address of the first element slot: a fixed offset past the arena base.
template <typename T> T* array_start (ArenaArray<T>& array) {
    // Named cast instead of a C-style cast; this is a genuine reinterpret.
    return reinterpret_cast<T*>(array.arena->memory_base + ARRAY_ARENA_START_OFFSET);
}
|
|
|
|
// An ArenaArray is valid only when it exists and its backing arena is valid.
template <typename T> bool is_valid (ArenaArray<T>* array) {
    if (!array || !array->arena) return false;
    return is_valid(array->arena);
}
|
|
|
|
// Empty when the array holds no elements or its arena is no longer valid.
template <typename T> bool is_empty (ArenaArray<T>& array) {
    if (array.count == 0) return true;
    return !is_valid(array.arena);
}
|
|
|
|
// Bytes of committed arena memory backing this array; 0 when there is no arena.
template <typename T> s64 memory_usage (ArenaArray<T>& array) {
    // The original compared the reference itself to nullptr, which does not
    // compile; guard on the backing arena pointer instead.
    if (array.arena == nullptr) return 0;
    return arena_usage_committed_bytes(array.arena);
}
|
|
|
|
// Releases the array's backing arena (optionally returning its pages to the
// OS via `delete_pages`) and nulls the arena pointer so is_valid() fails.
// Debug builds poison the whole header to catch use-after-free.
template <typename T> void arena_array_free (ArenaArray<T>& array, bool delete_pages=true) {
    release_arena(array.arena, delete_pages);
    array.arena = nullptr;
#if BUILD_DEBUG
    poison_struct(&array);
#endif
}
|
|
|
|
// Non-owning view of the array's current contents.
// Takes the array by const reference: the previous by-value parameter copied
// the whole header on every call for no benefit (call sites are unaffected).
template <typename T> ArrayView<T> array_view (const ArenaArray<T>& array) {
    ArrayView<T> av;
    av.count = array.count;
    av.data = array.data;
    return av;
}
|
|
|
|
// Whole-array view; same contract as array_view().
template <typename T> ArrayView<T> to_view (ArenaArray<T>& array) {
    ArrayView<T> view = { array.count, array.data };
    return view;
}
|
|
|
|
// View of up to `count` elements starting at `start_offset`. The result is
// clamped to the live range and may be empty when the offset is past the end.
template <typename T> ArrayView<T> to_view (ArenaArray<T>& array, s64 start_offset, s64 count) {
    Assert(start_offset >= 0); Assert(count >= 0);

    ArrayView<T> av = { 0, nullptr };

    if (start_offset >= array.count) {
        return av; // empty
    }

    av.count = count;
    av.data = array.data + start_offset;

    // Clamp the tail. (The original used `offset`, an undeclared name.)
    if (start_offset + count > array.count) {
        av.count = array.count - start_offset;
    }

    return av;
}
|
|
|
|
// Appends the elements of `view` to the end of `array`.
template <typename T> void copy_from_view (ArenaArray<T>* array, ArrayView<T> view) {
    if (view.count <= 0) return; // nothing to append; also avoids memcpy from a null view

    s64 old_count = array->count;
    array_resize(*array, old_count + view.count, false);

    // `data` never relocates (growth only commits more arena pages), so the
    // destination can safely be computed after the resize.
    memcpy(array->data + old_count, view.data, view.count * sizeof(T));
}
|
|
|
|
// Appends every item in `items` to the end of `array`, growing if needed.
template <typename T> void array_add (ArenaArray<T>& array, ArrayView<T> items) {
    if (items.count <= 0) return; // avoids memcpy from a null/empty view

    s64 final_count = array.count + items.count;
    if (array.allocated < final_count) {
        array_reserve(array, final_count);
    }

    // Pointer taken after reserving; arena arrays never relocate anyway.
    memcpy(&array.data[array.count], items.data, items.count * sizeof(T));
    array.count += items.count;
}
|
|
|
|
// Appends a single element by value, growing capacity when needed.
template <typename T> void array_add (ArenaArray<T>& array, T item) {
    maybe_grow(array);
    array.data[array.count] = item;
    array.count += 1;
}
|
|
|
|
// Appends a value-initialized slot and returns a pointer to it.
template <typename T> T* array_add (ArenaArray<T>& array) {
    maybe_grow(array);

    T* slot = &array.data[array.count];
    *slot = T(); // value-initialize the fresh slot in place

    array.count += 1;
    return slot;
}
|
|
|
|
// Ensures room for at least one more element, doubling capacity when full.
template <typename T> force_inline void maybe_grow (ArenaArray<T>& array) {
    if (array.count >= array.allocated) {
        s64 reserve = 2 * array.allocated;
        // Doubling from 0 stays 0 (possible when the initial page commit
        // failed and `allocated` was never set), and reserve_internal would
        // then be a no-op — the caller would write out of bounds. Enforce a
        // small minimum so growth from empty works.
        if (reserve < 8) reserve = 8;
        reserve_internal(reinterpret_cast<ArenaArray<u8>&>(array), reserve, sizeof(T));
    }
}
|
|
|
|
// Ensures capacity for at least `desired_item_count` elements (never shrinks).
template <typename T> force_inline void array_reserve (ArenaArray<T>& array, s64 desired_item_count) {
    // Named cast for the deliberate type-erasure to the u8 instantiation.
    reserve_internal(reinterpret_cast<ArenaArray<u8>&>(array), desired_item_count, sizeof(T));
}
|
|
|
|
// Sets the element count to exactly `desired_item_count`, growing capacity as
// needed. When `initialize` is set, newly exposed slots are value-initialized.
template <typename T> void array_resize (ArenaArray<T>& array, s64 desired_item_count, bool initialize) {
    s64 old_count = array.count;
    reserve_internal(reinterpret_cast<ArenaArray<u8>&>(array), desired_item_count, sizeof(T));
    array.count = desired_item_count;
    if (initialize) {
        // No-op when shrinking (old_count >= desired_item_count).
        init_range(array.data, old_count, desired_item_count);
    }
}
|
|
|
|
// Largest byte size the element storage can reach inside the reserved range.
// `inline` added: a non-template function defined in a `#pragma once` header
// otherwise violates the ODR when included from multiple translation units.
inline s64 max_array_size (ArenaArray<u8>& array) {
    return reserve_size(array.arena) - sizeof(Arena) - sizeof(ArenaArray<u8>);
}
|
|
|
|
// Commits arena pages so that `new_size` bytes of element storage are usable.
// `old_size` is currently unused but kept for interface stability.
// `inline` added: non-template function in a header (ODR).
inline void array_arena_realloc (ArenaArray<u8>& array, s64 new_size, s64 old_size) {
    (void)old_size;
    Assert(new_size <= max_array_size(array));

    u8* array_begin = (u8*)array.data;
    void* result_end = (void*)(array_begin + new_size);

    // Check if we need more pages:
    if (result_end > array.arena->first_uncommitted_page) {
        // Critical error if we run out of address space!
        if (result_end > arena_address_limit(array.arena)) {
            printf("[Error] Failed to allocate because Arena is full and cannot expand!\n");
            Assert(false); // Failed to allocate because Arena is full and cannot expand
            // NOTE(review): in release builds we return without committing,
            // yet reserve_internal() still bumps `allocated` — callers may
            // then touch uncommitted pages. Consider returning a bool.
            return;
        }

        extend_committed_pages(array.arena, (u8*)result_end);
    }
}
|
|
|
|
// Grows capacity to `desired_item_count` items of `element_size` bytes each.
// Type-erased on ArenaArray<u8> so a single definition serves every T.
// `inline` added: non-template function in a header (ODR).
inline void reserve_internal (ArenaArray<u8>& array, s64 desired_item_count, s64 element_size) {
    if (desired_item_count <= array.allocated) return;

    array_arena_realloc(array, desired_item_count * element_size, array.allocated * element_size);

    array.allocated = desired_item_count;
}
|
|
|
|
template <typename T> void init_range (T* ptr, s64 start_offset, s64 end_offset) {
|
|
for (s64 i = start_offset; i < end_offset; i += 1) {
|
|
T* current_item = ptr + i;
|
|
(*current_item) = T(); // is this correct in-place init?
|
|
}
|
|
}
|
|
|
|
// Debug helper: fills `count` live elements starting at `start` with the
// 0xCD poison pattern to make stale reads obvious. No-op in release builds.
template <typename T> void array_poison_range (ArenaArray<T>& array, s64 start, s64 count) {
#if BUILD_DEBUG
    // Check that these ranges make sense
    Assert(start >= 0 && start < array.count);
    Assert(start + count <= array.count);
    memset(&array[start], 0xCD, count * sizeof(T));
#endif
}
|
|
|
|
// Empties the array and resets its arena, recomputing capacity from the pages
// that remain committed so the storage is immediately reusable.
template <typename T> force_inline void array_reset (ArenaArray<T>& array) {
    // reset backing array:
    arena_reset(array.arena);
    array.count = 0;
    // Capacity = committed bytes past the data start, in whole elements.
    // Both operands are cast to u8*: subtracting pointers of different types
    // (T* from the arena's page pointer) is ill-formed C++.
    s64 committed_bytes = (s64)((u8*)array.arena->first_uncommitted_page - (u8*)array.data);
    array.allocated = committed_bytes / sizeof(T);
}
|
|
|
|
// Logically empties the array without touching the arena: capacity and
// committed pages are kept, so subsequent adds reuse the same storage.
template <typename T> force_inline void reset_keeping_memory (ArenaArray<T>& array) {
    array.count = 0;
}
|
|
|
|
// Removes the element at `index`, shifting the tail left to preserve order.
template <typename T> force_inline void ordered_remove_by_index (ArenaArray<T>& array, s64 index) {
    Assert(index >= 0); Assert(index < array.count);

    for (s64 i = index; i < array.count-1; i += 1) {
        // Pull each successor one slot left. (The original read data[i - 1],
        // which duplicated the predecessor instead of closing the gap.)
        array.data[i] = array.data[i + 1];
    }

    array.count -= 1;
}
|
|
|
|
// Removes the element at `index` in O(1) by moving the last element into its
// slot; does not preserve element order.
template <typename T> force_inline void unordered_remove_by_index (ArenaArray<T>& array, s64 index) {
    Assert(index >= 0); Assert(index < array.count);

    s64 last_index = array.count - 1;
    // Parentheses added: `if index != last_index { ... }` is not valid C++.
    if (index != last_index) { array.data[index] = array.data[last_index]; }

    array.count -= 1;
}
|
|
|
|
// Inserts the contents of `src_array` at `offset`, shifting the tail right.
template <typename T> void insert_at(ArenaArray<T>& array, s64 offset, ArenaArray<T>& src_array) {
    Assert(offset >= 0 && offset <= array.count);
    if (!is_valid(&src_array)) return; // is_valid is declared for pointers
    if (src_array.count <= 0) return;  // nothing to insert

    s64 new_count = array.count + src_array.count;
    array_reserve(array, new_count);

    // Shift the tail with memmove: source and destination overlap whenever
    // the inserted span is shorter than the tail (memcpy would be UB here).
    T* src = array.data + offset;
    memmove(src + src_array.count, src, (array.count - offset) * sizeof(T));

    memcpy(array.data + offset, src_array.data, src_array.count * sizeof(T));
    array.count = new_count; // the original never updated the count
}
|
|
|
|
// Removes and returns the last element. The array must be non-empty.
template <typename T> T pop (ArenaArray<T>& array) {
    Assert(array.count > 0); // popping an empty array would read out of bounds
    T result = array.data[array.count-1];
    array.count -= 1;
    return result;
}
|
|
|
|
// Returns a copy of the last element without removing it; must be non-empty.
template <typename T> T peek (ArenaArray<T>& array) {
    Assert(array.count > 0); // peeking an empty array reads out of bounds
    return array.data[array.count-1];
}
|
|
|
|
// Returns a pointer to the last element; must be non-empty. The pointer stays
// valid across growth (arena arrays never relocate) but not across removal.
template <typename T> T* peek_pointer (ArenaArray<T>& array) {
    Assert(array.count > 0); // empty array has no last element
    return &(array.data[array.count-1]);
}
|
|
|
|
// Removes `count` elements starting at `start_offset`, shifting the tail left.
template <typename T> void delete_range (ArenaArray<T>& array, s64 start_offset, s64 count) {
    Assert(start_offset >= 0 && count >= 0 && start_offset + count <= array.count);
    // memmove, not memcpy: the shifted tail overlaps the deleted span whenever
    // the tail is longer than `count`, and memcpy on overlap is UB.
    memmove(array.data + start_offset, array.data + start_offset + count, (array.count - start_offset - count) * sizeof(T));
    array.count -= count;
}
|
|
|
|
// Deep-copies `array` into a brand-new ArenaArray backed by its own arena.
// The original did not compile: it passed one argument to the two-argument
// arena_array_new, used `.` on pointers, and returned a pointer as a reference.
template <typename T> ArenaArray<T>& array_copy (ArenaArray<T>& array) {
    // NOTE(review): assumes Arena exposes a `reserve_size` member (the
    // original wrote `array.arena.reserve_size`) — confirm against Arena.
    ArenaArray<T>* new_array = arena_array_new<T>(array.count, array.arena->reserve_size);
    array_reserve(*new_array, array.count);
    memcpy(new_array->data, array.data, array.count * sizeof(T));
    new_array->count = array.count;
    return *new_array;
}
|
|
|
|
// NOTE(review): despite the name this does NOT copy the elements — it returns
// a non-owning view aliasing the array's storage (same result as to_view()).
// The view dangles once the array is freed or reset; verify callers expect that.
template <typename T> ArrayView<T> array_copy_as_view (ArenaArray<T>& array) {
    ArrayView<T> view = { array.count, array.data };
    return view;
}
|
|
|