summaryrefslogtreecommitdiff
path: root/asl/handle_pool/index_pool.hpp
diff options
context:
space:
mode:
author    Steven Le Rouzic <steven.lerouzic@gmail.com>  2025-06-17 00:23:38 +0200
committer Steven Le Rouzic <steven.lerouzic@gmail.com>  2025-06-18 00:05:33 +0200
commit    a89f23da8fddfa7adb28cb8d5fafdfbfe4e485b8 (patch)
tree      e46c729de613ef63f487e27a87d45b1be9a08c48 /asl/handle_pool/index_pool.hpp
parent    92f908ee1b1385c1c3e924e3349b55682238232e (diff)
Add IndexPool
Diffstat (limited to 'asl/handle_pool/index_pool.hpp')
-rw-r--r--asl/handle_pool/index_pool.hpp196
1 file changed, 189 insertions, 7 deletions
diff --git a/asl/handle_pool/index_pool.hpp b/asl/handle_pool/index_pool.hpp
index 1d359d5..56345e8 100644
--- a/asl/handle_pool/index_pool.hpp
+++ b/asl/handle_pool/index_pool.hpp
@@ -6,12 +6,11 @@
#include "asl/base/meta.hpp"
#include "asl/containers/chunked_buffer.hpp"
#include "asl/memory/allocator.hpp"
+#include "asl/types/option.hpp"
namespace asl
{
-// @Todo Uniquely represented for the handle?
-
template<
int kIndexBits_,
int kGenBits_,
@@ -27,6 +26,7 @@ struct index_pool_config
using PrimitiveUserType = smallest_unsigned_integer_type_for_width<size_of<UserType> * 8>;
static_assert(trivially_copy_constructible<UserType>);
+ static_assert(trivially_destructible<UserType>);
static_assert(size_of<UserType> == size_of<PrimitiveUserType>, "UserType should be of size 1, 2 or 4");
static constexpr int kUserBits = []() static -> int {
@@ -43,13 +43,17 @@ struct index_pool_config
using HandleType = smallest_unsigned_integer_type_for_width<kIndexBits + kGenBits + kUserBits + 1>;
- static constexpr int kGenShift = kIndexBits;
- static constexpr int kUserShift = kIndexBits + kGenBits;
+ static constexpr int kGenShift = kIndexBits;
+ static constexpr int kUserShift = kIndexBits + kGenBits;
static constexpr HandleType kValidMask = HandleType{1} << (size_of<HandleType> * 8 - 1);
static constexpr HandleType kIndexMask = (HandleType{1} << kIndexBits) - 1;
static constexpr HandleType kGenMask = ((HandleType{1} << kGenBits) - 1) << kGenShift;
static constexpr HandleType kUserMask = ((HandleType{1} << kUserBits) - 1) << kUserShift;
+ static constexpr HandleType kNiche = static_cast<HandleType>(~uint64_t{kValidMask});
+
+ static constexpr uint64_t kMaxGen = (uint64_t{1} << kGenBits) - 1;
+ static constexpr uint64_t kMaxIndex = (uint64_t{1} << kIndexBits) - 1;
};
template<
@@ -60,14 +64,19 @@ template<
>
class index_pool_handle
{
- // using config = index_pool_config<5, 5>;
+public:
using config = index_pool_config<kIndexBits_, kGenBits_, UserType_, kUserBits_>;
+private:
config::HandleType m_handle{};
public:
constexpr index_pool_handle() = default;
+ constexpr explicit index_pool_handle(niche_t)
+ : m_handle{config::kNiche}
+ {}
+
constexpr index_pool_handle(uint64_t index, uint64_t gen)
requires (!config::kHasUser)
: m_handle{static_cast<config::HandleType>(
@@ -92,9 +101,9 @@ public:
ASL_ASSERT((bit_cast<typename config::PrimitiveUserType>(user) & (uint64_t{config::kUserMask} >> config::kUserShift)) == bit_cast<typename config::PrimitiveUserType>(user));
}
- constexpr bool is_valid(this index_pool_handle self)
+ constexpr bool is_null(this index_pool_handle self)
{
- return self.m_handle & config::kValidMask;
+ return !(self.m_handle & config::kValidMask);
}
constexpr uint64_t index(this index_pool_handle self)
@@ -114,6 +123,179 @@ public:
}
constexpr bool operator==(this index_pool_handle self, index_pool_handle other) = default;
+
+ constexpr bool operator==(this index_pool_handle self, niche_t)
+ {
+ return self.m_handle == config::kNiche;
+ }
+};
+
+template<
+ int kIndexBits_,
+ int kGenBits_,
+ typename UserType_,
+ int kUserBits_
+>
+struct is_uniquely_represented<index_pool_handle<kIndexBits_, kGenBits_, UserType_, kUserBits_>> : true_type {};
+
+template<
+ int kIndexBits_,
+ int kGenBits_,
+ typename UserType_ = empty,
+ int kUserBits_ = 0,
+ typename Payload = empty,
+ allocator Allocator = DefaultAllocator
+>
+class IndexPool
+{
+public:
+ using handle = index_pool_handle<kIndexBits_, kGenBits_, UserType_, kUserBits_>;
+
+private:
+ using config = handle::config;
+
+ static constexpr bool kHasPayload = !same_as<Payload, empty>;
+
+ // @Todo Remove need for default constructible & trivially destructible for payload
+ // Use maybe_uninit for it
+
+ // @Todo Use dummy user type with inner handle type -> no need to set user data when allocating handle
+
+ static_assert(default_constructible<Payload>);
+ static_assert(copy_constructible<Payload>);
+ static_assert(trivially_destructible<Payload>);
+
+ struct Slot
+ {
+ bool is_end_of_list : 1;
+ bool is_active : 1;
+
+ handle handle;
+
+ ASL_NO_UNIQUE_ADDRESS Payload payload;
+ };
+
+ chunked_buffer<Slot, 256, Allocator> m_slots;
+
+ // We only use the index, this is essentially the head of the linked
+ // list to the first available slot.
+ // Then the index of each slot points to the next available one.
+ handle m_first_available;
+
+ static constexpr handle make_handle(uint64_t index, uint64_t gen, [[maybe_unused]] config::UserType user_data)
+ {
+ if constexpr (config::kHasUser)
+ {
+ return handle(index, gen, user_data);
+ }
+ else
+ {
+ return handle(index, gen);
+ }
+ };
+
+ void allocate_new_slot(config::UserType user)
+ {
+ const auto new_index = static_cast<uint64_t>(m_slots.size());
+ if (new_index > config::kMaxIndex) { return; }
+
+ const handle new_handle = make_handle(new_index, 0, user);
+
+ m_slots.push(Slot{
+ .is_end_of_list = true,
+ .is_active = false,
+ .handle = new_handle,
+ .payload = Payload{}
+ });
+
+ m_first_available = new_handle;
+ }
+
+ option<handle> acquire_handle(const Payload& payload, config::UserType user)
+ {
+ if (m_first_available.is_null())
+ {
+ allocate_new_slot(user);
+ }
+
+ if (m_first_available.is_null())
+ {
+ return nullopt;
+ }
+
+ auto index = m_first_available.index();
+
+ Slot& slot = m_slots[static_cast<isize_t>(index)];
+ ASL_ASSERT(!slot.is_active);
+
+ m_first_available = slot.is_end_of_list ? handle{} : slot.handle;
+
+ slot.is_active = true;
+ slot.payload = payload;
+
+ return make_handle(index, slot.handle.gen(), user);
+ }
+
+ auto get_slot_if_valid(this auto&& self, handle h)
+ -> copy_const_t<decltype(self), Slot>*
+ {
+ auto index = static_cast<isize_t>(h.index());
+ if (index < 0 || index >= self.m_slots.size()) { return nullptr; }
+
+ auto& slot = self.m_slots[index];
+ if (!slot.is_active || slot.handle.gen() != h.gen())
+ {
+ return nullptr;
+ }
+
+ return &slot;
+ }
+
+public:
+ IndexPool() requires default_constructible<Allocator> = default;
+
+ explicit IndexPool(Allocator allocator) : m_slots{std::move(allocator)} {}
+
+ option<handle> acquire(const Payload& payload)
+ {
+ return acquire_handle(payload, {});
+ }
+
+ handle acquire_ensure(const Payload& payload)
+ {
+ auto opt = acquire(payload);
+ ASL_ASSERT_RELEASE(opt.has_value());
+ return opt.value();
+ }
+
+ // @Todo Add a policy to abandon slots that reached max generation
+ void release(handle h)
+ {
+ if (Slot* slot = get_slot_if_valid(h); slot != nullptr)
+ {
+ const uint64_t next_gen = h.gen() == config::kMaxGen ? 0 : h.gen() + 1;
+
+ slot->is_active = false;
+
+ if (m_first_available.is_null())
+ {
+ slot->is_end_of_list = true;
+ slot->handle = make_handle(h.index(), next_gen, {});
+ }
+ else
+ {
+ slot->is_end_of_list = false;
+ slot->handle = make_handle(m_first_available.index(), next_gen, {});
+ }
+
+ m_first_available = h;
+ }
+ }
+
+ bool is_valid(handle h) const
+ {
+ return get_slot_if_valid(h) != nullptr;
+ }
};
} // namespace asl