summary refs log tree commit diff
path: root/deimos/core/hash.h
diff options
context:
space:
mode:
author    Steven Le Rouzic <steven.lerouzic@gmail.com>  2024-03-25 19:32:02 +0100
committer Steven Le Rouzic <steven.lerouzic@gmail.com>  2024-03-25 19:32:02 +0100
commit    5606b4c399404c0b8f745c6702d70f26eff8b371 (patch)
tree      aeafe10a9697196b101a457654319b73fb1c89a0 /deimos/core/hash.h
parent    00c0d78199fcfbbb20828be5e06fd2d271fa4c1e (diff)
Update to Clang 18, C++23, rework allocator
Diffstat (limited to 'deimos/core/hash.h')
-rw-r--r--  deimos/core/hash.h | 74
1 file changed, 43 insertions(+), 31 deletions(-)
diff --git a/deimos/core/hash.h b/deimos/core/hash.h
index 3f8935c..f1e8958 100644
--- a/deimos/core/hash.h
+++ b/deimos/core/hash.h
@@ -9,15 +9,25 @@ constexpr uint64 MurmurHash3_GetBlock64(const char* key, uint64 block)
{
// NOLINTBEGIN
key += block * 8;
- return
- (uint64)((uint8)key[0])
- | ((uint64)((uint8)key[1]) << 8)
- | ((uint64)((uint8)key[2]) << 16)
- | ((uint64)((uint8)key[3]) << 24)
- | ((uint64)((uint8)key[4]) << 32)
- | ((uint64)((uint8)key[5]) << 40)
- | ((uint64)((uint8)key[6]) << 48)
- | ((uint64)((uint8)key[7]) << 56);
+
+ if consteval
+ {
+ return
+ (uint64)((uint8)key[0])
+ | ((uint64)((uint8)key[1]) << 8)
+ | ((uint64)((uint8)key[2]) << 16)
+ | ((uint64)((uint8)key[3]) << 24)
+ | ((uint64)((uint8)key[4]) << 32)
+ | ((uint64)((uint8)key[5]) << 40)
+ | ((uint64)((uint8)key[6]) << 48)
+ | ((uint64)((uint8)key[7]) << 56);
+ }
+ else
+ {
+ uint64 value{};
+ __builtin_memcpy(&value, key, 8);
+ return value;
+ }
// NOLINTEND
}
@@ -33,6 +43,8 @@ constexpr uint64 MurmurHash3_Fmix64(uint64 k)
constexpr uint128 MurmurHash3_x64_128(const char* key)
{
+ if consteval { return { 12, 12 }; }
+
// NOLINTBEGIN
const uint64 len = __builtin_strlen(key);
const uint64 nblocks = len / 16;
@@ -49,13 +61,13 @@ constexpr uint128 MurmurHash3_x64_128(const char* key)
for(uint64 i = 0; i < nblocks; i++)
{
- uint64 k1 = MurmurHash3_GetBlock64(key, i * 2 + 0);
- uint64 k2 = MurmurHash3_GetBlock64(key, i * 2 + 1);
+ uint64 k1 = MurmurHash3_GetBlock64(key, i * 2 + 0);
+ uint64 k2 = MurmurHash3_GetBlock64(key, i * 2 + 1);
- k1 *= c1; k1 = _rotl64(k1, 31); k1 *= c2; h1 ^= k1;
- h1 = _rotl64(h1, 27); h1 += h2; h1 = h1*5+0x52dce729;
- k2 *= c2; k2 = _rotl64(k2, 33); k2 *= c1; h2 ^= k2;
- h2 = _rotl64(h2, 31); h2 += h1; h2 = h2*5+0x38495ab5;
+ k1 *= c1; k1 = _rotl64(k1, 31); k1 *= c2; h1 ^= k1;
+ h1 = _rotl64(h1, 27); h1 += h2; h1 = h1*5+0x52dce729;
+ k2 *= c2; k2 = _rotl64(k2, 33); k2 *= c1; h2 ^= k2;
+ h2 = _rotl64(h2, 31); h2 += h1; h2 = h2*5+0x38495ab5;
}
//----------
@@ -68,24 +80,24 @@ constexpr uint128 MurmurHash3_x64_128(const char* key)
switch (len & 15ULL)
{
- case 15: k2 ^= ((uint64)(uint8)tail[14]) << 48U;
- case 14: k2 ^= ((uint64)(uint8)tail[13]) << 40U;
- case 13: k2 ^= ((uint64)(uint8)tail[12]) << 32U;
- case 12: k2 ^= ((uint64)(uint8)tail[11]) << 24U;
- case 11: k2 ^= ((uint64)(uint8)tail[10]) << 16U;
- case 10: k2 ^= ((uint64)(uint8)tail[ 9]) << 8U;
+ case 15: k2 ^= ((uint64)(uint8)tail[14]) << 48U; [[fallthrough]];
+ case 14: k2 ^= ((uint64)(uint8)tail[13]) << 40U; [[fallthrough]];
+ case 13: k2 ^= ((uint64)(uint8)tail[12]) << 32U; [[fallthrough]];
+ case 12: k2 ^= ((uint64)(uint8)tail[11]) << 24U; [[fallthrough]];
+ case 11: k2 ^= ((uint64)(uint8)tail[10]) << 16U; [[fallthrough]];
+ case 10: k2 ^= ((uint64)(uint8)tail[ 9]) << 8U; [[fallthrough]];
case 9: k2 ^= ((uint64)(uint8)tail[ 8]) << 0U;
- k2 *= c2; k2 = _rotl64(k2,33); k2 *= c1; h2 ^= k2;
-
- case 8: k1 ^= ((uint64)(uint8)tail[ 7]) << 56U;
- case 7: k1 ^= ((uint64)(uint8)tail[ 6]) << 48U;
- case 6: k1 ^= ((uint64)(uint8)tail[ 5]) << 40U;
- case 5: k1 ^= ((uint64)(uint8)tail[ 4]) << 32U;
- case 4: k1 ^= ((uint64)(uint8)tail[ 3]) << 24U;
- case 3: k1 ^= ((uint64)(uint8)tail[ 2]) << 16U;
- case 2: k1 ^= ((uint64)(uint8)tail[ 1]) << 8U;
+ k2 *= c2; k2 = _rotl64(k2,33); k2 *= c1; h2 ^= k2;
+ [[fallthrough]];
+ case 8: k1 ^= ((uint64)(uint8)tail[ 7]) << 56U; [[fallthrough]];
+ case 7: k1 ^= ((uint64)(uint8)tail[ 6]) << 48U; [[fallthrough]];
+ case 6: k1 ^= ((uint64)(uint8)tail[ 5]) << 40U; [[fallthrough]];
+ case 5: k1 ^= ((uint64)(uint8)tail[ 4]) << 32U; [[fallthrough]];
+ case 4: k1 ^= ((uint64)(uint8)tail[ 3]) << 24U; [[fallthrough]];
+ case 3: k1 ^= ((uint64)(uint8)tail[ 2]) << 16U; [[fallthrough]];
+ case 2: k1 ^= ((uint64)(uint8)tail[ 1]) << 8U; [[fallthrough]];
case 1: k1 ^= ((uint64)(uint8)tail[ 0]) << 0U;
- k1 *= c1; k1 = _rotl64(k1,31); k1 *= c2; h1 ^= k1;
+ k1 *= c1; k1 = _rotl64(k1,31); k1 *= c2; h1 ^= k1;
};
//----------