Implement Phantom typing for Tiny FastCache layer
Refactor FastCache and TLS cache APIs to use Phantom types (hak_base_ptr_t) for compile-time type safety, preventing BASE/USER pointer confusion.

Changes:
1. core/hakmem_tiny_fastcache.inc.h:
   - fastcache_pop() returns hak_base_ptr_t instead of void*
   - fastcache_push() accepts hak_base_ptr_t instead of void*
2. core/hakmem_tiny.c:
   - Updated forward declarations to match new signatures
3. core/tiny_alloc_fast.inc.h, core/hakmem_tiny_alloc.inc:
   - Alloc paths now use hak_base_ptr_t for cache operations
   - BASE->USER conversion via HAK_RET_ALLOC macro
4. core/hakmem_tiny_refill.inc.h, core/refill/ss_refill_fc.h:
   - Refill paths properly handle BASE pointer types
   - Fixed: Removed unnecessary HAK_BASE_FROM_RAW() in ss_refill_fc.h line 176
5. core/hakmem_tiny_free.inc, core/tiny_free_magazine.inc.h:
   - Free paths convert USER->BASE before cache push
   - USER->BASE conversion via HAK_USER_TO_BASE or ptr_user_to_base()
6. core/hakmem_tiny_legacy_slow_box.inc:
   - Legacy path properly wraps pointers for cache API

Benefits:
- Type safety at compile time (in debug builds)
- Zero runtime overhead (debug builds only; release builds use typedef=void*)
- All BASE->USER conversions verified via Task analysis
- Prevents pointer type confusion bugs

Testing:
- Build: SUCCESS (all 9 files)
- Smoke test: PASS (sh8bench runs to completion)
- Conversion path verification: 3/3 paths correct

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
@@ -193,10 +193,10 @@ SuperSlab* adopt_gate_try(int class_idx, TinyTLSSlab* tls) {
|
||||
// ============================================================================
|
||||
// Functions: tiny_fast_pop(), tiny_fast_push() - 28 lines (lines 377-404)
|
||||
// Forward declarations for functions defined in hakmem_tiny_fastcache.inc.h
|
||||
static inline void* tiny_fast_pop(int class_idx);
|
||||
static inline int tiny_fast_push(int class_idx, void* ptr);
|
||||
static inline void* fastcache_pop(int class_idx);
|
||||
static inline int fastcache_push(int class_idx, void* ptr);
|
||||
static inline hak_base_ptr_t tiny_fast_pop(int class_idx);
|
||||
static inline int tiny_fast_push(int class_idx, hak_base_ptr_t ptr);
|
||||
static inline hak_base_ptr_t fastcache_pop(int class_idx);
|
||||
static inline int fastcache_push(int class_idx, hak_base_ptr_t ptr);
|
||||
|
||||
// ============================================================================
|
||||
// EXTRACTED TO hakmem_tiny_hot_pop.inc.h (Phase 2D-1)
|
||||
@@ -665,4 +665,4 @@ static void tiny_tls_sll_diag_atexit(void) {
|
||||
// ============================================================================
|
||||
// ACE Learning Layer & Tiny Guard - EXTRACTED to hakmem_tiny_ace_guard_box.inc
|
||||
// ============================================================================
|
||||
#include "hakmem_tiny_ace_guard_box.inc"
|
||||
#include "hakmem_tiny_ace_guard_box.inc"
|
||||
|
||||
@@ -310,13 +310,14 @@ void* hak_tiny_alloc(size_t size) {
|
||||
}
|
||||
}
|
||||
|
||||
void* fast = tiny_fast_pop(class_idx);
|
||||
if (__builtin_expect(fast != NULL, 0)) {
|
||||
hak_base_ptr_t fast = tiny_fast_pop(class_idx);
|
||||
if (__builtin_expect(!hak_base_is_null(fast), 0)) {
|
||||
void* fast_raw = HAK_BASE_TO_RAW(fast);
|
||||
#if HAKMEM_BUILD_DEBUG
|
||||
g_tls_hit_count[class_idx]++;
|
||||
#endif
|
||||
tiny_debug_ring_record(TINY_RING_EVENT_ALLOC_SUCCESS, (uint16_t)class_idx, fast, 5);
|
||||
HAK_RET_ALLOC_WITH_METRIC(fast);
|
||||
tiny_debug_ring_record(TINY_RING_EVENT_ALLOC_SUCCESS, (uint16_t)class_idx, fast_raw, 5);
|
||||
HAK_RET_ALLOC_WITH_METRIC(fast_raw);
|
||||
}
|
||||
} else {
|
||||
tiny_debug_ring_record(TINY_RING_EVENT_FRONT_BYPASS, (uint16_t)class_idx, NULL, 0);
|
||||
|
||||
@@ -81,12 +81,14 @@ static inline void tiny_fast_debug_log(int class_idx, const char* event, uint16_
|
||||
#endif
|
||||
|
||||
// Basic fast cache operations
|
||||
static inline __attribute__((always_inline)) void* tiny_fast_pop(int class_idx) {
|
||||
if (!g_fast_enable) return NULL;
|
||||
// NOTE: These APIs conceptually operate on BASE pointers.
|
||||
// Interfaces use hak_base_ptr_t for type-safety; storage remains void*.
|
||||
static inline __attribute__((always_inline)) hak_base_ptr_t tiny_fast_pop(int class_idx) {
|
||||
if (!g_fast_enable) return HAK_BASE_FROM_RAW(NULL);
|
||||
uint16_t cap = g_fast_cap[class_idx];
|
||||
if (cap == 0) return NULL;
|
||||
if (cap == 0) return HAK_BASE_FROM_RAW(NULL);
|
||||
void* head = g_fast_head[class_idx];
|
||||
if (!head) return NULL;
|
||||
if (!head) return HAK_BASE_FROM_RAW(NULL);
|
||||
// Phase 7: header-aware next pointer (C0-C6: base+1, C7: base)
|
||||
#if HAKMEM_TINY_HEADER_CLASSIDX
|
||||
// Phase E1-CORRECT: ALL classes have 1-byte header, next ptr at offset 1
|
||||
@@ -105,10 +107,11 @@ static inline __attribute__((always_inline)) void* tiny_fast_pop(int class_idx)
|
||||
g_fast_count[class_idx] = 0;
|
||||
}
|
||||
// Phase E1-CORRECT: Return BASE pointer; caller (HAK_RET_ALLOC) performs BASE→USER
|
||||
return head;
|
||||
return HAK_BASE_FROM_RAW(head);
|
||||
}
|
||||
|
||||
static inline __attribute__((always_inline)) int tiny_fast_push(int class_idx, void* ptr) {
|
||||
static inline __attribute__((always_inline)) int tiny_fast_push(int class_idx, hak_base_ptr_t base) {
|
||||
void* ptr = HAK_BASE_TO_RAW(base);
|
||||
// NEW: Check Front-Direct/SLL-OFF bypass (priority check before any work)
|
||||
static __thread int s_front_direct_free = -1;
|
||||
if (__builtin_expect(s_front_direct_free == -1, 0)) {
|
||||
@@ -184,19 +187,20 @@ static inline __attribute__((always_inline)) int tiny_fast_push(int class_idx, v
|
||||
}
|
||||
|
||||
// Frontend fast cache operations
|
||||
static inline void* fastcache_pop(int class_idx) {
|
||||
static inline hak_base_ptr_t fastcache_pop(int class_idx) {
|
||||
TinyFastCache* fc = &g_fast_cache[class_idx];
|
||||
if (__builtin_expect(fc->top > 0, 1)) {
|
||||
void* base = fc->items[--fc->top];
|
||||
// ✅ FIX #16: Return BASE pointer (not USER)
|
||||
// FastCache stores base pointers. Caller will apply HAK_RET_ALLOC
|
||||
// which does BASE → USER conversion via tiny_region_id_write_header
|
||||
return base;
|
||||
return HAK_BASE_FROM_RAW(base);
|
||||
}
|
||||
return NULL;
|
||||
return HAK_BASE_FROM_RAW(NULL);
|
||||
}
|
||||
|
||||
static inline int fastcache_push(int class_idx, void* ptr) {
|
||||
static inline int fastcache_push(int class_idx, hak_base_ptr_t base) {
|
||||
void* ptr = HAK_BASE_TO_RAW(base);
|
||||
TinyFastCache* fc = &g_fast_cache[class_idx];
|
||||
if (__builtin_expect(fc->top < TINY_FASTCACHE_CAP, 1)) {
|
||||
fc->items[fc->top++] = ptr;
|
||||
|
||||
@@ -247,7 +247,7 @@ void hak_tiny_free_with_slab(void* ptr, TinySlab* slab) {
|
||||
|
||||
if (g_fast_enable && g_fast_cap[class_idx] != 0) {
|
||||
// Phase E1-CORRECT: ALL classes (C0-C7) have 1-byte header
|
||||
void* base = HAK_BASE_TO_RAW(ptr_user_to_base(HAK_USER_FROM_RAW(ptr), class_idx));
|
||||
hak_base_ptr_t base = ptr_user_to_base(HAK_USER_FROM_RAW(ptr), class_idx);
|
||||
int pushed = 0;
|
||||
// Phase 7-Step5: Use config macro for dead code elimination in PGO mode
|
||||
if (__builtin_expect(TINY_FRONT_FASTCACHE_ENABLED && class_idx <= 3, 1)) {
|
||||
@@ -530,7 +530,7 @@ void hak_tiny_free(void* ptr) {
|
||||
}
|
||||
if (fast_class_idx >= 0 && g_fast_enable && g_fast_cap[fast_class_idx] != 0) {
|
||||
// Phase E1-CORRECT: ALL classes (C0-C7) have 1-byte header
|
||||
void* base2 = HAK_BASE_TO_RAW(ptr_user_to_base(HAK_USER_FROM_RAW(ptr), fast_class_idx));
|
||||
hak_base_ptr_t base2 = ptr_user_to_base(HAK_USER_FROM_RAW(ptr), fast_class_idx);
|
||||
// PRIORITY 1: Try FastCache first (bypasses SLL when Front-Direct)
|
||||
int pushed = 0;
|
||||
// Phase 7-Step5: Use config macro for dead code elimination in PGO mode
|
||||
|
||||
@@ -60,9 +60,9 @@ static __attribute__((cold, noinline, unused)) void* tiny_slow_alloc_fast(int cl
|
||||
void* extra = (void*)(base + ((size_t)extra_idx * block_size));
|
||||
int pushed = 0;
|
||||
if (__builtin_expect(g_fastcache_enable && class_idx <= 3, 1)) {
|
||||
pushed = fastcache_push(class_idx, extra);
|
||||
pushed = fastcache_push(class_idx, HAK_BASE_FROM_RAW(extra));
|
||||
} else {
|
||||
pushed = tiny_fast_push(class_idx, extra);
|
||||
pushed = tiny_fast_push(class_idx, HAK_BASE_FROM_RAW(extra));
|
||||
}
|
||||
if (!pushed) {
|
||||
if (tls_enabled) {
|
||||
|
||||
@@ -161,18 +161,18 @@ static inline void* tiny_fast_refill_and_take(int class_idx, TinyTLSList* tls) {
|
||||
// 1) Front FastCache から直接
|
||||
// Phase 7-Step6-Fix: Use config macro for dead code elimination in PGO mode
|
||||
if (__builtin_expect(TINY_FRONT_FASTCACHE_ENABLED && class_idx <= 3, 1)) {
|
||||
void* fc = fastcache_pop(class_idx);
|
||||
if (fc) {
|
||||
hak_base_ptr_t fc = fastcache_pop(class_idx);
|
||||
if (!hak_base_is_null(fc)) {
|
||||
extern unsigned long long g_front_fc_hit[TINY_NUM_CLASSES];
|
||||
g_front_fc_hit[class_idx]++;
|
||||
return fc;
|
||||
return HAK_BASE_TO_RAW(fc);
|
||||
}
|
||||
}
|
||||
|
||||
// 2) ローカルfast list
|
||||
{
|
||||
void* p = tiny_fast_pop(class_idx);
|
||||
if (p) return p;
|
||||
hak_base_ptr_t p = tiny_fast_pop(class_idx);
|
||||
if (!hak_base_is_null(p)) return HAK_BASE_TO_RAW(p);
|
||||
}
|
||||
|
||||
uint16_t cap = g_fast_cap[class_idx];
|
||||
|
||||
@@ -389,12 +389,13 @@ static inline void* tiny_alloc_fast_pop(int class_idx) {
|
||||
// Phase 1: Try array stack (FastCache) first for hottest tiny classes (C0–C3)
|
||||
// Phase 7-Step4: Use config macro for dead code elimination in PGO mode
|
||||
if (__builtin_expect(TINY_FRONT_FASTCACHE_ENABLED && class_idx <= 3, 1)) {
|
||||
void* fc = fastcache_pop(class_idx);
|
||||
if (__builtin_expect(fc != NULL, 1)) {
|
||||
hak_base_ptr_t fc = fastcache_pop(class_idx);
|
||||
if (__builtin_expect(!hak_base_is_null(fc), 1)) {
|
||||
void* fc_raw = HAK_BASE_TO_RAW(fc);
|
||||
// Frontend FastCache hit (already tracked by g_front_fc_hit)
|
||||
extern unsigned long long g_front_fc_hit[];
|
||||
g_front_fc_hit[class_idx]++;
|
||||
return fc;
|
||||
return fc_raw;
|
||||
} else {
|
||||
// Frontend FastCache miss (already tracked by g_front_fc_miss)
|
||||
extern unsigned long long g_front_fc_miss[];
|
||||
|
||||
@@ -165,7 +165,7 @@
|
||||
if (g_fastcache_enable && class_idx <= 4) {
|
||||
// Phase 10: Use hak_base_ptr_t
|
||||
hak_base_ptr_t base_ptr = hak_user_to_base(HAK_USER_FROM_RAW(ptr));
|
||||
if (fastcache_push(class_idx, HAK_BASE_TO_RAW(base_ptr))) {
|
||||
if (fastcache_push(class_idx, base_ptr)) {
|
||||
HAK_TP1(front_push, class_idx);
|
||||
HAK_STAT_FREE(class_idx);
|
||||
return;
|
||||
@@ -255,7 +255,7 @@
|
||||
// 32/64B: SLL優先(mag優先は無効化)
|
||||
// Fast path: FastCache push (preferred for ≤128B), then TLS SLL
|
||||
if (g_fastcache_enable && class_idx <= 4) {
|
||||
if (fastcache_push(class_idx, ptr)) {
|
||||
if (fastcache_push(class_idx, HAK_BASE_FROM_RAW(ptr))) {
|
||||
HAK_STAT_FREE(class_idx);
|
||||
return;
|
||||
}
|
||||
@@ -502,4 +502,4 @@
|
||||
void* base = HAK_BASE_TO_RAW(hak_user_to_base(HAK_USER_FROM_RAW(ptr)));
|
||||
tiny_remote_push(slab, base);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user