Box TLS-SLL + free-boundary hardening: normalize C0–C6 pointers to their base (ptr-1) at the free boundary and route all caches/freelists through that base. Replace the remaining direct writes to g_tls_sll_head with the Box API (tls_sll_push/tls_sll_splice) in the refill, magazine, and ultra paths; C7 remains excluded. This fixes the rbp=0xa0 crash on free by preventing header overwrite and centralizing the TLS-SLL invariants.

This commit is contained in:
Moe Charm (CI)
2025-11-10 16:48:20 +09:00
parent 1b6624dec4
commit b09ba4d40d
26 changed files with 1079 additions and 354 deletions

View File

@ -23,6 +23,7 @@
#include "hakmem_tiny_magazine.h"
#include "hakmem_tiny_tls_list.h"
#include "tiny_box_geometry.h" // Box 3: Geometry & Capacity Calculator
#include "box/tls_sll_box.h" // Box TLS-SLL: Safe SLL operations API
#include <stdint.h>
#include <pthread.h>
#include <stdlib.h>
@ -147,10 +148,9 @@ static inline int quick_refill_from_sll(int class_idx) {
if (room > 2) room = 2;
int filled = 0;
while (room > 0) {
void* head = g_tls_sll_head[class_idx];
if (!head) break;
g_tls_sll_head[class_idx] = *(void**)head;
if (g_tls_sll_count[class_idx] > 0) g_tls_sll_count[class_idx]--;
// CRITICAL: Use Box TLS-SLL API to avoid race condition (rbp=0xa0 SEGV)
void* head = NULL;
if (!tls_sll_pop(class_idx, &head)) break;
qs->items[qs->top++] = head;
room--; filled++;
}
@ -209,6 +209,11 @@ __attribute__((noinline)) int sll_refill_small_from_ss(int class_idx, int max_ta
#else
static inline int sll_refill_small_from_ss(int class_idx, int max_take) {
#endif
// CRITICAL: C7 (1KB) is headerless - incompatible with TLS SLL refill
if (__builtin_expect(class_idx == 7, 0)) {
return 0; // C7 uses slow path exclusively
}
if (!g_use_superslab || max_take <= 0) return 0;
// ランタイムA/B: P0を有効化している場合はバッチrefillへ委譲
do {
@ -259,9 +264,12 @@ static inline int sll_refill_small_from_ss(int class_idx, int max_take) {
void* p = tiny_block_at_index(base, meta->carved, bs);
meta->carved++;
meta->used++;
*(void**)p = g_tls_sll_head[class_idx];
g_tls_sll_head[class_idx] = p;
g_tls_sll_count[class_idx]++;
// CRITICAL: Use Box TLS-SLL API (C7-safe, no race)
if (!tls_sll_push(class_idx, p, sll_cap)) {
// SLL full (should not happen, room was checked)
meta->used--; meta->carved--; // Rollback
break;
}
ss_active_inc(tls->ss);
taken++;
continue;
@ -271,9 +279,14 @@ static inline int sll_refill_small_from_ss(int class_idx, int max_take) {
void* p = meta->freelist;
meta->freelist = *(void**)p;
meta->used++;
*(void**)p = g_tls_sll_head[class_idx];
g_tls_sll_head[class_idx] = p;
g_tls_sll_count[class_idx]++;
// CRITICAL: Use Box TLS-SLL API (C7-safe, no race)
if (!tls_sll_push(class_idx, p, sll_cap)) {
// SLL full (should not happen, room was checked)
*(void**)p = meta->freelist; // Rollback freelist
meta->freelist = p;
meta->used--;
break;
}
ss_active_inc(tls->ss);
taken++;
continue;
@ -322,9 +335,12 @@ static inline int sll_refill_small_from_ss(int class_idx, int max_take) {
continue;
}
if (!p) break;
*(void**)p = g_tls_sll_head[class_idx];
g_tls_sll_head[class_idx] = p;
g_tls_sll_count[class_idx]++;
// CRITICAL: Use Box TLS-SLL API (C7-safe, no race)
if (!tls_sll_push(class_idx, p, sll_cap)) {
// SLL full (should not happen, room was checked)
// Rollback: need to return block to meta (complex, just break)
break;
}
taken++;
}
return taken;
@ -398,11 +414,11 @@ static inline int frontend_refill_fc(int class_idx) {
int filled = 0;
// Step A: First bulk transfer from TLS SLL to FastCache (lock-free, O(1))
// CRITICAL: Use Box TLS-SLL API to avoid race condition (rbp=0xa0 SEGV)
if (g_tls_sll_enable) {
while (need > 0 && g_tls_sll_head[class_idx] != NULL) {
void* h = g_tls_sll_head[class_idx];
g_tls_sll_head[class_idx] = *(void**)h;
if (g_tls_sll_count[class_idx] > 0) g_tls_sll_count[class_idx]--; // underflow prevention
while (need > 0) {
void* h = NULL;
if (!tls_sll_pop(class_idx, &h)) break;
fc->items[fc->top++] = h;
need--; filled++;
if (fc->top >= TINY_FASTCACHE_CAP) break;
@ -445,9 +461,11 @@ static inline int bulk_mag_to_sll_if_room(int class_idx, TinyTLSMag* mag, int n)
if (take <= 0) return 0;
for (int i = 0; i < take; i++) {
void* p = mag->items[--mag->top].ptr;
*(void**)p = g_tls_sll_head[class_idx];
g_tls_sll_head[class_idx] = p;
g_tls_sll_count[class_idx]++;
if (!tls_sll_push(class_idx, p, cap)) {
// No more room; return remaining items to magazine and stop
mag->top++; // undo pop
break;
}
}
HAK_PATHDBG_INC(g_path_refill_calls, class_idx);
return take;
@ -480,9 +498,10 @@ static inline void ultra_refill_sll(int class_idx) {
hak_tiny_set_used(slab, first);
slab->free_count--;
void* p0 = (char*)slab->base + ((size_t)first * bs);
*(void**)p0 = g_tls_sll_head[class_idx];
g_tls_sll_head[class_idx] = p0;
g_tls_sll_count[class_idx]++;
if (!tls_sll_push(class_idx, p0, (uint32_t)sll_cap)) {
// SLL saturated; stop refilling
break;
}
remaining--;
// Try to allocate more from the same word to amortize scanning
int word_idx = first / 64;
@ -495,9 +514,9 @@ static inline void ultra_refill_sll(int class_idx) {
hak_tiny_set_used(slab, block_idx);
slab->free_count--;
void* p = (char*)slab->base + ((size_t)block_idx * bs);
*(void**)p = g_tls_sll_head[class_idx];
g_tls_sll_head[class_idx] = p;
g_tls_sll_count[class_idx]++;
if (!tls_sll_push(class_idx, p, (uint32_t)sll_cap)) {
break;
}
remaining--;
// Update free_bits for next iteration
used = slab->bitmap[word_idx];