TLS SLL triage: add class-mask gating (HAKMEM_TINY_SLL_C03_ONLY / HAKMEM_TINY_SLL_MASK), honor the mask in the inline POP/PUSH macros and in tls_sll_box; the SLL-off path stays stable. For now this gates the SLL to classes C0..C3 to unblock shared SS triage.
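
For reference, a minimal sketch (not code from this commit) of how g_tls_sll_class_mask could be populated from the two environment switches named above; the function name tiny_sll_mask_init and the defaults shown are assumptions:

#include <stdlib.h>

/* Assumed default: C0..C3 only, matching this commit's gating. */
int g_tls_sll_class_mask = 0xF;

static void tiny_sll_mask_init(void)
{
    /* Explicit bitmask wins: one bit per tiny size class (bit N = class CN). */
    const char* m = getenv("HAKMEM_TINY_SLL_MASK");
    if (m && *m) {
        g_tls_sll_class_mask = (int)strtoul(m, NULL, 0);  /* accepts "0xF", "15", ... */
        return;
    }
    /* Otherwise the C0..C3-only switch restricts the SLL fast path to bits 0..3. */
    const char* c03 = getenv("HAKMEM_TINY_SLL_C03_ONLY");
    g_tls_sll_class_mask = (c03 && c03[0] == '1') ? 0xF : ~0;
}

With a mask like this in place, the gated POP/PUSH macros below bail out early for any class whose bit is clear, so those classes fall through to the shared/slow path while C0..C3 keep the TLS SLL fast path.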

Moe Charm (CI)
2025-11-14 01:05:30 +09:00
parent fcf098857a
commit 3b05d0f048
4 changed files with 39 additions and 4 deletions


@@ -44,6 +44,11 @@ extern __thread uint32_t g_tls_sll_count[TINY_NUM_CLASSES];
 // Result: 5-10 fewer instructions, better register allocation
 //
 #define TINY_ALLOC_FAST_POP_INLINE(class_idx, ptr_out) do { \
+    extern int g_tls_sll_class_mask; \
+    if (__builtin_expect(((g_tls_sll_class_mask & (1u << (class_idx))) == 0), 0)) { \
+        (ptr_out) = NULL; \
+        break; \
+    } \
     void* _head = g_tls_sll_head[(class_idx)]; \
     if (__builtin_expect(_head != NULL, 1)) { \
         if (__builtin_expect((uintptr_t)_head == TINY_REMOTE_SENTINEL, 0)) { \
@@ -88,6 +93,10 @@ extern __thread uint32_t g_tls_sll_count[TINY_NUM_CLASSES];
 // byte 0 for HEADER_MAGIC. Without restoration, it finds 0x00 → uses wrong offset → SEGV.
 // COST: 1 byte write (~1-2 cycles per free, negligible).
 #define TINY_ALLOC_FAST_PUSH_INLINE(class_idx, ptr) do { \
+    extern int g_tls_sll_class_mask; \
+    if (__builtin_expect(((g_tls_sll_class_mask & (1u << (class_idx))) == 0), 0)) { \
+        break; \
+    } \
     if (!(ptr)) break; \
     /* Phase E1-CORRECT: API ptr is USER pointer (= base+1). Convert back to BASE. */ \
     uint8_t* _base = (uint8_t*)(ptr) - 1; \