TLS SLL triage: add class-mask gating (HAKMEM_TINY_SLL_C03_ONLY / HAKMEM_TINY_SLL_MASK); honor the mask in the inline POP/PUSH fast paths and in tls_sll_box; the SLL-off path remains stable. This gates the TLS SLL to classes C0..C3 for now, to unblock shared-SS triage.
This commit is contained in:
@ -44,6 +44,11 @@ extern __thread uint32_t g_tls_sll_count[TINY_NUM_CLASSES];
|
||||
// Result: 5-10 fewer instructions, better register allocation
|
||||
//
|
||||
#define TINY_ALLOC_FAST_POP_INLINE(class_idx, ptr_out) do { \
|
||||
extern int g_tls_sll_class_mask; \
|
||||
if (__builtin_expect(((g_tls_sll_class_mask & (1u << (class_idx))) == 0), 0)) { \
|
||||
(ptr_out) = NULL; \
|
||||
break; \
|
||||
} \
|
||||
void* _head = g_tls_sll_head[(class_idx)]; \
|
||||
if (__builtin_expect(_head != NULL, 1)) { \
|
||||
if (__builtin_expect((uintptr_t)_head == TINY_REMOTE_SENTINEL, 0)) { \
|
||||
@ -88,6 +93,10 @@ extern __thread uint32_t g_tls_sll_count[TINY_NUM_CLASSES];
|
||||
// byte 0 for HEADER_MAGIC. Without restoration, it finds 0x00 → uses wrong offset → SEGV.
|
||||
// COST: 1 byte write (~1-2 cycles per free, negligible).
|
||||
#define TINY_ALLOC_FAST_PUSH_INLINE(class_idx, ptr) do { \
|
||||
extern int g_tls_sll_class_mask; \
|
||||
if (__builtin_expect(((g_tls_sll_class_mask & (1u << (class_idx))) == 0), 0)) { \
|
||||
break; \
|
||||
} \
|
||||
if (!(ptr)) break; \
|
||||
/* Phase E1-CORRECT: API ptr is USER pointer (= base+1). Convert back to BASE. */ \
|
||||
uint8_t* _base = (uint8_t*)(ptr) - 1; \
|
||||
|
||||
Reference in New Issue
Block a user