// smallobject_hotbox_v7_box.h - SmallObject HotBox v7 (Phase v7-2: C6-only impl)
|
||
|
|
//
|
||
|
|
// Role:
|
||
|
|
// - SmallObject v7 fast path for alloc/free
|
||
|
|
// - C6-only implementation (512B blocks, 64KiB pages, 2MiB segments)
|
||
|
|
// - Uses SmallHeapCtx_v7 + SmallSegment_v7 + ColdIface_v7
|
||
|
|
|
||
|
|
#pragma once
|
||
|
|
|
||
|
|
#include <stdint.h>
|
||
|
|
#include <stddef.h>
|
||
|
|
#include <stdbool.h>
|
||
|
|
#include <stdio.h>
|
||
|
|
#include "smallsegment_v7_box.h"
|
||
|
|
#include "smallobject_cold_iface_v7_box.h"
|
||
|
|
#include "region_id_v6_box.h"
|
||
|
|
#include "../tiny_region_id.h" // For HEADER_MAGIC, HEADER_CLASS_MASK
|
||
|
|
|
||
|
|
#ifndef likely
|
||
|
|
#define likely(x) __builtin_expect(!!(x), 1)
|
||
|
|
#define unlikely(x) __builtin_expect(!!(x), 0)
|
||
|
|
#endif
|
||
|
|
|
||
|
|
// ============================================================================
|
||
|
|
// Debug/Observe Support
|
||
|
|
// ============================================================================
|
||
|
|
|
||
|
|
// V7 stats functions (defined in smallobject_cold_iface_v7.c)
|
||
|
|
extern void small_v7_stat_alloc(void);
|
||
|
|
extern void small_v7_stat_free(void);
|
||
|
|
extern void small_v7_stat_refill(void);
|
||
|
|
extern void small_v7_stat_retire(void);
|
||
|
|
|
||
|
|
// Class mismatch logging (for hint validation)
|
||
|
|
// Class-mismatch observer: records the front-supplied class hint against the
// page's actual class when they disagree. Currently a silent no-op placeholder;
// a Fail-Fast build would assert here instead.
static inline void small_v7_log_class_mismatch(void* ptr, uint8_t hint, uint8_t actual) {
    // TODO: Make this ENV-controlled
    (void)ptr; (void)hint; (void)actual; // intentionally unused for now
}
|
||
|
|
|
||
|
|
// ============================================================================
|
||
|
|
// Alloc Fast Path
|
||
|
|
// ============================================================================
|
||
|
|
|
||
|
|
// small_heap_alloc_fast_v7() - v7 alloc (C6-only for v7-2)
|
||
|
|
//
|
||
|
|
// Flow:
|
||
|
|
// 1. Get TLS context
|
||
|
|
// 2. Check current page freelist
|
||
|
|
// 3. If empty, check partial list
|
||
|
|
// 4. If no partial, call ColdIface refill
|
||
|
|
// 5. Pop from freelist and return USER ptr
|
||
|
|
//
|
||
|
|
static inline void* small_heap_alloc_fast_v7(size_t size, uint8_t class_idx) {
|
||
|
|
// v7-2: Only C6 is implemented
|
||
|
|
if (unlikely(class_idx != SMALL_V7_C6_CLASS_IDX)) {
|
||
|
|
return NULL; // Unsupported class -> front falls back
|
||
|
|
}
|
||
|
|
|
||
|
|
SmallHeapCtx_v7* ctx = small_heap_ctx_v7();
|
||
|
|
SmallClassHeap_v7* h = &ctx->cls[class_idx];
|
||
|
|
SmallPageMeta_v7* p = h->current;
|
||
|
|
|
||
|
|
// Fast path: current page has free slots
|
||
|
|
if (likely(p && p->free_list)) {
|
||
|
|
void* base = p->free_list;
|
||
|
|
p->free_list = *(void**)base;
|
||
|
|
p->used++;
|
||
|
|
|
||
|
|
// Update stats
|
||
|
|
p->alloc_count++;
|
||
|
|
p->live_current++;
|
||
|
|
if (p->live_current > p->peak_live) {
|
||
|
|
p->peak_live = p->live_current;
|
||
|
|
}
|
||
|
|
|
||
|
|
// Write header (HEADER_MAGIC | class_idx) for front compatibility
|
||
|
|
((uint8_t*)base)[0] = (uint8_t)(HEADER_MAGIC | (class_idx & HEADER_CLASS_MASK));
|
||
|
|
|
||
|
|
small_v7_stat_alloc();
|
||
|
|
// Return USER ptr (base + 1 for header compatibility with front)
|
||
|
|
return (uint8_t*)base + 1;
|
||
|
|
}
|
||
|
|
|
||
|
|
// Current exhausted -> try partial list
|
||
|
|
if (h->partial_head) {
|
||
|
|
p = h->partial_head;
|
||
|
|
h->partial_head = p->segment_next_partial;
|
||
|
|
p->segment_next_partial = NULL;
|
||
|
|
h->current = p;
|
||
|
|
|
||
|
|
if (likely(p->free_list)) {
|
||
|
|
void* base = p->free_list;
|
||
|
|
p->free_list = *(void**)base;
|
||
|
|
p->used++;
|
||
|
|
|
||
|
|
p->alloc_count++;
|
||
|
|
p->live_current++;
|
||
|
|
if (p->live_current > p->peak_live) {
|
||
|
|
p->peak_live = p->live_current;
|
||
|
|
}
|
||
|
|
|
||
|
|
// Write header (HEADER_MAGIC | class_idx) for front compatibility
|
||
|
|
((uint8_t*)base)[0] = (uint8_t)(HEADER_MAGIC | (class_idx & HEADER_CLASS_MASK));
|
||
|
|
|
||
|
|
small_v7_stat_alloc();
|
||
|
|
return (uint8_t*)base + 1;
|
||
|
|
}
|
||
|
|
}
|
||
|
|
|
||
|
|
// Completely exhausted -> ColdIface refill
|
||
|
|
small_v7_stat_refill();
|
||
|
|
p = small_cold_v7_refill_page(ctx, class_idx);
|
||
|
|
if (unlikely(!p || !p->free_list)) {
|
||
|
|
return NULL; // front falls back to legacy/pool
|
||
|
|
}
|
||
|
|
|
||
|
|
h->current = p;
|
||
|
|
|
||
|
|
// Pop from new page
|
||
|
|
void* base = p->free_list;
|
||
|
|
p->free_list = *(void**)base;
|
||
|
|
p->used++;
|
||
|
|
|
||
|
|
p->alloc_count++;
|
||
|
|
p->live_current++;
|
||
|
|
if (p->live_current > p->peak_live) {
|
||
|
|
p->peak_live = p->live_current;
|
||
|
|
}
|
||
|
|
|
||
|
|
// Write header (HEADER_MAGIC | class_idx) for front compatibility
|
||
|
|
((uint8_t*)base)[0] = (uint8_t)(HEADER_MAGIC | (class_idx & HEADER_CLASS_MASK));
|
||
|
|
|
||
|
|
small_v7_stat_alloc();
|
||
|
|
return (uint8_t*)base + 1;
|
||
|
|
}
|
||
|
|
|
||
|
|
// ============================================================================
|
||
|
|
// Free Fast Path
|
||
|
|
// ============================================================================
|
||
|
|
|
||
|
|
// small_heap_free_fast_v7() - v7 free (C6-only for v7-2)
//
// Flow:
//   1. RegionIdBox lookup to verify ptr is v7-managed
//   2. Get page_meta from segment
//   3. Validate class_idx (hint vs actual; page_meta is the source of truth)
//   4. Push to page freelist
//   5. If page becomes empty, retire to ColdIface
//
// @param ptr: USER pointer to free (base + 1; a 1-byte header precedes it)
// @param class_idx_hint: Class index hint from front/header (observed only,
//        may be ignored — the page's recorded class wins)
// @return: true if handled by v7, false if not v7-managed (front should fallback)
//
static inline bool small_heap_free_fast_v7(void* ptr, uint8_t class_idx_hint) {
    if (unlikely(!ptr)) {
        return false;
    }

    // RegionIdBox lookup: classifies the pointer by registered region kind.
    RegionLookupV6 lk = region_id_lookup_v6(ptr);

    // Check if this is a v7-managed pointer
    if (unlikely(lk.kind != REGION_KIND_SMALL_V7)) {
        return false; // Not v7 -> front falls back to legacy/pool/ULTRA
    }

    // Get segment from registry metadata.
    // NOTE(review): lk.page_meta actually carries the segment pointer for
    // REGION_KIND_SMALL_V7 entries — field name predates v7.
    SmallSegment_v7* seg = (SmallSegment_v7*)lk.page_meta;
    if (unlikely(!seg || !small_segment_v7_valid(seg))) {
        return false;
    }

    // Calculate page index from pointer; reject pointers outside the segment
    // before doing any index arithmetic.
    uintptr_t addr = (uintptr_t)ptr;
    if (unlikely(!small_ptr_in_segment_v7(seg, ptr))) {
        return false;
    }

    // USER ptr (base+1) maps to the same page as base: a block never starts
    // on the last byte of a page, so the +1 cannot cross a page boundary here.
    size_t page_idx = SMALL_V7_PAGE_IDX(seg, addr);
    if (unlikely(page_idx >= seg->num_pages)) {
        return false;
    }

    SmallPageMeta_v7* page = &seg->page_meta[page_idx];

    // Validate page is in use
    if (unlikely(!small_page_v7_valid(page))) {
        return false;
    }

    // Get actual class from page (v7 core uses page_meta.class_idx as truth)
    uint8_t class_idx = (uint8_t)page->class_idx;

    // OBSERVE: Check hint vs actual; mismatch is logged but not fatal.
    if (unlikely(class_idx != class_idx_hint)) {
        small_v7_log_class_mismatch(ptr, class_idx_hint, class_idx);
        // Continue with actual class_idx (v7 is header-independent)
    }

    // v7-2: Only C6 is implemented
    if (unlikely(class_idx != SMALL_V7_C6_CLASS_IDX)) {
        return false; // Should not happen, but fallback
    }

    // Push BASE ptr to page freelist.
    // ptr is USER ptr (base + 1), convert back to BASE. The next-pointer
    // write below deliberately overwrites the 1-byte header; alloc re-stamps
    // it on the next pop.
    void* base = (uint8_t*)ptr - 1;
    *(void**)base = page->free_list;
    page->free_list = base;

    // Update observability stats
    page->free_count++;
    page->live_current--;

    // Decrement used count; last block freed retires the whole page.
    if (unlikely(--page->used == 0)) {
        // Page is empty -> retire
        small_v7_stat_retire();
        SmallHeapCtx_v7* ctx = small_heap_ctx_v7();
        small_cold_v7_retire_page(ctx, page);
    }

    small_v7_stat_free();
    return true;
}
|
||
|
|
|
||
|
|
// ============================================================================
|
||
|
|
// Stub Functions (for compatibility, forwards to real impl)
|
||
|
|
// ============================================================================
|
||
|
|
|
||
|
|
// These maintain backward compatibility with v7-1 stub API
|
||
|
|
|
||
|
|
// v7-1 compatibility shim: forwards straight to the real v7-2 alloc fast path.
static inline void* small_heap_alloc_fast_v7_stub(size_t size, uint8_t class_idx) {
    return small_heap_alloc_fast_v7(size, class_idx);
}
|
||
|
|
|
||
|
|
// v7-1 compatibility shim: forwards straight to the real v7-2 free fast path.
static inline bool small_heap_free_fast_v7_stub(void* ptr, uint8_t class_idx) {
    return small_heap_free_fast_v7(ptr, class_idx);
}
|