#ifndef HAKMEM_TINY_LEGACY_FALLBACK_BOX_H
#define HAKMEM_TINY_LEGACY_FALLBACK_BOX_H

#include <stdbool.h>
#include <stdint.h>
#include "../front/tiny_unified_cache.h"
#include "../front/tiny_first_page_cache.h"              // Phase 3 C2: First page inline cache
#include "../hakmem.h"
#include "tiny_front_v3_env_box.h"
#include "free_path_stats_box.h"
#include "tiny_front_hot_box.h"
#include "tiny_metadata_cache_env_box.h"                 // Phase 3 C2: Metadata cache ENV gate
#include "hakmem_env_snapshot_box.h"                     // Phase 4 E1: ENV snapshot consolidation
#include "tiny_unified_cache_fastapi_env_box.h"          // Phase 74-3: FASTAPI ENV gate
#include "tiny_c6_inline_slots_env_box.h"                // Phase 75-1: C6 inline slots ENV gate
#include "../front/tiny_c6_inline_slots.h"               // Phase 75-1: C6 inline slots API
#include "tiny_c5_inline_slots_env_box.h"                // Phase 75-2: C5 inline slots ENV gate
#include "../front/tiny_c5_inline_slots.h"               // Phase 75-2: C5 inline slots API
#include "tiny_c4_inline_slots_env_box.h"                // Phase 76-1: C4 inline slots ENV gate
#include "../front/tiny_c4_inline_slots.h"               // Phase 76-1: C4 inline slots API
#include "tiny_c2_local_cache_env_box.h"                 // Phase 79-1: C2 local cache ENV gate
#include "../front/tiny_c2_local_cache.h"                // Phase 79-1: C2 local cache API
#include "tiny_c3_inline_slots_env_box.h"                // Phase 77-1: C3 inline slots ENV gate
#include "../front/tiny_c3_inline_slots.h"               // Phase 77-1: C3 inline slots API
#include "tiny_inline_slots_fixed_mode_box.h"            // Phase 78-1: Optional fixed-mode gating
#include "tiny_inline_slots_switch_dispatch_box.h"       // Phase 80-1: Switch dispatch for C4/C5/C6
#include "tiny_inline_slots_switch_dispatch_fixed_box.h" // Phase 83-1: Switch dispatch fixed mode
#include "tiny_inline_slots_overflow_stats_box.h"        // Phase 87b: Legacy fallback counter
#include "tiny_c6_inline_slots_ifl_env_box.h"            // Phase 91: C6 intrusive LIFO inline slots ENV gate
#include "tiny_c6_inline_slots_ifl_tls_box.h"            // Phase 91: C6 intrusive LIFO inline slots TLS state

// Purpose: Encapsulate legacy free logic (shared by multiple paths)
// Called by: malloc_tiny_fast.h (free path) + tiny_c6_ultra_free_box.c (C6 fallback)
//
// Contract:
// - base:      BASE pointer (already extracted via ptr - 1)
// - class_idx: size class (0-7)
// - Returns:   void (always succeeds or falls back to tiny_hot_free_fast)
//
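// Usage sketch (illustrative only; not copied from the real call sites):
// the caller recovers BASE via ptr - 1 (per the contract above) and passes the
// size class it has already resolved. `resolve_tiny_class_idx` below is a
// hypothetical placeholder - the actual class lookup lives in the callers
// listed under "Called by".
//
//   void* base = (void*)((uint8_t*)user_ptr - 1);     // BASE = user pointer - 1
//   uint32_t cls = resolve_tiny_class_idx(user_ptr);  // hypothetical lookup, yields 0-7
//   tiny_legacy_fallback_free_base(base, cls);        // never fails; worst case -> tiny_hot_free_fast
//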
__attribute__((always_inline))
static inline void tiny_legacy_fallback_free_base_with_env(void* base, uint32_t class_idx, const HakmemEnvSnapshot* env) {
    // Phase 87b: Count legacy fallback calls for verification
    atomic_fetch_add(&g_inline_slots_overflow_stats.legacy_fallback_calls, 1);

    // Phase 80-1: Switch dispatch for C4/C5/C6 (branch reduction optimization)
    // Phase 83-1: Per-op branch removed via fixed-mode caching
    // C2/C3 excluded (NO-GO from Phase 77-1/79-1)
    if (tiny_inline_slots_switch_dispatch_enabled_fast()) {
        // Switch mode: Direct jump to case (zero comparison overhead for C4/C5/C6)
        switch (class_idx) {
            case 4:
                if (tiny_c4_inline_slots_enabled_fast()) {
                    if (c4_inline_push(c4_inline_tls(), base)) {
                        FREE_PATH_STAT_INC(legacy_fallback);
                        if (__builtin_expect(free_path_stats_enabled(), 0)) {
                            g_free_path_stats.legacy_by_class[class_idx]++;
                        }
                        return;
                    }
                }
                break;
            case 5:
                if (tiny_c5_inline_slots_enabled_fast()) {
                    if (c5_inline_push(c5_inline_tls(), base)) {
                        FREE_PATH_STAT_INC(legacy_fallback);
                        if (__builtin_expect(free_path_stats_enabled(), 0)) {
                            g_free_path_stats.legacy_by_class[class_idx]++;
                        }
                        return;
                    }
                }
                break;
            case 6:
                // Phase 91: C6 Intrusive LIFO Inline Slots (check BEFORE FIFO)
                if (tiny_c6_inline_slots_ifl_enabled_fast()) {
                    if (tiny_c6_inline_slots_ifl_push_fast(base)) {
                        FREE_PATH_STAT_INC(legacy_fallback);
                        if (__builtin_expect(free_path_stats_enabled(), 0)) {
                            g_free_path_stats.legacy_by_class[class_idx]++;
                        }
                        return;
                    }
                }
                // Phase 75-1: C6 Inline Slots (FIFO - fallback)
                if (tiny_c6_inline_slots_enabled_fast()) {
                    if (c6_inline_push(c6_inline_tls(), base)) {
                        FREE_PATH_STAT_INC(legacy_fallback);
                        if (__builtin_expect(free_path_stats_enabled(), 0)) {
                            g_free_path_stats.legacy_by_class[class_idx]++;
                        }
                        return;
                    }
                }
                break;
            default:
                // C0-C3, C7: fall through to unified_cache push
                break;
        }
        // Switch mode: fall through to unified_cache push after miss
    } else {
        // If-chain mode (Phase 80-1 baseline): C3/C4/C5/C6 sequential checks
        // NOTE: C2 local cache (Phase 79-1 NO-GO) removed from hot path

        // Phase 77-1: C3 Inline Slots early-exit (ENV gated)
        // Try C3 inline slots FIRST (before C4/C5/C6/unified cache) for class 3
        if (class_idx == 3 && tiny_c3_inline_slots_enabled_fast()) {
            if (c3_inline_push(c3_inline_tls(), base)) {
                // Success: pushed to C3 inline slots
                FREE_PATH_STAT_INC(legacy_fallback);
                if (__builtin_expect(free_path_stats_enabled(), 0)) {
                    g_free_path_stats.legacy_by_class[class_idx]++;
                }
                return;
            }
            // FULL → fall through to C4/C5/C6/unified cache
        }

        // Phase 76-1: C4 Inline Slots early-exit (ENV gated)
        // Try C4 inline slots SECOND (before C5/C6/unified cache) for class 4
        if (class_idx == 4 && tiny_c4_inline_slots_enabled_fast()) {
            if (c4_inline_push(c4_inline_tls(), base)) {
                // Success: pushed to C4 inline slots
                FREE_PATH_STAT_INC(legacy_fallback);
                if (__builtin_expect(free_path_stats_enabled(), 0)) {
                    g_free_path_stats.legacy_by_class[class_idx]++;
                }
                return;
            }
            // FULL → fall through to C5/C6/unified cache
        }

        // Phase 75-2: C5 Inline Slots early-exit (ENV gated)
        // Try C5 inline slots THIRD (before C6 and unified cache) for class 5
        if (class_idx == 5 && tiny_c5_inline_slots_enabled_fast()) {
            if (c5_inline_push(c5_inline_tls(), base)) {
                // Success: pushed to C5 inline slots
                FREE_PATH_STAT_INC(legacy_fallback);
                if (__builtin_expect(free_path_stats_enabled(), 0)) {
                    g_free_path_stats.legacy_by_class[class_idx]++;
                }
                return;
            }
            // FULL → fall through to C6/unified cache
        }

        // Phase 91: C6 Intrusive LIFO Inline Slots early-exit (ENV gated)
        // Try C6 IFL FOURTH (before C6 FIFO and unified cache) for class 6
        if (class_idx == 6 && tiny_c6_inline_slots_ifl_enabled_fast()) {
            if (tiny_c6_inline_slots_ifl_push_fast(base)) {
                // Success: pushed to C6 IFL
                FREE_PATH_STAT_INC(legacy_fallback);
                if (__builtin_expect(free_path_stats_enabled(), 0)) {
                    g_free_path_stats.legacy_by_class[class_idx]++;
                }
                return;
            }
            // FULL → fall through to C6 FIFO
        }

        // Phase 75-1: C6 Inline Slots early-exit (ENV gated)
        // Try C6 FIFO inline slots LAST (before unified cache) for class 6
        if (class_idx == 6 && tiny_c6_inline_slots_enabled_fast()) {
            if (c6_inline_push(c6_inline_tls(), base)) {
                // Success: pushed to C6 inline slots
                FREE_PATH_STAT_INC(legacy_fallback);
                if (__builtin_expect(free_path_stats_enabled(), 0)) {
                    g_free_path_stats.legacy_by_class[class_idx]++;
                }
                return;
            }
            // FULL → fall through to unified cache
        }
    } // End of if-chain mode

    const TinyFrontV3Snapshot* front_snap =
        env ? (env->tiny_front_v3_enabled ? tiny_front_v3_snapshot_get() : NULL)
            : (__builtin_expect(tiny_front_v3_enabled(), 0) ? tiny_front_v3_snapshot_get() : NULL);
    const bool metadata_cache_on = env ? env->tiny_metadata_cache_eff : tiny_metadata_cache_enabled();

    // Phase 3 C2 Patch 2: First page cache hint (optional fast-path)
    // Check if pointer is in cached page (avoids metadata lookup in future optimizations)
    if (__builtin_expect(metadata_cache_on, 0)) {
        // Note: This is a hint-only check. Even if it hits, we still use the standard path.
        // The cache will be populated during refill operations for future use.
        // Currently this just validates the cache state; actual optimization TBD.
        if (tiny_first_page_cache_hit(class_idx, base, 4096)) {
            // Future: could optimize metadata access here
        }
    }

    // Legacy fallback - Unified Cache push
    if (!front_snap || front_snap->unified_cache_on) {
        // Phase 74-3 (P0): FASTAPI path (ENV-gated)
        if (tiny_uc_fastapi_enabled()) {
            // Preconditions guaranteed:
            // - unified_cache_on == true (checked above)
            // - TLS init guaranteed by front_gate_unified_enabled() in malloc_tiny_fast.h
            // - Stats compiled-out in FAST builds
            if (unified_cache_push_fast(class_idx, HAK_BASE_FROM_RAW(base))) {
                FREE_PATH_STAT_INC(legacy_fallback);

                // Per-class breakdown (Phase 4-1)
                if (__builtin_expect(free_path_stats_enabled(), 0)) {
                    if (class_idx < 8) {
                        g_free_path_stats.legacy_by_class[class_idx]++;
                    }
                }
                return;
            }
            // FULL → fallback to slow path (rare)
        }

        // Original path (FASTAPI=0 or fallback)
        if (unified_cache_push(class_idx, HAK_BASE_FROM_RAW(base))) {
            FREE_PATH_STAT_INC(legacy_fallback);

            // Per-class breakdown (Phase 4-1)
            if (__builtin_expect(free_path_stats_enabled(), 0)) {
                if (class_idx < 8) {
                    g_free_path_stats.legacy_by_class[class_idx]++;
                }
            }
            return;
        }
    }

    // Final fallback
    tiny_hot_free_fast(class_idx, base);
}

__attribute__((always_inline))
static inline void tiny_legacy_fallback_free_base(void* base, uint32_t class_idx) {
    const HakmemEnvSnapshot* env = hakmem_env_snapshot_enabled() ? hakmem_env_snapshot() : NULL;
    tiny_legacy_fallback_free_base_with_env(base, class_idx, env);
}

#endif // HAKMEM_TINY_LEGACY_FALLBACK_BOX_H