#ifndef HAK_FRONT_FASTLANE_ENV_BOX_H
#define HAK_FRONT_FASTLANE_ENV_BOX_H

// ============================================================================
// Phase 6: Front FastLane - ENV Gate Box
// ============================================================================
//
// Purpose: ENV gate for Front FastLane (Layer Collapse optimization)
//
// ENV Variables:
//   HAKMEM_FRONT_FASTLANE=0/1 (default: 1, promoted)
//     - 0: Disabled (use existing wrapper paths)
//     - 1: Enabled (use FastLane single-box entry point)
//
//   HAKMEM_FRONT_FASTLANE_CLASS_MASK=0x.. (default: 0xFF, optional)
//     - Bitmask for gradual rollout (e.g., 0x01 = class 0 only)
//     - 0xFF = all classes enabled
//
//   HAKMEM_FRONT_FASTLANE_FREE_DEDUP=0/1 (default: 1)
//     - Phase 6-2: free path skips duplicate header validation when enabled
//
// Box Theory:
//   - L0: ENV gate (this file)
//   - L1: FrontFastLaneBox (front_fastlane_box.h)
//   - Integration: hak_wrappers.inc.h
//
// Safety:
//   - Default ON (opt-out via ENV=0)
//   - Negligible overhead when disabled (one relaxed load of a cached flag)
//   - Lazy init (getenv on first call, result cached)
//
// Rollback:
//   - Set HAKMEM_FRONT_FASTLANE=0
//   - Or rebuild without the integration
//
// ============================================================================
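//
// Usage example (illustrative; "./bench" is a placeholder binary name, not
// part of this project):
//   HAKMEM_FRONT_FASTLANE=0 ./bench                  # opt out of FastLane
//   HAKMEM_FRONT_FASTLANE_CLASS_MASK=0x03 ./bench    # roll out to classes 0-1 only
//   HAKMEM_FRONT_FASTLANE_FREE_DEDUP=0 ./bench       # keep FastLane, disable free dedup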

#include <stdatomic.h>
#include <stdlib.h>
#include <stdint.h>

#include "../hakmem_build_flags.h"

// Forward declarations for cross-box includes
static inline int front_fastlane_enabled(void);
static inline uint8_t front_fastlane_class_mask(void);
static inline int front_fastlane_free_dedup_enabled(void);

// ============================================================================
// ENV Gate Implementation
// ============================================================================

// Lazy init: check the ENV variable on the first call and cache the result.
// Thread-safe: read-only after init (atomic store, relaxed load).
static inline int front_fastlane_enabled(void) {
#if HAKMEM_FAST_PROFILE_FIXED
    return 1;
#endif
    static _Atomic int cached = -1;  // -1 = uninitialized
    int val = atomic_load_explicit(&cached, memory_order_relaxed);

    if (__builtin_expect(val == -1, 0)) {
        // Cold path: first call, check ENV
        const char* env = getenv("HAKMEM_FRONT_FASTLANE");
        int enabled = 1;  // default: ON (opt-out)

        if (env) {
            // Parse: "0" or empty = disabled, anything else = enabled
            enabled = (env[0] != '0' && env[0] != '\0') ? 1 : 0;
        }

        // Cache result (thread-safe: atomic store)
        atomic_store_explicit(&cached, enabled, memory_order_relaxed);
        val = enabled;
    }

    return val;
}

// Get class mask for gradual rollout (default: 0xFF = all classes)
static inline uint8_t front_fastlane_class_mask(void) {
#if HAKMEM_FAST_PROFILE_FIXED
    return (uint8_t)0xFF;
#endif
    static _Atomic int cached = -1;  // -1 = uninitialized
    int val = atomic_load_explicit(&cached, memory_order_relaxed);

    if (__builtin_expect(val == -1, 0)) {
        // Cold path: first call, check ENV
        const char* env = getenv("HAKMEM_FRONT_FASTLANE_CLASS_MASK");
        int mask = 0xFF;  // default: all classes enabled

        if (env) {
            // Parse with auto-detected base: "0x.." = hex, leading "0" = octal,
            // otherwise decimal
            char* end;
            long parsed = strtol(env, &end, 0);
            if (end != env && parsed >= 0 && parsed <= 0xFF) {
                mask = (int)parsed;
            }
        }

        // Cache result (thread-safe: atomic store)
        atomic_store_explicit(&cached, mask, memory_order_relaxed);
        val = mask;
    }

    return (uint8_t)val;
}
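
// Illustrative helper (an assumption, not part of the original box API): shows
// how the ON/OFF gate and the 8-bit rollout mask are intended to combine per
// size class. The real per-class check lives in the FastLane integration.
static inline int front_fastlane_class_allowed(unsigned cls) {
    if (!front_fastlane_enabled()) return 0;  // feature globally off
    if (cls >= 8) return 0;                   // 8-bit mask covers classes 0-7
    return (int)((front_fastlane_class_mask() >> cls) & 1u);  // rollout bit
}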

// Phase 6-2: Free DeDup gate (eliminate duplicate header validation)
// When enabled, front_fastlane_try_free() directly calls free_tiny_fast()
// instead of doing its own header validation.
static inline int front_fastlane_free_dedup_enabled(void) {
#if HAKMEM_FAST_PROFILE_FIXED
    return 1;
#endif
    static _Atomic int cached = -1;  // -1 = uninitialized
    int val = atomic_load_explicit(&cached, memory_order_relaxed);

    if (__builtin_expect(val == -1, 0)) {
        // Cold path: first call, check ENV
        const char* env = getenv("HAKMEM_FRONT_FASTLANE_FREE_DEDUP");
        int enabled = 1;  // default: ON (opt-out, +5.18% proven)

        if (env) {
            // Parse: "0" or empty = disabled, anything else = enabled
            enabled = (env[0] != '0' && env[0] != '\0') ? 1 : 0;
        }

        // Cache result (thread-safe: atomic store)
        atomic_store_explicit(&cached, enabled, memory_order_relaxed);
        val = enabled;
    }

    return val;
}
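
// Dispatch sketch (illustrative; front_fastlane_try_free() and free_tiny_fast()
// live in other boxes, and the calls below are assumptions about their shape):
//
//   if (front_fastlane_free_dedup_enabled()) {
//       return free_tiny_fast(ptr);   // single header-validation pass
//   }
//   // otherwise: FastLane performs its own header validation before freeing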

#endif // HAK_FRONT_FASTLANE_ENV_BOX_H