Removal strategy: Deprecate routes by disabling ENV-based routing

- v3/v4/v5 enum types kept for binary compatibility
- small_heap_v3/v4/v5_enabled() always return 0
- small_heap_v3/v4/v5_class_enabled() always return 0
- Any v3/v4/v5 ENVs are silently ignored; allocations route to LEGACY

Changes:
- core/box/smallobject_hotbox_v3_env_box.h: stub functions
- core/box/smallobject_hotbox_v4_env_box.h: stub functions
- core/box/smallobject_v5_env_box.h: stub functions
- core/front/malloc_tiny_fast.h: remove alloc/free cases (20+ lines)

Benefits:
- Cleaner routing logic (v6/v7 only for SmallObject)
- 20+ lines deleted from hot-path validation
- No behavioral change (routes were rarely used)

Performance: No regression expected (v3/v4/v5 already disabled by default)

Next: Set Learner v7 default ON, production testing

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
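For illustration, a minimal sketch of the v6/v7-only dispatch that this change leaves behind is shown below. All identifiers (route_small_alloc_sketch, small_heap_v6/v7_enabled, tiny_v6_alloc, tiny_v7_alloc, legacy_alloc) are hypothetical placeholders, not the actual names in core/front/malloc_tiny_fast.h:

    #include <stddef.h>

    /* Hypothetical gates and allocators, declared only so the sketch is
     * self-contained; the real names in malloc_tiny_fast.h may differ. */
    int   small_heap_v6_enabled(void);
    int   small_heap_v7_enabled(void);
    void* tiny_v6_alloc(size_t size);
    void* tiny_v7_alloc(size_t size);
    void* legacy_alloc(size_t size);

    /* With the v3/v4/v5 gates stubbed to return 0, the SmallObject hot path
     * only has to consider v6/v7 before falling back to LEGACY. */
    static inline void* route_small_alloc_sketch(size_t size) {
        if (small_heap_v7_enabled()) return tiny_v7_alloc(size);
        if (small_heap_v6_enabled()) return tiny_v6_alloc(size);
        return legacy_alloc(size);  /* v3/v4/v5 ENVs are silently ignored */
    }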
// smallobject_v5_env_box.h - SmallObject v5 environment gate (Phase v5-0)
//
// ENV-based gates: HAKMEM_SMALL_HEAP_V5_ENABLED, HAKMEM_SMALL_HEAP_V5_CLASSES

#ifndef HAKMEM_SMALLOBJECT_V5_ENV_BOX_H
#define HAKMEM_SMALLOBJECT_V5_ENV_BOX_H

#include <stdlib.h>
#include <string.h>
#include <stdint.h>

// ENV sentinel values
#define ENV_UNINIT (-1)
#define ENV_ENABLED (1)
#define ENV_DISABLED (0)

// route priority enum
enum small_route_priority {
    ROUTE_PRIORITY_V4 = 0,
    ROUTE_PRIORITY_V5 = 1,
    ROUTE_PRIORITY_AUTO = 2,
};

// v10: v5 deprecated - always disabled, routes to LEGACY
static inline int small_heap_v5_enabled(void) {
    return 0;
}

static inline uint32_t small_heap_v5_class_mask(void) {
    return 0; // No classes routed to v5
}

static inline int small_heap_v5_class_enabled(uint32_t class_idx) {
    (void)class_idx; // unused
    return 0; // v5 disabled in Phase v10
}

// Convenience helpers
static inline int small_heap_v5_c6_enabled(void) {
    return small_heap_v5_class_enabled(6);
}

static inline int small_heap_v5_c5_enabled(void) {
    return small_heap_v5_class_enabled(5);
}

static inline int small_heap_v5_c7_enabled(void) {
    return small_heap_v5_class_enabled(7);
}

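// Usage sketch (hypothetical caller, not part of the original header): routing
// code would normally gate a size class before taking the v5 path, e.g.
//
//   if (small_heap_v5_c6_enabled()) { /* route class-6 allocations to v5 */ }
//
// With the Phase v10 stubs above this condition is always false, so every
// class falls through to the LEGACY route.
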
// small_route_priority() - route priority (v4/v5/auto)
// ENV: HAKMEM_ROUTE_PRIORITY={v4|v5|auto}, default: v4
static inline int small_route_priority(void) {
    static int g_priority = ENV_UNINIT;
    if (__builtin_expect(g_priority == ENV_UNINIT, 0)) {
        const char* e = getenv("HAKMEM_ROUTE_PRIORITY");
        if (e && *e) {
            if (strcmp(e, "v5") == 0) {
                g_priority = ROUTE_PRIORITY_V5;
            } else if (strcmp(e, "auto") == 0) {
                g_priority = ROUTE_PRIORITY_AUTO;
            } else {
                g_priority = ROUTE_PRIORITY_V4; // default or "v4"
            }
        } else {
            g_priority = ROUTE_PRIORITY_V4; // default
        }
    }
    return g_priority;
}

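// Usage sketch (illustrative only): the priority is parsed once and cached in
// the function-local static, so hot-path code can branch on it cheaply, e.g.
//
//   switch (small_route_priority()) {
//   case ROUTE_PRIORITY_V5:   /* prefer v5 for v5-enabled classes */ break;
//   case ROUTE_PRIORITY_AUTO: /* let the runtime pick the route   */ break;
//   default:                  /* ROUTE_PRIORITY_V4 (default)      */ break;
//   }
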
// small_heap_v5_segment_size() - segment size override
// ENV: HAKMEM_SMALL_HEAP_V5_SEGMENT_SIZE, default: 2MiB (2*1024*1024)
static inline size_t small_heap_v5_segment_size(void) {
    // Cached as long long (not int) so large power-of-two sizes do not
    // overflow; ENV_UNINIT (-1) can never collide with a valid size.
    static long long g_size = ENV_UNINIT;
    if (__builtin_expect(g_size == ENV_UNINIT, 0)) {
        const char* e = getenv("HAKMEM_SMALL_HEAP_V5_SEGMENT_SIZE");
        if (e && *e) {
            size_t sz = (size_t)strtoul(e, NULL, 0);
            // validate: must be power of 2, >= 64KiB
            if (sz >= (64 * 1024) && (sz & (sz - 1)) == 0) {
                g_size = (long long)sz;
            } else {
                g_size = (2 * 1024 * 1024); // fallback to default
            }
        } else {
            g_size = (2 * 1024 * 1024); // default: 2MiB
        }
    }
    return (size_t)g_size;
}

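// Example values (strtoul is called with base 0, so decimal, 0x-hex, and
// leading-0 octal are all accepted):
//   HAKMEM_SMALL_HEAP_V5_SEGMENT_SIZE=1048576  -> 1 MiB (power of two, >= 64 KiB)
//   HAKMEM_SMALL_HEAP_V5_SEGMENT_SIZE=0x400000 -> 4 MiB
//   HAKMEM_SMALL_HEAP_V5_SEGMENT_SIZE=3000000  -> rejected (not a power of two),
//                                                 falls back to the 2 MiB default
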
// ============================================================================
// Phase v5-4: Header mode configuration
// ============================================================================

// Header mode enum
#define SMALL_HEAP_V5_HEADER_MODE_FULL 0
#define SMALL_HEAP_V5_HEADER_MODE_LIGHT 1

// small_heap_v5_header_mode() - header write mode (default: full)
// ENV: HAKMEM_SMALL_HEAP_V5_HEADER_MODE={full|light}, default: full
// - full: write header on every alloc (safe, standard)
// - light: write header once during carve, skip on alloc (research mode, +2-4% perf)
static inline int small_heap_v5_header_mode(void) {
    static int g_header_mode = ENV_UNINIT;
    if (__builtin_expect(g_header_mode == ENV_UNINIT, 0)) {
        const char* e = getenv("HAKMEM_SMALL_HEAP_V5_HEADER_MODE");
        if (e && *e) {
            if (strcmp(e, "light") == 0 || strcmp(e, "LIGHT") == 0 || *e == '1') {
                g_header_mode = SMALL_HEAP_V5_HEADER_MODE_LIGHT;
            } else {
                g_header_mode = SMALL_HEAP_V5_HEADER_MODE_FULL; // default or "full"
            }
        } else {
            g_header_mode = SMALL_HEAP_V5_HEADER_MODE_FULL; // default: full
        }
    }
    return g_header_mode;
}

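// Usage sketch (hypothetical allocator code; carve_slot() and
// write_block_header() are illustrative names, not part of this header):
//
//   void* p = carve_slot(seg, class_idx);
//   if (small_heap_v5_header_mode() == SMALL_HEAP_V5_HEADER_MODE_FULL)
//       write_block_header(p, class_idx);  /* light mode relies on the header
//                                             written once at carve time */
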
// ============================================================================
// Phase v5-5: TLS cache configuration (research mode)
// ============================================================================

// small_heap_v5_tls_cache_enabled() - TLS cache enable check (default: disabled)
// ENV: HAKMEM_SMALL_HEAP_V5_TLS_CACHE_ENABLED={0|1}, default: 0
// - 0: disabled (standard behavior)
// - 1: enabled (C6 TLS cache, +1-2% perf, research mode)
static inline int small_heap_v5_tls_cache_enabled(void) {
    static int g_tls_cache_enabled = ENV_UNINIT;
    if (__builtin_expect(g_tls_cache_enabled == ENV_UNINIT, 0)) {
        const char* e = getenv("HAKMEM_SMALL_HEAP_V5_TLS_CACHE_ENABLED");
        g_tls_cache_enabled = (e && *e && *e != '0') ? ENV_ENABLED : ENV_DISABLED;
    }
    return (g_tls_cache_enabled == ENV_ENABLED);
}

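// Note: the (e && *e && *e != '0') check treats any non-empty value whose
// first character is not '0' as enabled, so "1", "yes", or "on" all enable
// the cache; unset, empty, or "0..." disables it. The batch and ULTRA gates
// below use the same parsing.
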
// ============================================================================
// Phase v5-6: TLS batch configuration (research mode)
// ============================================================================

// small_heap_v5_batch_enabled() - TLS batch enable check (default: disabled)
// ENV: HAKMEM_SMALL_HEAP_V5_BATCH_ENABLED={0|1}, default: 0
// - 0: disabled (standard behavior)
// - 1: enabled (C6 TLS batch, 4-slot batching, research mode)
static inline int small_heap_v5_batch_enabled(void) {
    static int g_batch_enabled = ENV_UNINIT;
    if (__builtin_expect(g_batch_enabled == ENV_UNINIT, 0)) {
        const char* e = getenv("HAKMEM_SMALL_HEAP_V5_BATCH_ENABLED");
        g_batch_enabled = (e && *e && *e != '0') ? ENV_ENABLED : ENV_DISABLED;
    }
    return (g_batch_enabled == ENV_ENABLED);
}

// ============================================================================
// Phase v5-7: C6 ULTRA mode configuration (research mode)
// ============================================================================

// small_heap_v5_ultra_c6_enabled() - C6 ULTRA mode enable check (default: disabled)
// ENV: HAKMEM_SMALL_HEAP_V5_ULTRA_C6_ENABLED={0|1}, default: 0
// - 0: disabled (use existing v5 cache/batch path)
// - 1: enabled (C6 uses 32-slot TLS freelist, ULTRA pattern)
static inline int small_heap_v5_ultra_c6_enabled(void) {
    static int g_ultra_c6_enabled = ENV_UNINIT;
    if (__builtin_expect(g_ultra_c6_enabled == ENV_UNINIT, 0)) {
        const char* e = getenv("HAKMEM_SMALL_HEAP_V5_ULTRA_C6_ENABLED");
        g_ultra_c6_enabled = (e && *e && *e != '0') ? ENV_ENABLED : ENV_DISABLED;
    }
    return (g_ultra_c6_enabled == ENV_ENABLED);
}

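// Illustrative helper (a minimal sketch, not part of the original API): how a
// C6 allocation path might pick among the research-mode strategies above.
// The precedence shown here is an assumption; the real allocator may differ.
static inline int small_heap_v5_c6_strategy_sketch(void) {
    if (small_heap_v5_ultra_c6_enabled())  return 3; // 32-slot TLS freelist (ULTRA)
    if (small_heap_v5_batch_enabled())     return 2; // 4-slot TLS batching
    if (small_heap_v5_tls_cache_enabled()) return 1; // C6 TLS cache
    return 0;                                        // plain v5 path
}
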
#endif // HAKMEM_SMALLOBJECT_V5_ENV_BOX_H