// File: hakmem/core/box/tiny_class_stats_box.h
// (listing metadata: 109 lines, 4.1 KiB, C)
// tiny_class_stats_box.h - Lightweight per-thread class stats (OBSERVE layer)
//
// Purpose:
// - Provide per-class counters without atomics for cheap observation.
// - Hot paths call small inline helpers; aggregation/printing can be added later.
#ifndef TINY_CLASS_STATS_BOX_H
#define TINY_CLASS_STATS_BOX_H
#include <stdint.h>
#include <stdatomic.h>
#include <stdio.h>
#include "../hakmem_tiny_config.h"
// Per-thread counter block: one uint64_t slot per tiny size class.
// Plain (non-atomic) counters are safe because each thread updates only its
// own __thread instance (g_tiny_class_stats below); cross-thread totals go
// into the separate _Atomic global arrays.
typedef struct TinyClassStatsThread {
uint64_t uc_miss[TINY_NUM_CLASSES]; // unified_cache_refill() events — NOTE(review): original comment said "hits" but the name suggests misses; confirm against call sites
uint64_t warm_hit[TINY_NUM_CLASSES]; // warm pool successes
uint64_t shared_lock[TINY_NUM_CLASSES]; // shared pool lock acquisitions (hook as needed)
uint64_t tls_carve_attempt[TINY_NUM_CLASSES]; // Warm/TLS carve attempts
uint64_t tls_carve_success[TINY_NUM_CLASSES]; // Warm/TLS carve successes
} TinyClassStatsThread;
extern __thread TinyClassStatsThread g_tiny_class_stats;
// Global (cross-thread) aggregates for OBSERVE/LEARN
extern _Atomic uint64_t g_tiny_class_stats_uc_miss_global[TINY_NUM_CLASSES];
extern _Atomic uint64_t g_tiny_class_stats_warm_hit_global[TINY_NUM_CLASSES];
extern _Atomic uint64_t g_tiny_class_stats_shared_lock_global[TINY_NUM_CLASSES];
extern _Atomic uint64_t g_tiny_class_stats_tls_carve_attempt_global[TINY_NUM_CLASSES];
extern _Atomic uint64_t g_tiny_class_stats_tls_carve_success_global[TINY_NUM_CLASSES];
// Record one unified-cache miss for size class `ci`.
// Bumps the per-thread counter (no atomics) plus the relaxed global
// aggregate. Compiles to a no-op unless HAKMEM_TINY_CLASS_STATS_COMPILED.
static inline void tiny_class_stats_on_uc_miss(int ci) {
#if HAKMEM_TINY_CLASS_STATS_COMPILED
    if (ci < 0 || ci >= TINY_NUM_CLASSES)
        return; // silently ignore out-of-range class indices
    g_tiny_class_stats.uc_miss[ci]++; // thread-local: race-free by construction
    atomic_fetch_add_explicit(&g_tiny_class_stats_uc_miss_global[ci], 1,
                              memory_order_relaxed); // counter only; no ordering needed
#else
    (void)ci; // stats compiled out (Phase 24 default)
#endif
}
// Record one warm-pool success for size class `ci`.
// Per-thread counter plus relaxed global aggregate; no-op unless
// HAKMEM_TINY_CLASS_STATS_COMPILED.
static inline void tiny_class_stats_on_warm_hit(int ci) {
#if HAKMEM_TINY_CLASS_STATS_COMPILED
    if (ci < 0 || ci >= TINY_NUM_CLASSES)
        return; // silently ignore out-of-range class indices
    g_tiny_class_stats.warm_hit[ci]++; // thread-local: race-free by construction
    atomic_fetch_add_explicit(&g_tiny_class_stats_warm_hit_global[ci], 1,
                              memory_order_relaxed); // counter only; no ordering needed
#else
    (void)ci; // stats compiled out (Phase 24 default)
#endif
}
// Record one shared-pool lock acquisition for size class `ci`.
// Per-thread counter plus relaxed global aggregate; no-op unless
// HAKMEM_TINY_CLASS_STATS_COMPILED.
static inline void tiny_class_stats_on_shared_lock(int ci) {
#if HAKMEM_TINY_CLASS_STATS_COMPILED
    if (ci < 0 || ci >= TINY_NUM_CLASSES)
        return; // silently ignore out-of-range class indices
    g_tiny_class_stats.shared_lock[ci]++; // thread-local: race-free by construction
    atomic_fetch_add_explicit(&g_tiny_class_stats_shared_lock_global[ci], 1,
                              memory_order_relaxed); // counter only; no ordering needed
#else
    (void)ci; // stats compiled out (Phase 24 default)
#endif
}
// Record one Warm/TLS carve attempt for size class `ci`.
// Per-thread counter plus relaxed global aggregate; no-op unless
// HAKMEM_TINY_CLASS_STATS_COMPILED.
static inline void tiny_class_stats_on_tls_carve_attempt(int ci) {
#if HAKMEM_TINY_CLASS_STATS_COMPILED
    if (ci < 0 || ci >= TINY_NUM_CLASSES)
        return; // silently ignore out-of-range class indices
    g_tiny_class_stats.tls_carve_attempt[ci]++; // thread-local: race-free by construction
    atomic_fetch_add_explicit(&g_tiny_class_stats_tls_carve_attempt_global[ci], 1,
                              memory_order_relaxed); // counter only; no ordering needed
#else
    (void)ci; // stats compiled out (Phase 24 default)
#endif
}
// Record one successful Warm/TLS carve for size class `ci`.
// Per-thread counter plus relaxed global aggregate; no-op unless
// HAKMEM_TINY_CLASS_STATS_COMPILED.
static inline void tiny_class_stats_on_tls_carve_success(int ci) {
#if HAKMEM_TINY_CLASS_STATS_COMPILED
    if (ci < 0 || ci >= TINY_NUM_CLASSES)
        return; // silently ignore out-of-range class indices
    g_tiny_class_stats.tls_carve_success[ci]++; // thread-local: race-free by construction
    atomic_fetch_add_explicit(&g_tiny_class_stats_tls_carve_success_global[ci], 1,
                              memory_order_relaxed); // counter only; no ordering needed
#else
    (void)ci; // stats compiled out (Phase 24 default)
#endif
}
// --- Cold-path helpers (definitions live in the corresponding .c file) ---
// Optional: reset this thread's counters to zero (cold path only).
void tiny_class_stats_reset_thread(void);
// Snapshot helpers (cold path): copy current counters into caller-provided struct.
// Caller owns `out`; presumably must be non-NULL — confirm against definitions.
void tiny_class_stats_snapshot_thread(TinyClassStatsThread* out);
void tiny_class_stats_snapshot_global(TinyClassStatsThread* out);
// Simple dump helpers (cold path): write a tagged report to `out` (e.g. stderr).
void tiny_class_stats_dump_thread(FILE* out, const char* tag);
void tiny_class_stats_dump_global(FILE* out, const char* tag);
#endif // TINY_CLASS_STATS_BOX_H