// tiny_box_geometry.h - Box 3: Geometry & Capacity Calculator
// Purpose: Centralize stride/capacity/base calculations for Tiny allocator
//
// Box Theory Responsibility:
// - Calculate block stride (size + header) with C7 (headerless) handling
// - Calculate slab capacity (usable bytes / stride)
// - Calculate slab base address with Slab 0 offset handling
// - Provide single source of truth for geometry calculations
//
// This Box eliminates code duplication and makes C7 special cases explicit.
#ifndef TINY_BOX_GEOMETRY_H
#define TINY_BOX_GEOMETRY_H
#include <stdint.h>
#include <stddef.h>
#include <stdio.h>   // For fprintf() in tiny_carve_guard_verbose (debug builds)
#include "hakmem_tiny_superslab_constants.h"
#include "hakmem_tiny_config.h" // For g_tiny_class_sizes declaration

// ============================================================================
// Box 3 API: Geometry Calculations (Single Source of Truth)
// ============================================================================
/**
* Calculate block stride for a given class
*
* @param class_idx Class index (0-7)
* @return Block stride in bytes (class_size + header, except C7 which has no header)
*
* With HAKMEM_TINY_HEADER_CLASSIDX enabled, class 7 (1KB) is headerless and uses
* stride = 1024, while all other classes use stride = class_size + 1 (1-byte header).
* With headers disabled, every class uses its raw class size as the stride.
*/
static inline size_t tiny_stride_for_class(int class_idx) {
#if HAKMEM_TINY_HEADER_CLASSIDX
    // C7 (1KB) is headerless, all others have 1-byte header
    return g_tiny_class_sizes[class_idx] + ((class_idx != 7) ? 1 : 0);
#else
    // No headers at all
    return g_tiny_class_sizes[class_idx];
#endif
}

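/*
 * Example (illustrative sketch; the concrete class size below is an assumption,
 * the real values come from g_tiny_class_sizes):
 *
 *   // With HAKMEM_TINY_HEADER_CLASSIDX enabled and g_tiny_class_sizes[3] == 64:
 *   size_t s3 = tiny_stride_for_class(3);  // 64 + 1 header byte = 65
 *   size_t s7 = tiny_stride_for_class(7);  // 1024, C7 is headerless
 */
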
/**
* Calculate slab capacity (number of blocks that fit in usable space)
*
* @param slab_idx Slab index within SuperSlab (0 for first slab, 1+ for others)
* @param stride Block stride in bytes (from tiny_stride_for_class)
* @return Number of blocks that fit in this slab
*
* Slab 0 has reduced usable space (SUPERSLAB_SLAB0_USABLE_SIZE)
* Slabs 1+ have full usable space (SUPERSLAB_SLAB_USABLE_SIZE)
*/
static inline uint16_t tiny_capacity_for_slab(int slab_idx, size_t stride) {
    size_t usable = (slab_idx == 0)
                        ? SUPERSLAB_SLAB0_USABLE_SIZE
                        : SUPERSLAB_SLAB_USABLE_SIZE;
    return (uint16_t)(usable / stride);
}

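/*
 * Example (sketch; the class index is arbitrary and the usable sizes are
 * whatever the SuperSlab constants define):
 *
 *   size_t   stride = tiny_stride_for_class(3);
 *   uint16_t cap0   = tiny_capacity_for_slab(0, stride);  // Slab 0: reduced usable space
 *   uint16_t capN   = tiny_capacity_for_slab(5, stride);  // Slabs 1+: full usable space
 */
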
/**
* Get slab base address (accounts for SUPERSLAB_SLAB0_DATA_OFFSET)
*
* @param ss SuperSlab pointer
* @param slab_idx Slab index within SuperSlab
* @return Pointer to first usable byte in this slab
*
* Slab 0 has an offset (SUPERSLAB_SLAB0_DATA_OFFSET) due to SuperSlab metadata
* Slabs 1+ start at slab_idx * SLAB_SIZE
*/
static inline uint8_t* tiny_slab_base_for_geometry(struct SuperSlab* ss, int slab_idx) {
    uint8_t* base = (uint8_t*)ss + (slab_idx * SLAB_SIZE);
    // Slab 0 offset: sizeof(SuperSlab)=1088, aligned to next 1024-boundary=2048
    if (slab_idx == 0) base += SUPERSLAB_SLAB0_DATA_OFFSET;
    return base;
}

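/*
 * Example (sketch; `ss` stands for any valid SuperSlab pointer):
 *
 *   uint8_t* b0 = tiny_slab_base_for_geometry(ss, 0);  // includes SUPERSLAB_SLAB0_DATA_OFFSET
 *   uint8_t* b1 = tiny_slab_base_for_geometry(ss, 1);  // plain slab_idx * SLAB_SIZE
 */
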
/**
* Calculate usable bytes for a given slab
*
* @param slab_idx Slab index within SuperSlab
* @return Usable bytes in this slab
*/
static inline size_t tiny_usable_bytes_for_slab(int slab_idx) {
    return (slab_idx == 0)
               ? SUPERSLAB_SLAB0_USABLE_SIZE
               : SUPERSLAB_SLAB_USABLE_SIZE;
}

/**
* Calculate block address within a slab (linear allocation)
*
* @param base Slab base address (from tiny_slab_base_for_geometry)
* @param index Block index (0-based)
* @param stride Block stride (from tiny_stride_for_class)
* @return Pointer to block at given index
*/
static inline void* tiny_block_at_index(uint8_t* base, uint16_t index, size_t stride) {
    return (void*)(base + ((size_t)index * stride));
}

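/*
 * Putting the pieces together (sketch; `ss`, `cls`, `slab_idx` and `idx` are
 * hypothetical inputs supplied by the caller):
 *
 *   size_t   stride = tiny_stride_for_class(cls);
 *   uint16_t cap    = tiny_capacity_for_slab(slab_idx, stride);
 *   uint8_t* base   = tiny_slab_base_for_geometry(ss, slab_idx);
 *   if (idx < cap) {
 *       void* block = tiny_block_at_index(base, idx, stride);
 *       // ... hand `block` to the caller ...
 *   }
 */
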
/**
* Validate that a linear carve operation stays within slab bounds
*
* @param slab_idx Slab index
* @param carved Current carved count
* @param stride Block stride
* @param reserve Number of blocks to reserve
* @return 1 if operation is safe, 0 if it would exceed bounds
*/
static inline int tiny_carve_guard(int slab_idx,
                                   uint16_t carved,
                                   size_t stride,
                                   uint32_t reserve) {
    size_t usable = tiny_usable_bytes_for_slab(slab_idx);
    size_t needed = ((size_t)carved + (size_t)reserve) * stride;
    return needed <= usable;
}

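/*
 * Typical use before linearly carving `reserve` blocks out of a slab
 * (sketch; `meta` is a hypothetical per-slab bookkeeping struct, not part of
 * this header):
 *
 *   if (!tiny_carve_guard(slab_idx, meta->carved, stride, reserve)) {
 *       // Slab exhausted: fall back to another slab / refill path
 *   } else {
 *       void* first = tiny_block_at_index(base, meta->carved, stride);
 *       meta->carved += (uint16_t)reserve;
 *   }
 */
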
// ============================================================================
// Box 3 Debug Helpers (fail-fast validation)
// ============================================================================
/**
* Debug helper: verbose carve guard with diagnostics
*
* @param stage Debug stage name
* @param class_idx Class index
* @param slab_idx Slab index
* @param carved Current carved count
* @param used Current used count
* @param capacity Slab capacity
* @param stride Block stride
* @param reserve Blocks to reserve
* @return 1 if safe, 0 if the carve would exceed bounds (diagnostics are logged
* to stderr in debug builds; release builds reduce to tiny_carve_guard)
*/
static inline int tiny_carve_guard_verbose(const char* stage,
                                           int class_idx,
                                           int slab_idx,
                                           uint16_t carved,
                                           uint16_t used,
                                           uint16_t capacity,
                                           size_t stride,
                                           uint32_t reserve) {
#if HAKMEM_BUILD_RELEASE
    // Release: silently delegate to the plain guard (diagnostic-only params unused)
    (void)stage; (void)class_idx; (void)used; (void)capacity;
    return tiny_carve_guard(slab_idx, carved, stride, reserve);
#else
    size_t usable = tiny_usable_bytes_for_slab(slab_idx);
    size_t needed = ((size_t)carved + (size_t)reserve) * stride;
    if (__builtin_expect(needed > usable, 0)) {
        fprintf(stderr,
                "[LINEAR_GUARD] stage=%s cls=%d slab=%d carved=%u used=%u cap=%u "
                "stride=%zu reserve=%u needed=%zu usable=%zu\n",
                stage ? stage : "carve",
                class_idx,
                slab_idx,
                carved,
                used,
                capacity,
                stride,
                reserve,
                needed,
                usable);
        return 0;
    }
    return 1;
#endif
}

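/*
 * Example debug-path usage (sketch; the stage string and the `meta` counters
 * are hypothetical and come from whatever carve site calls this):
 *
 *   if (!tiny_carve_guard_verbose("refill", cls, slab_idx,
 *                                 meta->carved, meta->used, meta->capacity,
 *                                 stride, reserve)) {
 *       return NULL;  // Refuse the carve; diagnostics already on stderr
 *   }
 */
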
#endif // TINY_BOX_GEOMETRY_H