Following the safe order "complete migration → deletion" (Option C), fully remove the deprecated CoreContext mirror fields and sync helpers.

## Changes
- Migrated all access sites to core_ctx.* (SSOT allocators)
  - utils.rs: value_gen (3 sites), block_gen (1 site), temp_slot_counter (1 site)
  - loop_api.rs: block_gen (1 site)
  - phi_helpers.rs: value_gen (1 site)
  - builder.rs: sync helpers (2 methods), test assertions (2 sites)
- Removed 5 deprecated fields from builder.rs
  - value_gen: ValueIdGenerator
  - block_gen: BasicBlockIdGenerator
  - next_binding_id: u32
  - temp_slot_counter: u32
  - debug_join_counter: u32
- Removed sync helper calls from allocate_binding_id and debug_next_join_id
- Removed field initializations from MirBuilder::new()
- Maintained Phase 136 P0 SSOT structure (next_value_id() remains as the high-level API)

## Tests
- cargo build --release: PASS
- cargo test --release --lib: 1029 passed (4 pre-existing failures)
- Deprecation warnings: 435 → 456 (+21)

Note: Warnings increased because the remaining deprecated field *uses* are exposed once the field *definitions* are removed. This is expected during migration.

## Code metrics
- Net reduction: 40 lines (-56 removed, +16 modifications)
- Files changed: 4 (builder.rs, utils.rs, loop_api.rs, phi_helpers.rs)

Phase 2 Progress: 2/7 contexts complete (MetadataContext ✅, CoreContext ✅)
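A minimal sketch of the access-site migration pattern described above (illustrative only; the exact call sites and signatures live in utils.rs / loop_api.rs / phi_helpers.rs):

```rust
// Before: read the deprecated mirror field kept on MirBuilder (now removed)
//     let id = self.value_gen.next();
// After: go through CoreContext, the single source of truth (SSOT)
//     let id = self.core_ctx.value_gen.next();
// Counters follow the same pattern, e.g. self.core_ctx.next_binding() instead
// of bumping the removed next_binding_id field by hand.
```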
/*!
 * MIR Builder - Converts AST to MIR/SSA form
 *
 * Implements AST → MIR conversion with SSA construction
 */

use super::slot_registry::resolve_slot_by_type_name;
use super::{
    BasicBlock, BasicBlockId, BasicBlockIdGenerator, CompareOp, ConstValue, Effect, EffectMask,
    FunctionSignature, MirFunction, MirInstruction, MirModule, MirType, ValueId, ValueIdGenerator,
};
use crate::ast::{ASTNode, LiteralValue, Span};
use crate::mir::builder::builder_calls::CallTarget;
use crate::mir::region::function_slot_registry::FunctionSlotRegistry;
use crate::mir::region::RegionId;
use std::collections::HashSet;
use std::collections::{BTreeMap, HashMap};

mod builder_calls;
mod call_resolution; // ChatGPT5 Pro: Type-safe call resolution utilities
mod calls; // Call system modules (refactored from builder_calls)
mod binding_context; // Phase 136 follow-up (Step 4/7): BindingContext extraction
mod compilation_context; // Phase 136 follow-up (Step 7/7): CompilationContext extraction
mod context; // BoxCompilationContext - box-theory context separation for static Box compilation
mod core_context; // Phase 136 follow-up (Step 2/7): CoreContext extraction
mod metadata_context; // Phase 136 follow-up (Step 6/7): MetadataContext extraction
mod variable_context; // Phase 136 follow-up (Step 5/7): VariableContext extraction
mod decls; // declarations lowering split
mod exprs; // expression lowering split
mod exprs_call;
mod method_call_handlers; // Method call handler separation (Phase 3) // call(expr)
// include lowering removed (using is handled in runner)
mod control_flow; // thin wrappers to centralize control-flow entrypoints
mod exprs_lambda; // lambda lowering
mod exprs_peek; // peek expression
mod exprs_qmark; // ?-propagate
mod fields; // field access/assignment lowering split
mod if_form;
mod joinir_id_remapper; // Phase 189: JoinIR ID remapping (ValueId/BlockId translation)
mod joinir_inline_boundary_injector; // Phase 189: JoinInlineBoundary Copy instruction injector
mod lifecycle;
mod loop_frontend_binding; // Phase 50: Loop Frontend Binding (JoinIR variable mapping)
pub(crate) mod loops;
mod ops;
mod phi;
mod phi_merge; // Phase 25.1q: Unified PHI merge helper // prepare/lower_root/finalize split
// legacy large-match remains inline for now (planned extraction)
mod emission; // emission::* (thin emission boxes for Const/Compare/Branch)
mod emit_guard; // EmitGuardBox (final checkpoint right before emit)
mod metadata; // MetadataPropagationBox (type/origin propagation)
mod name_const; // NameConstBox (function-name Const generation)
mod observe; // P0: dev-only observability helpers (ssa/resolve)
mod origin; // P0: origin inference (me/Known) and lightweight PHI propagation
mod plugin_sigs; // plugin signature loader
mod receiver; // ReceiverMaterializationBox (pin + LocalSSA aggregation for Method recv)
mod rewrite; // P1: Known rewrite & special consolidation
mod router; // RouterPolicyBox (Unified vs BoxCall)
mod schedule; // BlockScheduleBox (physical order: PHI → materialize → body)
mod ssa; // LocalSSA helpers (in-block materialization)
mod stmts;
mod scope_context; // Phase 136 follow-up (Step 3/7): ScopeContext extraction
mod type_context; // Phase 136 follow-up: TypeContext extraction
mod type_facts; // Phase 136 follow-up: Type inference facts box
pub(crate) mod type_registry;
mod types; // types::annotation / inference (type annotation/inference boxes; inference runs later)
mod utils;
mod vars; // variables/scope helpers // small loop helpers (header/exit context) // TypeRegistryBox (centralized type information management)

// Unified member property kinds for computed/once/birth_once
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub(crate) enum PropertyKind {
    Computed,
    Once,
    BirthOnce,
}

/// MIR builder for converting AST to SSA form
pub struct MirBuilder {
    /// Current module being built
    pub(super) current_module: Option<MirModule>,

    /// [DEPRECATED] Current function being built
    /// Phase 136: Moved to scope_ctx.current_function (backward compat wrapper)
    #[deprecated(note = "Use scope_ctx.current_function instead")]
    pub(super) current_function: Option<MirFunction>,

    /// Current basic block being built
    pub(super) current_block: Option<BasicBlockId>,

    /// Phase 136 follow-up (Step 2/7): Core ID generation context
    /// Consolidates value_gen, block_gen, next_binding_id, temp_slot_counter, debug_join_counter.
    /// Direct field access for backward compatibility (migration in progress).
    pub(super) core_ctx: core_context::CoreContext,

    /// [DEPRECATED] Box theory: context separation for static box compilation
    /// Phase 136 Step 7/7: Moved to comp_ctx.compilation_context (backward compat wrapper)
    /// When Some(ctx), variable_map/value_origin_newbox/value_types are read from ctx;
    /// when None, the legacy fields are used (backward compatibility).
    #[deprecated(note = "Use comp_ctx.compilation_context instead")]
    pub(super) compilation_context: Option<context::BoxCompilationContext>,

    /// Phase 136 follow-up: Type information context
    /// Consolidates value_types, value_kinds, value_origin_newbox for better organization.
    /// Direct field access for backward compatibility (migration in progress).
    pub(super) type_ctx: type_context::TypeContext,

    /// Phase 136 follow-up (Step 3/7): Scope and control flow context
    /// Consolidates lexical_scope_stack, loop stacks, if_merge_stack, current_function,
    /// function_param_names, debug_scope_stack for better organization.
    /// Direct field access for backward compatibility (migration in progress).
    pub(super) scope_ctx: scope_context::ScopeContext,

    /// Phase 136 follow-up (Step 4/7): Binding context
    /// Consolidates binding_map (String -> BindingId mapping).
    /// Direct field access for backward compatibility (migration in progress).
    pub(super) binding_ctx: binding_context::BindingContext,

    /// Phase 136 follow-up (Step 5/7): Variable context
    /// Consolidates variable_map (String -> ValueId mapping for SSA conversion).
    /// Direct field access for backward compatibility (migration in progress).
    pub(super) variable_ctx: variable_context::VariableContext,

    /// Phase 136 follow-up (Step 6/7): Metadata context
    /// Consolidates current_span, source_file, hint_sink, current_region_stack.
    /// Direct field access for backward compatibility (migration in progress).
    pub(super) metadata_ctx: metadata_context::MetadataContext,

    /// Phase 136 follow-up (Step 7/7): Compilation context
    /// Consolidates compilation_context, current_static_box, user_defined_boxes, reserved_value_ids,
    /// fn_body_ast, weak_fields_by_box, property_getters_by_box, field_origin_class, field_origin_by_box,
    /// static_method_index, method_tail_index, type_registry, current_slot_registry, plugin_method_sigs.
    /// Direct field access for backward compatibility (migration in progress).
    pub(super) comp_ctx: compilation_context::CompilationContext,

    /// [DEPRECATED] Variable name to ValueId mapping (for SSA conversion)
    /// Phase 136 Step 5/7: Moved to variable_ctx.variable_map (backward compat wrapper)
    /// Note: not used when compilation_context is Some
    /// Phase 25.1: HashMap → BTreeMap (deterministic PHI generation)
    #[deprecated(note = "Use variable_ctx.variable_map instead")]
    pub(super) variable_map: BTreeMap<String, ValueId>,

    /// [DEPRECATED] Lexical scope stack for block-scoped `local` declarations.
    /// Phase 136: Moved to scope_ctx.lexical_scope_stack (backward compat wrapper)
    #[deprecated(note = "Use scope_ctx.lexical_scope_stack instead")]
    lexical_scope_stack: Vec<vars::lexical_scope::LexicalScopeFrame>,

    /// Pending phi functions to be inserted
    #[allow(dead_code)]
    pub(super) pending_phis: Vec<(BasicBlockId, ValueId, String)>,

    /// [DEPRECATED] Origin tracking for simple optimizations (e.g., object.method after new)
    /// Maps a ValueId to the class name if it was produced by NewBox of that class
    /// Note: not used when compilation_context is Some
    /// Phase 136: Moved to type_ctx.value_origin_newbox (backward compat wrapper)
    // Phase 25.1: HashMap → BTreeMap (determinism)
    #[deprecated(note = "Use type_ctx.value_origin_newbox instead")]
    pub(super) value_origin_newbox: BTreeMap<ValueId, String>,

    /// [DEPRECATED] Names of user-defined boxes declared in the current module
    /// Phase 136 Step 7/7: Moved to comp_ctx.user_defined_boxes (backward compat wrapper)
    #[deprecated(note = "Use comp_ctx.user_defined_boxes instead")]
    pub(super) user_defined_boxes: HashSet<String>,

    /// [DEPRECATED] Weak field registry: BoxName -> {weak field names}
    /// Phase 136 Step 7/7: Moved to comp_ctx.weak_fields_by_box (backward compat wrapper)
    #[deprecated(note = "Use comp_ctx.weak_fields_by_box instead")]
    pub(super) weak_fields_by_box: HashMap<String, HashSet<String>>,

    /// [DEPRECATED] Unified members: BoxName -> {propName -> Kind}
    /// Phase 136 Step 7/7: Moved to comp_ctx.property_getters_by_box (backward compat wrapper)
    #[deprecated(note = "Use comp_ctx.property_getters_by_box instead")]
    pub(super) property_getters_by_box: HashMap<String, HashMap<String, PropertyKind>>,

    /// [DEPRECATED] Remember class of object fields after assignments: (base_id, field) -> class_name
    /// Phase 136 Step 7/7: Moved to comp_ctx.field_origin_class (backward compat wrapper)
    #[deprecated(note = "Use comp_ctx.field_origin_class instead")]
    pub(super) field_origin_class: HashMap<(ValueId, String), String>,
    /// [DEPRECATED] Class-level field origin (cross-function heuristic): (BaseBoxName, field) -> FieldBoxName
    /// Phase 136 Step 7/7: Moved to comp_ctx.field_origin_by_box (backward compat wrapper)
    #[deprecated(note = "Use comp_ctx.field_origin_by_box instead")]
    pub(super) field_origin_by_box: HashMap<(String, String), String>,

    /// [DEPRECATED] Optional per-value type annotations (MIR-level): ValueId -> MirType
    /// Note: not used when compilation_context is Some
    /// Phase 136: Moved to type_ctx.value_types (backward compat wrapper)
    // Phase 25.1: HashMap → BTreeMap (determinism)
    #[deprecated(note = "Use type_ctx.value_types instead")]
    pub(super) value_types: BTreeMap<ValueId, super::MirType>,

    /// [DEPRECATED] Phase 26-A: ValueId type information map (type-safety hardening)
    /// ValueId -> MirValueKind mapping
    /// - GUARD bug prevention: distinguishes whether ValueId(0) is a Parameter or a Local
    /// - Default: unregistered ValueIds are treated as Temporary
    /// Phase 136: Moved to type_ctx.value_kinds (backward compat wrapper)
    #[deprecated(note = "Use type_ctx.value_kinds instead")]
    pub(super) value_kinds: HashMap<ValueId, super::MirValueKind>,

    /// [DEPRECATED] Function-scoped SlotRegistry (observation only)
    /// Phase 136 Step 7/7: Moved to comp_ctx.current_slot_registry (backward compat wrapper)
    /// - Shares the lifecycle of current_function.
    /// - Does not affect the existing variable_map/SSA (metadata only).
    #[deprecated(note = "Use comp_ctx.current_slot_registry instead")]
    pub(super) current_slot_registry: Option<FunctionSlotRegistry>,

    /// [DEPRECATED] 🎯 Box theory: centralized type information management (TypeRegistryBox)
    /// Phase 136 Step 7/7: Moved to comp_ctx.type_registry (backward compat wrapper)
    /// Enabled via NYASH_USE_TYPE_REGISTRY=1 (for staged migration)
    #[deprecated(note = "Use comp_ctx.type_registry instead")]
    pub(super) type_registry: type_registry::TypeRegistry,

    /// [DEPRECATED] Plugin method return type signatures loaded from nyash_box.toml
    /// Phase 136 Step 7/7: Moved to comp_ctx.plugin_method_sigs (backward compat wrapper)
    #[deprecated(note = "Use comp_ctx.plugin_method_sigs instead")]
    plugin_method_sigs: HashMap<(String, String), super::MirType>,
    /// [DEPRECATED] Current static box name when lowering a static box body (e.g., "Main")
    /// Phase 136 Step 7/7: Moved to comp_ctx.current_static_box (backward compat wrapper)
    #[deprecated(note = "Use comp_ctx.current_static_box instead")]
    current_static_box: Option<String>,
    /// [DEPRECATED] Index of static methods seen during lowering: name -> [(BoxName, arity)]
    /// Phase 136 Step 7/7: Moved to comp_ctx.static_method_index (backward compat wrapper)
    #[deprecated(note = "Use comp_ctx.static_method_index instead")]
    pub(super) static_method_index: std::collections::HashMap<String, Vec<(String, usize)>>,

    /// [DEPRECATED] Function parameter names (for LoopForm PHI construction)
    /// Phase 136: Moved to scope_ctx.function_param_names (backward compat wrapper)
    #[deprecated(note = "Use scope_ctx.function_param_names instead")]
    pub(super) function_param_names: HashSet<String>,

    /// [DEPRECATED] Fast lookup: method+arity tail → candidate function names (e.g., ".str/0" → ["JsonNode.str/0", ...])
    /// Phase 136 Step 7/7: Moved to comp_ctx.method_tail_index (backward compat wrapper)
    #[deprecated(note = "Use comp_ctx.method_tail_index instead")]
    pub(super) method_tail_index: std::collections::HashMap<String, Vec<String>>,
    /// [DEPRECATED] Source size snapshot to detect when to rebuild the tail index
    /// Phase 136 Step 7/7: Moved to comp_ctx.method_tail_index_source_len (backward compat wrapper)
    #[deprecated(note = "Use comp_ctx.method_tail_index_source_len instead")]
    pub(super) method_tail_index_source_len: usize,

    /// [DEPRECATED] Phase 200-C: Original function body AST for capture analysis
    /// Phase 136 Step 7/7: Moved to comp_ctx.fn_body_ast (backward compat wrapper)
    /// Stored temporarily during function lowering to support FunctionScopeCaptureAnalyzer.
    /// None when not lowering a function, or when fn_body is not available.
    #[deprecated(note = "Use comp_ctx.fn_body_ast instead")]
    pub(super) fn_body_ast: Option<Vec<ASTNode>>,

    /// [DEPRECATED] Phase 201-A: Reserved ValueIds that must not be allocated
    /// Phase 136 Step 7/7: Moved to comp_ctx.reserved_value_ids (backward compat wrapper)
    /// These are PHI dst ValueIds created by LoopHeaderPhiBuilder.
    /// When next_value_id() encounters a reserved ID, it skips to the next.
    /// Cleared after JoinIR merge completes.
    #[deprecated(note = "Use comp_ctx.reserved_value_ids instead")]
    pub(super) reserved_value_ids: HashSet<ValueId>,

    /// [DEPRECATED] Phase 74: BindingId mapping for lexical variable bindings
    /// Phase 136: Moved to binding_ctx.binding_map (backward compat wrapper)
    #[deprecated(note = "Use binding_ctx.binding_map instead")]
    pub binding_map: BTreeMap<String, super::BindingId>,

    // include guards removed
    /// [DEPRECATED] Loop context stacks for lowering break/continue inside nested control flow
    /// Phase 136: Moved to scope_ctx.loop_header_stack (backward compat wrapper)
    #[deprecated(note = "Use scope_ctx.loop_header_stack instead")]
    pub(super) loop_header_stack: Vec<BasicBlockId>,
    #[allow(dead_code)]
    #[deprecated(note = "Use scope_ctx.loop_exit_stack instead")]
    pub(super) loop_exit_stack: Vec<BasicBlockId>,

    /// [DEPRECATED] If/merge context stack (innermost first)
    /// Phase 136: Moved to scope_ctx.if_merge_stack (backward compat wrapper)
    #[deprecated(note = "Use scope_ctx.if_merge_stack instead")]
    pub(super) if_merge_stack: Vec<BasicBlockId>,

    // Phase M: no_phi_mode field removed (PHI is always used)

    // ---- Try/Catch/Cleanup lowering context ----
    /// When true, `return` statements are deferred: they assign to `return_defer_slot`
    /// and jump to `return_defer_target` (typically the cleanup/exit block).
    pub(super) return_defer_active: bool,
    /// Slot value to receive deferred return values (edge-copy mode friendly).
    pub(super) return_defer_slot: Option<ValueId>,
    /// Target block to jump to on deferred return.
    pub(super) return_defer_target: Option<BasicBlockId>,
    /// Set to true when a deferred return has been emitted in the current context.
    pub(super) return_deferred_emitted: bool,
    /// True while lowering the cleanup block.
    pub(super) in_cleanup_block: bool,
    /// Policy flags (snapshotted at entry of try/catch lowering)
    pub(super) cleanup_allow_return: bool,
    pub(super) cleanup_allow_throw: bool,

    /// If true, skip entry materialization of pinned slots on the next start_new_block call.
    suppress_pin_entry_copy_next: bool,

    // ----------------------
    // Debug scope context (dev only; zero-cost when unused)
    // ----------------------
    /// [DEPRECATED] Stack of region identifiers like "loop#1/header" or "join#3/join".
    /// Phase 136: Moved to scope_ctx.debug_scope_stack (backward compat wrapper)
    #[deprecated(note = "Use scope_ctx.debug_scope_stack instead")]
    debug_scope_stack: Vec<String>,

    /// Local SSA cache: ensure per-block materialization for critical operands (e.g., recv)
    /// Key: (bb, original ValueId, kind) -> local ValueId
    /// kind: 0=recv, 1+ reserved for future (args etc.)
    pub(super) local_ssa_map: HashMap<(BasicBlockId, ValueId, u8), ValueId>,
    /// BlockSchedule cache: deduplicate materialize copies per (bb, src)
    pub(super) schedule_mat_map: HashMap<(BasicBlockId, ValueId), ValueId>,
    /// Mapping from ValueId to its pin slot name (e.g., "__pin$3$@recv")
    /// Used by LocalSSA to redirect old pinned values to the latest slot value.
    pub(super) pin_slot_names: HashMap<ValueId, String>,

    /// Guard flag to prevent re-entering emit_unified_call from BoxCall fallback.
    /// Used when RouterPolicyBox in emit_unified_call has already decided to
    /// route a given Method call to BoxCall; emit_box_or_plugin_call must not
    /// bounce back into the unified path for the same call, otherwise an
    /// infinite recursion (emit_unified_call → emit_box_or_plugin_call →
    /// emit_unified_call …) can occur when routing decisions disagree.
    pub(super) in_unified_boxcall_fallback: bool,

    /// Recursion depth counter for debugging stack overflow
    /// Tracks the depth of build_expression calls to detect infinite loops
    pub(super) recursion_depth: usize,

    /// Root lowering mode: how to treat top-level Program
    /// - None: not decided yet (lower_root not called)
    /// - Some(true): App mode (static box Main.main is entry)
    /// - Some(false): Script/Test mode (top-level Program runs sequentially)
    pub(super) root_is_app_mode: Option<bool>,

    /// 🎯 Phase 21.7: Static box singleton instances for methodization
    /// Maps BoxName → ValueId of singleton instance (created on demand)
    /// Used when HAKO_MIR_BUILDER_METHODIZE=1 to convert Global("BoxName.method/arity")
    /// to Method{receiver=singleton} calls
    pub(super) static_box_singletons: HashMap<String, ValueId>,
}

impl MirBuilder {
    /// Create a new MIR builder
    pub fn new() -> Self {
        let plugin_method_sigs = plugin_sigs::load_plugin_method_sigs();
        let core_ctx = core_context::CoreContext::new();

        // Phase 136 Step 7/7: Compilation context (new SSOT)
        let comp_ctx =
            compilation_context::CompilationContext::with_plugin_sigs(plugin_method_sigs.clone());

        // Phase M: no_phi_mode initialization removed
        #[allow(deprecated)]
        Self {
            current_module: None,
            current_function: None,
            current_block: None,

            // Phase 136 Step 2/7: Core context (new SSOT)
            core_ctx,

            compilation_context: None, // Box theory: default is the legacy mode
            type_ctx: type_context::TypeContext::new(), // Phase 136: Type context
            scope_ctx: scope_context::ScopeContext::new(), // Phase 136 Step 3/7: Scope context
            binding_ctx: binding_context::BindingContext::new(), // Phase 136 Step 4/7: Binding context
            variable_ctx: variable_context::VariableContext::new(), // Phase 136 Step 5/7: Variable context
            metadata_ctx: metadata_context::MetadataContext::new(), // Phase 136 Step 6/7: Metadata context
            comp_ctx, // Phase 136 Step 7/7: Compilation context
            variable_map: BTreeMap::new(), // Phase 25.1: determinism (backward compat)
            lexical_scope_stack: Vec::new(),
            pending_phis: Vec::new(),
            value_origin_newbox: BTreeMap::new(), // Phase 25.1: determinism (backward compat)
            user_defined_boxes: HashSet::new(),
            weak_fields_by_box: HashMap::new(),
            property_getters_by_box: HashMap::new(),
            field_origin_class: HashMap::new(),
            field_origin_by_box: HashMap::new(),
            value_types: BTreeMap::new(), // Phase 25.1: determinism (backward compat)
            value_kinds: HashMap::new(),  // Phase 26-A: typed ValueIds (backward compat)
            current_slot_registry: None,
            type_registry: type_registry::TypeRegistry::new(),
            plugin_method_sigs,
            current_static_box: None,
            static_method_index: std::collections::HashMap::new(),
            function_param_names: HashSet::new(),
            method_tail_index: std::collections::HashMap::new(),
            method_tail_index_source_len: 0,

            fn_body_ast: None,                  // Phase 200-C: Initialize to None
            reserved_value_ids: HashSet::new(), // Phase 201-A: Initialize to empty

            binding_map: BTreeMap::new(), // Phase 74: Initialize BindingId mapping

            loop_header_stack: Vec::new(),
            loop_exit_stack: Vec::new(),
            if_merge_stack: Vec::new(),
            // Phase M: no_phi_mode field removed
            return_defer_active: false,
            return_defer_slot: None,
            return_defer_target: None,
            return_deferred_emitted: false,
            in_cleanup_block: false,
            cleanup_allow_return: false,
            cleanup_allow_throw: false,
            suppress_pin_entry_copy_next: false,

            // Debug scope context
            debug_scope_stack: Vec::new(),

            local_ssa_map: HashMap::new(),
            schedule_mat_map: HashMap::new(),
            pin_slot_names: HashMap::new(),

            in_unified_boxcall_fallback: false,
            recursion_depth: 0,
            root_is_app_mode: None,
            static_box_singletons: HashMap::new(), // Phase 21.7: methodization support
        }
    }

    // ---- Phase 136: TypeContext synchronization helpers ----
    /// Sync type_ctx changes back to legacy fields (backward compatibility)
    #[allow(deprecated)]
    fn sync_type_ctx_to_legacy(&mut self) {
        self.value_types = self.type_ctx.value_types.clone();
        self.value_kinds = self.type_ctx.value_kinds.clone();
        self.value_origin_newbox = self.type_ctx.value_origin_newbox.clone();
    }

    /// Sync legacy field changes to type_ctx (backward compatibility)
    #[allow(deprecated)]
    fn sync_legacy_to_type_ctx(&mut self) {
        self.type_ctx.value_types = self.value_types.clone();
        self.type_ctx.value_kinds = self.value_kinds.clone();
        self.type_ctx.value_origin_newbox = self.value_origin_newbox.clone();
    }

    // ---- Phase 136 Step 3/7: ScopeContext synchronization helpers ----
    /// Sync scope_ctx changes back to legacy fields (backward compatibility)
    #[allow(deprecated)]
    fn sync_scope_ctx_to_legacy(&mut self) {
        self.lexical_scope_stack = self.scope_ctx.lexical_scope_stack.clone();
        self.loop_header_stack = self.scope_ctx.loop_header_stack.clone();
        self.loop_exit_stack = self.scope_ctx.loop_exit_stack.clone();
        self.if_merge_stack = self.scope_ctx.if_merge_stack.clone();
        self.current_function = self.scope_ctx.current_function.clone();
        self.function_param_names = self.scope_ctx.function_param_names.clone();
        self.debug_scope_stack = self.scope_ctx.debug_scope_stack.clone();
    }

    /// Sync legacy field changes to scope_ctx (backward compatibility)
    #[allow(deprecated)]
    fn sync_legacy_to_scope_ctx(&mut self) {
        self.scope_ctx.lexical_scope_stack = self.lexical_scope_stack.clone();
        self.scope_ctx.loop_header_stack = self.loop_header_stack.clone();
        self.scope_ctx.loop_exit_stack = self.loop_exit_stack.clone();
        self.scope_ctx.if_merge_stack = self.if_merge_stack.clone();
        self.scope_ctx.current_function = self.current_function.clone();
        self.scope_ctx.function_param_names = self.function_param_names.clone();
        self.scope_ctx.debug_scope_stack = self.debug_scope_stack.clone();
    }

    // ---- Phase 136 Step 4/7: BindingContext synchronization helpers ----
    /// Sync binding_ctx changes back to legacy fields (backward compatibility)
    #[allow(deprecated)]
    fn sync_binding_ctx_to_legacy(&mut self) {
        self.binding_map = self.binding_ctx.binding_map.clone();
    }

    /// Sync legacy field changes to binding_ctx (backward compatibility)
    #[allow(deprecated)]
    fn sync_legacy_to_binding_ctx(&mut self) {
        self.binding_ctx.binding_map = self.binding_map.clone();
    }

    /// Phase 136 Step 5/7: Sync variable_ctx changes back to legacy field (backward compatibility)
    #[allow(deprecated)]
    fn sync_variable_ctx_to_legacy(&mut self) {
        self.variable_map = self.variable_ctx.variable_map.clone();
    }

    /// Phase 136 Step 5/7: Sync legacy field changes to variable_ctx (backward compatibility)
    #[allow(deprecated)]
    fn sync_legacy_to_variable_ctx(&mut self) {
        self.variable_ctx.variable_map = self.variable_map.clone();
    }

    /// Push/pop helpers for If merge context (best-effort; optional usage)
    #[allow(deprecated)]
    pub(super) fn push_if_merge(&mut self, bb: BasicBlockId) {
        // Phase 136 Step 3/7: Update both scope_ctx (SSOT) and legacy field (backward compat)
        self.scope_ctx.push_if_merge(bb);
        self.if_merge_stack.push(bb);
    }
    #[allow(deprecated)]
    pub(super) fn pop_if_merge(&mut self) {
        // Phase 136 Step 3/7: Update both scope_ctx (SSOT) and legacy field (backward compat)
        let _ = self.scope_ctx.pop_if_merge();
        let _ = self.if_merge_stack.pop();
    }

    /// Suppress entry pin copy for the next start_new_block (used for merge blocks).
    pub(super) fn suppress_next_entry_pin_copy(&mut self) {
        self.suppress_pin_entry_copy_next = true;
    }

    // ---- Phase 74: BindingId allocation ----
    /// Allocate a new BindingId (parallel to ValueId allocation)
    ///
    /// ## Parallel ValueId/BindingId Allocation
    ///
    /// BindingId allocation is completely independent from ValueId allocation:
    /// - `next_value_id()` increments `value_gen` counter
    /// - `allocate_binding_id()` increments `next_binding_id` counter
    ///
    /// This parallelism enables:
    /// 1. **Stable binding identity** across SSA transformations
    /// 2. **Independent shadowing tracking** separate from SSA renaming
    /// 3. **Future ScopeManager migration** (Phase 75+) without breaking SSA
    ///
    /// Example:
    /// ```ignore
    /// // local x = 1;     <- allocate_binding_id() -> BindingId(0)
    /// //                     next_value_id()       -> ValueId(10)
    /// // {
    /// //     local x = 2; <- allocate_binding_id() -> BindingId(1)
    /// //                     next_value_id()       -> ValueId(20)
    /// // }
    /// ```
    pub fn allocate_binding_id(&mut self) -> super::BindingId {
        // Phase 136 Step 2/7 + Phase 2-2: Use core_ctx as SSOT (no sync needed)
        self.core_ctx.next_binding()
    }

    // ---- Hint helpers (no-op by default) ----
    // Phase 136 Step 6/7: Delegate to metadata_ctx with legacy sync
    #[inline]
    pub(crate) fn hint_scope_enter(&mut self, id: u32) {
        self.metadata_ctx.hint_scope_enter(id);
    }
    #[inline]
    pub(crate) fn hint_scope_leave(&mut self, id: u32) {
        self.metadata_ctx.hint_scope_leave(id);
    }
    #[inline]
    pub(crate) fn hint_join_result<S: Into<String>>(&mut self, var: S) {
        self.metadata_ctx.hint_join_result(var);
    }

    // ----------------------
    // Debug scope helpers (region_id for DebugHub events)
    // ----------------------
    #[inline]
    pub(crate) fn debug_next_join_id(&mut self) -> u32 {
        // Phase 136 Step 2/7 + Phase 2-2: Use core_ctx as SSOT (no sync needed)
        self.core_ctx.next_debug_join()
    }

    #[inline]
    #[allow(deprecated)]
    pub(crate) fn debug_push_region<S: Into<String>>(&mut self, region: S) {
        // Phase 136 Step 3/7: Update both scope_ctx (SSOT) and legacy field (backward compat)
        let region = region.into();
        self.scope_ctx.debug_push_region(region.clone());
        self.debug_scope_stack.push(region);
    }

    #[inline]
    #[allow(deprecated)]
    pub(crate) fn debug_pop_region(&mut self) {
        // Phase 136 Step 3/7: Update both scope_ctx (SSOT) and legacy field (backward compat)
        self.scope_ctx.debug_pop_region();
        let _ = self.debug_scope_stack.pop();
    }

    #[inline]
    #[allow(deprecated)]
    pub(crate) fn debug_current_region_id(&self) -> Option<String> {
        // Phase 136 Step 3/7: Read from scope_ctx (SSOT)
        self.scope_ctx.debug_current_region_id()
    }

    /// Hint for downstream metadata: set the logical source file name/path for the next build.
    /// Phase 136 Step 6/7: Delegate to metadata_ctx
    pub fn set_source_file_hint<S: Into<String>>(&mut self, source: S) {
        self.metadata_ctx.set_source_file(source);
    }

    /// Clear the source file hint (used when reusing the builder across modules).
    /// Phase 136 Step 6/7: Delegate to metadata_ctx
    pub fn clear_source_file_hint(&mut self) {
        self.metadata_ctx.clear_source_file();
    }

    /// Resolve current source file hint (builder field or env fallback).
    /// Phase 136 Step 6/7: Delegate to metadata_ctx
    fn current_source_file(&self) -> Option<String> {
        self.metadata_ctx
            .current_source_file()
            .or_else(|| std::env::var("NYASH_SOURCE_FILE_HINT").ok())
    }

    /// Create a new MirFunction with source metadata applied.
    fn new_function_with_metadata(
        &self,
        signature: FunctionSignature,
        entry_block: BasicBlockId,
    ) -> MirFunction {
        let mut f = MirFunction::new(signature, entry_block);
        f.metadata.source_file = self.current_source_file();
        f
    }

    // ----------------------
    // Compile trace helpers (dev only; env-gated)
    // ----------------------
    #[inline]
    pub(super) fn compile_trace_enabled() -> bool {
        std::env::var("NYASH_MIR_COMPILE_TRACE").ok().as_deref() == Some("1")
    }

    #[inline]
    pub(super) fn trace_compile<S: AsRef<str>>(&self, msg: S) {
        if Self::compile_trace_enabled() {
            eprintln!("[mir-compile] {}", msg.as_ref());
        }
    }

    // ----------------------
    // Method tail index (performance helper)
    // ----------------------
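    // Example (illustrative, based on the method_tail_index doc above): a lowered
    // function named "JsonNode.str/0" is indexed under the tail key ".str/0", so both
    // method_candidates("str", 0) and method_candidates_tail(".str/0") would return
    // ["JsonNode.str/0"] once the current module contains that function.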
    fn rebuild_method_tail_index(&mut self) {
        self.method_tail_index.clear();
        if let Some(ref module) = self.current_module {
            for name in module.functions.keys() {
                if let (Some(dot), Some(slash)) = (name.rfind('.'), name.rfind('/')) {
                    if slash > dot {
                        let tail = &name[dot..];
                        self.method_tail_index
                            .entry(tail.to_string())
                            .or_insert_with(Vec::new)
                            .push(name.clone());
                    }
                }
            }
            self.method_tail_index_source_len = module.functions.len();
        } else {
            self.method_tail_index_source_len = 0;
        }
    }

    fn ensure_method_tail_index(&mut self) {
        let need_rebuild = match self.current_module {
            Some(ref refmod) => self.method_tail_index_source_len != refmod.functions.len(),
            None => self.method_tail_index_source_len != 0,
        };
        if need_rebuild {
            self.rebuild_method_tail_index();
        }
    }

    pub(super) fn method_candidates(&mut self, method: &str, arity: usize) -> Vec<String> {
        self.ensure_method_tail_index();
        let tail = format!(".{}/{}", method, arity);
        self.method_tail_index
            .get(&tail)
            .cloned()
            .unwrap_or_default()
    }

    pub(super) fn method_candidates_tail<S: AsRef<str>>(&mut self, tail: S) -> Vec<String> {
        self.ensure_method_tail_index();
        self.method_tail_index
            .get(tail.as_ref())
            .cloned()
            .unwrap_or_default()
    }

    /// Build a complete MIR module from AST
    pub fn build_module(&mut self, ast: ASTNode) -> Result<MirModule, String> {
        self.prepare_module()?;
        let result_value = self.lower_root(ast)?;
        self.finalize_module(result_value)
    }

    /// Build an expression and return its value ID
    pub(super) fn build_expression(&mut self, ast: ASTNode) -> Result<ValueId, String> {
        // Delegated to exprs.rs to keep this file lean
        // Debug: Track recursion depth to detect infinite loops
        const MAX_RECURSION_DEPTH: usize = 200;
        self.recursion_depth += 1;
        if self.recursion_depth > MAX_RECURSION_DEPTH {
            eprintln!("\n[FATAL] ============================================");
            eprintln!(
                "[FATAL] Recursion depth exceeded {} in build_expression",
                MAX_RECURSION_DEPTH
            );
            eprintln!("[FATAL] Current depth: {}", self.recursion_depth);
            eprintln!("[FATAL] AST node type: {:?}", std::mem::discriminant(&ast));
            eprintln!("[FATAL] ============================================\n");
            return Err(format!(
                "Recursion depth exceeded: {} (possible infinite loop)",
                self.recursion_depth
            ));
        }

        let result = self.build_expression_impl(ast);
        self.recursion_depth -= 1;
        result
    }

    /// Build a literal value
    pub(super) fn build_literal(&mut self, literal: LiteralValue) -> Result<ValueId, String> {
        // Determine type without moving literal
        let ty_for_dst = match &literal {
            LiteralValue::Integer(_) => Some(super::MirType::Integer),
            LiteralValue::Float(_) => Some(super::MirType::Float),
            LiteralValue::Bool(_) => Some(super::MirType::Bool),
            LiteralValue::String(_) => Some(super::MirType::String),
            _ => None,
        };

        // Emit via ConstantEmissionBox (unified route; semantics unchanged)
        let dst = match literal {
            LiteralValue::Integer(n) => {
                crate::mir::builder::emission::constant::emit_integer(self, n)
            }
            LiteralValue::Float(f) => crate::mir::builder::emission::constant::emit_float(self, f),
            LiteralValue::String(s) => {
                crate::mir::builder::emission::constant::emit_string(self, s)
            }
            LiteralValue::Bool(b) => crate::mir::builder::emission::constant::emit_bool(self, b),
            LiteralValue::Null => crate::mir::builder::emission::constant::emit_null(self),
            LiteralValue::Void => crate::mir::builder::emission::constant::emit_void(self),
        };
        // Annotate type
        if let Some(ty) = ty_for_dst {
            self.value_types.insert(dst, ty);
        }

        Ok(dst)
    }

    /// Build variable access
    pub(super) fn build_variable_access(&mut self, name: String) -> Result<ValueId, String> {
        // Step 5-5-G: __pin$ variables should NEVER be accessed from variable_map.
        // They are transient temporaries created during expression building and
        // should not persist across blocks. If we see one here, it's a compiler bug.
        if name.starts_with("__pin$") {
            return Err(format!(
                "COMPILER BUG: Attempt to access __pin$ temporary '{}' from variable_map. \
                 __pin$ variables should only exist as direct SSA values, not as named variables.",
                name
            ));
        }

        if let Some(&value_id) = self.variable_map.get(&name) {
            Ok(value_id)
        } else {
            Err(self.undefined_variable_message(&name))
        }
    }

    pub(in crate::mir::builder) fn undefined_variable_message(&self, name: &str) -> String {
        // Enhance diagnostics via the `using` simple registry (Phase 1)
        let mut msg = format!("Undefined variable: {}", name);

        // Stage-3 keyword diagnostic (local/flow/try/catch/throw)
        if name == "local" && !crate::config::env::parser_stage3_enabled() {
            msg.push_str("\nHint: 'local' is a Stage-3 keyword. Prefer NYASH_FEATURES=stage3 (legacy: NYASH_PARSER_STAGE3=1 / HAKO_PARSER_STAGE3=1 for Stage-B).");
            msg.push_str("\nFor AotPrep verification, use tools/hakorune_emit_mir.sh which sets these automatically.");
        } else if (name == "flow" || name == "try" || name == "catch" || name == "throw")
            && !crate::config::env::parser_stage3_enabled()
        {
            msg.push_str(&format!("\nHint: '{}' is a Stage-3 keyword. Prefer NYASH_FEATURES=stage3 (legacy: NYASH_PARSER_STAGE3=1 / HAKO_PARSER_STAGE3=1 for Stage-B).", name));
        }

        let suggest = crate::using::simple_registry::suggest_using_for_symbol(name);
        if !suggest.is_empty() {
            msg.push_str("\nHint: symbol appears in using module(s): ");
            msg.push_str(&suggest.join(", "));
            msg.push_str("\nConsider adding 'using <module> [as Alias]' or check nyash.toml [using].");
        }

        msg
    }

    /// Build assignment
    pub(super) fn build_assignment(
        &mut self,
        var_name: String,
        value: ASTNode,
    ) -> Result<ValueId, String> {
        // SSOT (LANGUAGE_REFERENCE_2025 / syntax-cheatsheet):
        // - Assignment to an undeclared name is an error.
        // - Use `local name = ...` (or `local name; name = ...`) to declare.
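        // Example (illustrative of the rule above, in Nyash-style pseudocode):
        //     local count = 0    // declares `count`; a later `count = count + 1` re-assigns it
        //     total = 1          // rejected here unless `total` was previously declared with `local`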
        vars::assignment_resolver::AssignmentResolverBox::ensure_declared(self, &var_name)?;

        let value_id = self.build_expression(value)?;

        // Step 5-5-E: FIX variable map corruption bug
        // REMOVED pin_to_slot() call - it was causing __pin$ temporaries to overwrite
        // real variable names in the variable map.
        //
        // Root cause: pin_to_slot(raw_value_id, "@assign") would sometimes return
        // a ValueId from a previous __pin$ temporary (e.g., __pin$767$@binop_lhs),
        // causing variable_map["m"] to point to the wrong ValueId.
        //
        // SSA + PHI merges work correctly without explicit pinning here.
        // The expression building already creates necessary temporaries.

        // Step 5-5-F: NEVER insert __pin$ temporaries into variable_map
        // __pin$ variables are transient compiler-generated temporaries that should
        // never be tracked as real variables. They are used only within expression
        // building and should not persist across blocks or loops.
        //
        // BUG FIX: Previously, __pin$ variables would be inserted into variable_map,
        // causing stale references after LoopForm transformation renumbers blocks.
        // Result: VM would try to read undefined ValueIds (e.g., ValueId(270) at bb303).
        if !var_name.starts_with("__pin$") {
            // In SSA form, each assignment creates a new value
            self.variable_map.insert(var_name.clone(), value_id);
        }

        Ok(value_id)
    }

    /// Emit an instruction to the current basic block
    pub(super) fn emit_instruction(&mut self, instruction: MirInstruction) -> Result<(), String> {
        let block_id = self.current_block.ok_or("No current basic block")?;

        // Make instruction mutable for potential receiver materialization
        let mut instruction = instruction;

        // Precompute debug metadata to avoid borrow conflicts later
        let _dbg_fn_name = self
            .current_function
            .as_ref()
            .map(|f| f.signature.name.clone());
        let _dbg_region_id = self.debug_current_region_id();
        // P0: do the lightweight PHI reinforcement and observation before taking the
        // function's block, to avoid borrow conflicts
        if let MirInstruction::Phi { dst, inputs, .. } = &instruction {
            origin::phi::propagate_phi_meta(self, *dst, inputs);
            observe::ssa::emit_phi(self, *dst, inputs);
        }

        // CRITICAL: Final receiver materialization for MethodCall
        // This ensures the receiver has an in-block definition in the same block as the Call.
        // Must happen BEFORE function mutable borrow to avoid borrowck conflicts.
        if let MirInstruction::Call {
            callee: Some(callee),
            dst,
            args,
            effects,
            ..
        } = &instruction
        {
            use crate::mir::definitions::call_unified::Callee;
            if let Callee::Method {
                box_name,
                method,
                receiver: Some(r),
                certainty,
                box_kind,
            } = callee.clone()
            {
                // LocalSSA: ensure receiver has a Copy in current_block
                let r_local = crate::mir::builder::ssa::local::recv(self, r);

                // Update instruction with materialized receiver
                let new_callee = Callee::Method {
                    box_name: box_name.clone(),
                    method: method.clone(),
                    receiver: Some(r_local),
                    certainty,
                    box_kind,
                };
                instruction = MirInstruction::Call {
                    dst: *dst,
                    func: crate::mir::ValueId::INVALID, // Legacy dummy (not a real SSA use)
                    callee: Some(new_callee),
                    args: args.clone(),
                    effects: *effects,
                };
            }
        }

        if let Some(ref mut function) = self.current_function {
            // Pre-capture branch/jump targets for predecessor update after we finish
            // mutably borrowing the current block.
            let (then_t, else_t, jump_t) = match &instruction {
                MirInstruction::Branch {
                    then_bb, else_bb, ..
                } => (Some(*then_bb), Some(*else_bb), None),
                MirInstruction::Jump { target } => (None, None, Some(*target)),
                _ => (None, None, None),
            };

            // Extract function name before mutable borrow to avoid borrowck error
            let current_fn_name = function.signature.name.clone();

            if let Some(block) = function.get_block_mut(block_id) {
                // CRITICAL: Copy-only trace (for LocalSSA investigation)
                if let MirInstruction::Copy { dst, src } = &instruction {
                    if std::env::var("NYASH_LOCAL_SSA_TRACE").ok().as_deref() == Some("1") {
                        eprintln!(
                            "[emit-inst] fn={} bb={:?} COPY %{} <- %{}",
                            current_fn_name,
                            self.current_block.map(|b| b.0).unwrap_or(0),
                            dst.0,
                            src.0
                        );
                    }
                }

                // Invariant: Call must always carry a Callee (unified path).
                if let MirInstruction::Call { callee, .. } = &instruction {
                    if callee.is_none() {
                        return Err("builder invariant violated: MirInstruction::Call.callee must be Some (unified call)".into());
                    } else if std::env::var("NYASH_LOCAL_SSA_TRACE").ok().as_deref() == Some("1") {
                        use crate::mir::definitions::call_unified::Callee;
                        if let Some(Callee::Method {
                            box_name,
                            method,
                            receiver: Some(r),
                            ..
                        }) = callee
                        {
                            eprintln!(
                                "[emit-inst] fn={} bb={:?} Call {}.{} recv=%{}",
                                current_fn_name,
                                self.current_block.map(|b| b.0).unwrap_or(0),
                                box_name,
                                method,
                                r.0
                            );
                        }
                    } else if std::env::var("NYASH_BUILDER_TRACE_RECV").ok().as_deref() == Some("1")
                    {
                        use crate::mir::definitions::call_unified::Callee;
                        if let Some(Callee::Method {
                            box_name,
                            method,
                            receiver: Some(r),
                            ..
                        }) = callee
                        {
                            let names: Vec<String> = self
                                .variable_map
                                .iter()
                                .filter(|(_, &vid)| vid == *r)
                                .map(|(k, _)| k.clone())
                                .collect();
                            eprintln!(
                                "[builder/recv-trace] fn={} bb={:?} method={}.{} recv=%{} aliases={:?}",
                                current_fn_name,
                                self.current_block,
                                box_name,
                                method,
                                r.0,
                                names
                            );
                        }
                    }
                }
                if utils::builder_debug_enabled() {
                    eprintln!(
                        "[BUILDER] emit @bb{} -> {}",
                        block_id,
                        match &instruction {
                            MirInstruction::TypeOp { dst, op, value, ty } =>
                                format!("typeop {:?} {} {:?} -> {}", op, value, ty, dst),
                            MirInstruction::Print { value, .. } => format!("print {}", value),
                            MirInstruction::BoxCall {
                                box_val,
                                method,
                                method_id,
                                args,
                                dst,
                                ..
                            } => {
                                if let Some(mid) = method_id {
                                    format!(
                                        "boxcall {}.{}[#{}]({:?}) -> {:?}",
                                        box_val, method, mid, args, dst
                                    )
                                } else {
                                    format!(
                                        "boxcall {}.{}({:?}) -> {:?}",
                                        box_val, method, args, dst
                                    )
                                }
                            }
                            MirInstruction::Call {
                                func, args, dst, ..
                            } => format!("call {}({:?}) -> {:?}", func, args, dst),
                            MirInstruction::NewBox {
                                dst,
                                box_type,
                                args,
                            } => format!("new {}({:?}) -> {}", box_type, args, dst),
                            MirInstruction::Const { dst, value } =>
                                format!("const {:?} -> {}", value, dst),
                            MirInstruction::Branch {
                                condition,
                                then_bb,
                                else_bb,
                            } => format!("br {}, {}, {}", condition, then_bb, else_bb),
                            MirInstruction::Jump { target } => format!("br {}", target),
                            _ => format!("{:?}", instruction),
                        }
                    );
                }
                // Phase 136 Step 6/7: Use metadata_ctx for span
                block.add_instruction_with_span(instruction.clone(), self.metadata_ctx.current_span());
                // Drop the mutable borrow of `block` before updating other blocks
            }
            // Update predecessor sets for branch/jump immediately so that
            // debug_verify_phi_inputs can observe a consistent CFG without
            // requiring a full function.update_cfg() pass.
            if let Some(t) = then_t {
                if let Some(succ) = function.get_block_mut(t) {
                    succ.add_predecessor(block_id);
                }
            }
            if let Some(t) = else_t {
                if let Some(succ) = function.get_block_mut(t) {
                    succ.add_predecessor(block_id);
                }
            }
            if let Some(t) = jump_t {
                if let Some(succ) = function.get_block_mut(t) {
                    succ.add_predecessor(block_id);
                }
            }
            Ok(())
        } else {
            Err(format!("Basic block {} does not exist", block_id))
        }
    }

    /// Update an existing PHI instruction's inputs (for loop sealing)
    /// Used by LoopFormBuilder to complete incomplete PHI nodes
    #[allow(dead_code)]
    pub(super) fn update_phi_instruction(
        &mut self,
        block: BasicBlockId,
        phi_id: ValueId,
        new_inputs: Vec<(BasicBlockId, ValueId)>,
    ) -> Result<(), String> {
        if let Some(ref mut function) = self.current_function {
            if let Some(block_data) = function.get_block_mut(block) {
                // Find PHI instruction with matching dst
                for inst in &mut block_data.instructions {
                    if let MirInstruction::Phi { dst, inputs, .. } = inst {
                        if *dst == phi_id {
                            *inputs = new_inputs;
                            return Ok(());
                        }
                    }
                }
                Err(format!(
                    "PHI instruction {} not found in block {}",
                    phi_id, block
                ))
            } else {
                Err(format!("Block {} not found", block))
            }
        } else {
            Err("No current function".to_string())
        }
    }

    // Phase M: is_no_phi_mode() method removed

    // Phase M: insert_edge_copy() method removed (no longer needed after dropping no_phi_mode)

    /// Build new expression: new ClassName(arguments)
    pub(super) fn build_new_expression(
        &mut self,
        class: String,
        arguments: Vec<ASTNode>,
    ) -> Result<ValueId, String> {
        // Phase 9.78a: Unified Box creation using NewBox instruction
        // Core-13 pure mode: emit ExternCall(env.box.new) with type name const only
        if crate::config::env::mir_core13_pure() {
            // Emit Const String for the type name (ConstantEmissionBox)
            let ty_id = crate::mir::builder::emission::constant::emit_string(self, class.clone());
            // Evaluate arguments (pass through to env.box.new shim)
            let mut arg_vals: Vec<ValueId> = Vec::with_capacity(arguments.len());
            for a in arguments {
                arg_vals.push(self.build_expression(a)?);
            }
            // Build arg list: [type, a1, a2, ...]
            let mut args: Vec<ValueId> = Vec::with_capacity(1 + arg_vals.len());
            args.push(ty_id);
            args.extend(arg_vals);
            // Call env.box.new
            // 📦 Hotfix 3: Use next_value_id() to respect function parameter reservation
            let dst = self.next_value_id();
            self.emit_instruction(MirInstruction::ExternCall {
                dst: Some(dst),
                iface_name: "env.box".to_string(),
                method_name: "new".to_string(),
                args,
                effects: EffectMask::PURE,
            })?;
            // Minimal type annotation
            self.value_types
                .insert(dst, super::MirType::Box(class.clone()));
            return Ok(dst);
        }

        // Optimization: Primitive wrappers → emit Const directly when possible
        if class == "IntegerBox" && arguments.len() == 1 {
            if let ASTNode::Literal {
                value: LiteralValue::Integer(n),
                ..
            } = arguments[0].clone()
            {
                // 📦 Hotfix 3: Use next_value_id() to respect function parameter reservation
                let dst = self.next_value_id();
                self.emit_instruction(MirInstruction::Const {
                    dst,
                    value: ConstValue::Integer(n),
                })?;
                self.value_types.insert(dst, super::MirType::Integer);
                return Ok(dst);
            }
        }

        // First, evaluate all arguments to get their ValueIds
        let mut arg_values = Vec::new();
        for arg in arguments {
            let arg_value = self.build_expression(arg)?;
            arg_values.push(arg_value);
        }

        // Generate the destination ValueId
        // 📦 Hotfix 3: Use next_value_id() to respect function parameter reservation
        let dst = self.next_value_id();

        // Emit NewBox instruction for all Box types
        // VM will handle optimization for basic types internally
        self.emit_instruction(MirInstruction::NewBox {
            dst,
            box_type: class.clone(),
            args: arg_values.clone(),
        })?;
        // Phase 15.5: Unified box type handling
        // All boxes (including former core boxes) are treated uniformly as Box types
        self.value_types
            .insert(dst, super::MirType::Box(class.clone()));

        // Record origin for optimization: dst was created by NewBox of class
        self.value_origin_newbox.insert(dst, class.clone());

        // birth call (normalized in the Builder)
        // Preferred: the lowered global function `<Class>.birth/Arity` (Arity does not count `me`)
        // Fallback: BoxCall("birth") for legacy compatibility (covers plugin/builtin initialization)
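        // Example (illustrative; class name hypothetical): for `new Point(1, 2)`, when the
        // module already contains a lowered `Point.birth/2`, the builder emits
        // call Point.birth/2(%obj, %a1, %a2); otherwise the BoxCall("birth") fallback
        // below applies (builtins/plugins only).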
        if class != "StringBox" {
            let arity = arg_values.len();
            let lowered =
                crate::mir::builder::calls::function_lowering::generate_method_function_name(
                    &class, "birth", arity,
                );
            let use_lowered = if let Some(ref module) = self.current_module {
                module.functions.contains_key(&lowered)
            } else {
                false
            };
            if use_lowered {
                // Call Global("Class.birth/Arity") with argv = [me, args...]
                let mut argv: Vec<ValueId> = Vec::with_capacity(1 + arity);
                argv.push(dst);
                argv.extend(arg_values.iter().copied());
                self.emit_legacy_call(None, CallTarget::Global(lowered), argv)?;
            } else {
                // Fallback policy:
                // - For user-defined boxes (no explicit constructor), do NOT emit BoxCall("birth").
                //   VM will treat plain NewBox as constructed; dev verify warns if needed.
                // - For builtins/plugins, keep BoxCall("birth") fallback to preserve legacy init.
                let is_user_box = self.user_defined_boxes.contains(&class);
                // Dev safety: allow disabling birth() injection for builtins to avoid
                // unified-call method dispatch issues while migrating. Off by default unless explicitly enabled.
                let allow_builtin_birth = std::env::var("NYASH_DEV_BIRTH_INJECT_BUILTINS")
                    .ok()
                    .as_deref()
                    == Some("1");
                if !is_user_box && allow_builtin_birth {
                    let birt_mid = resolve_slot_by_type_name(&class, "birth");
                    self.emit_box_or_plugin_call(
                        None,
                        dst,
                        "birth".to_string(),
                        birt_mid,
                        arg_values,
                        EffectMask::READ.add(Effect::ReadHeap),
                    )?;
                }
            }
        }

        Ok(dst)
    }

    /// Check if the current basic block is terminated
    fn is_current_block_terminated(&self) -> bool {
        if let (Some(block_id), Some(ref function)) = (self.current_block, &self.current_function) {
            if let Some(block) = function.get_block(block_id) {
                return block.is_terminated();
            }
        }
        false
    }

    // ============================================================================
    // Phase 26-A: ValueId type-safety methods
    // ============================================================================

    /// Allocate a typed ValueId (new API)
    /// Phase 136 P0: Use SSOT allocator (next_value_id) to respect function context
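    /// Example (illustrative; `MirValueKind::Local` is assumed to be one of the kind variants):
    /// ```ignore
    /// let tv = builder.new_typed_value(MirValueKind::Local);
    /// // The allocated ValueId is registered in value_kinds, so the kind-based
    /// // queries below (is_value_local etc.) will recognize it.
    /// ```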
    pub fn new_typed_value(&mut self, kind: super::MirValueKind) -> super::TypedValueId {
        let id = self.next_value_id();
        self.value_kinds.insert(id, kind);
        super::TypedValueId::new(id, kind)
    }

    /// Get the kind information of an existing ValueId
    pub fn get_value_kind(&self, id: ValueId) -> Option<super::MirValueKind> {
        self.value_kinds.get(&id).copied()
    }

    /// Attach kind information to an existing ValueId (legacy compatibility)
    pub fn register_value_kind(&mut self, id: ValueId, kind: super::MirValueKind) {
        self.value_kinds.insert(id, kind);
    }

    /// Type-safe parameter check (ValueId-based) - GUARD bug prevention
    pub fn is_value_parameter(&self, id: ValueId) -> bool {
        self.get_value_kind(id)
            .map(|kind| kind.is_parameter())
            .unwrap_or(false)
    }

    /// Type-safe local variable check (ValueId-based)
    pub fn is_value_local(&self, id: ValueId) -> bool {
        self.get_value_kind(id)
            .map(|kind| kind.is_local())
            .unwrap_or(false)
    }

    /// Type-safe LoopCarrier check (ValueId-based)
    pub fn is_value_loop_carrier(&self, id: ValueId) -> bool {
        self.get_value_kind(id)
            .map(|kind| kind.is_loop_carrier())
            .unwrap_or(false)
    }
}

impl Default for MirBuilder {
    fn default() -> Self {
        Self::new()
    }
}

// Phase 79: BindingMapProvider implementation
// Centralizes feature-gated binding_map access for promoters
use crate::mir::loop_pattern_detection::BindingMapProvider;

impl BindingMapProvider for MirBuilder {
    #[cfg(feature = "normalized_dev")]
    fn get_binding_map(&self) -> Option<&std::collections::BTreeMap<String, crate::mir::BindingId>> {
        // Phase 136 Step 4/7: Use binding_ctx (SSOT)
        Some(self.binding_ctx.binding_map())
    }

    #[cfg(not(feature = "normalized_dev"))]
    fn get_binding_map(&self) -> Option<&std::collections::BTreeMap<String, crate::mir::BindingId>> {
        None
    }
}

#[cfg(test)]
mod binding_id_tests {
    use super::*;

    #[test]
    #[allow(deprecated)]
    fn test_binding_map_initialization() {
        let builder = MirBuilder::new();
        assert_eq!(builder.core_ctx.next_binding_id, 0);
        // Phase 136 Step 4/7: Check both binding_ctx (SSOT) and legacy field
        assert!(builder.binding_ctx.is_empty());
        assert!(builder.binding_map.is_empty());
    }

    #[test]
    fn test_binding_allocation_sequential() {
        let mut builder = MirBuilder::new();
        let bid0 = builder.allocate_binding_id();
        let bid1 = builder.allocate_binding_id();
        let bid2 = builder.allocate_binding_id();

        assert_eq!(bid0.raw(), 0);
        assert_eq!(bid1.raw(), 1);
        assert_eq!(bid2.raw(), 2);
        assert_eq!(builder.core_ctx.next_binding_id, 3);
    }

    #[test]
    #[allow(deprecated)]
    fn test_shadowing_binding_restore() {
        let mut builder = MirBuilder::new();

        // Simulate function entry scope
        builder.push_lexical_scope();

        // Declare outer x
        // Phase 136 P0: Use SSOT allocator for function scope simulation
        let outer_vid = builder.next_value_id();
        builder
            .declare_local_in_current_scope("x", outer_vid)
            .unwrap();
        // Phase 136 Step 4/7: Check binding_ctx (SSOT)
        let outer_bid = builder.binding_ctx.lookup("x").unwrap();
        assert_eq!(outer_bid.raw(), 0);
        // Also verify legacy field is synced
        assert_eq!(*builder.binding_map.get("x").unwrap(), outer_bid);

        // Enter inner scope and shadow x
        builder.push_lexical_scope();
        // Phase 136 P0: Use SSOT allocator for function scope simulation
        let inner_vid = builder.next_value_id();
        builder
            .declare_local_in_current_scope("x", inner_vid)
            .unwrap();
        // Phase 136 Step 4/7: Check binding_ctx (SSOT)
        let inner_bid = builder.binding_ctx.lookup("x").unwrap();
        assert_eq!(inner_bid.raw(), 1);
        // Also verify legacy field is synced
        assert_eq!(*builder.binding_map.get("x").unwrap(), inner_bid);

        // Exit inner scope - should restore outer binding
        builder.pop_lexical_scope();
        // Phase 136 Step 4/7: Check binding_ctx (SSOT)
        let restored_bid = builder.binding_ctx.lookup("x").unwrap();
        assert_eq!(restored_bid, outer_bid);
        assert_eq!(restored_bid.raw(), 0);
        // Also verify legacy field is synced
        assert_eq!(*builder.binding_map.get("x").unwrap(), restored_bid);

        // Cleanup
        builder.pop_lexical_scope();
    }

    #[test]
    fn test_valueid_binding_parallel_allocation() {
        let mut builder = MirBuilder::new();

        // Phase 136 P0: Use SSOT allocator (next_value_id)
        // Note: Without current_function, next_value_id() falls back to value_gen.next(),
        // so this test still validates ValueId/BindingId independence.
        // Allocate ValueIds and BindingIds in parallel
        let vid0 = builder.next_value_id();
        let bid0 = builder.allocate_binding_id();
        let vid1 = builder.next_value_id();
        let bid1 = builder.allocate_binding_id();

        // ValueId and BindingId should be independent
        assert_eq!(vid0.0, 0);
        assert_eq!(bid0.raw(), 0);
        assert_eq!(vid1.0, 1);
        assert_eq!(bid1.raw(), 1);

        // Allocating more ValueIds should not affect BindingId counter
        let _ = builder.next_value_id();
        let _ = builder.next_value_id();
        let bid2 = builder.allocate_binding_id();
        assert_eq!(bid2.raw(), 2); // Still sequential

        // Allocating more BindingIds should not affect ValueId counter
        let _ = builder.allocate_binding_id();
        let _ = builder.allocate_binding_id();
        let vid2 = builder.next_value_id();
        assert_eq!(vid2.0, 4); // Continues from where we left off
    }
}