hakorune/tools/hakorune_emit_mir.sh
nyash-codex eadde8d1dd fix(mir/builder): use function-local ValueId throughout MIR builder
Phase 25.1b: Complete SSA fix - eliminate all global ValueId usage in function contexts.

Root cause: ~75 locations throughout the MIR builder were using the global
value generator (self.value_gen.next()) instead of the function-local
allocator (f.next_value_id()), causing SSA verification failures and runtime
"use of undefined value" errors.

Solution:
- Added a next_value_id() helper that automatically chooses the correct allocator
- Fixed 19 files with ~75 occurrences of ValueId allocation
- All function-context allocations now use function-local IDs

Files modified:
- src/mir/builder/utils.rs: Added next_value_id() helper, fixed 8 locations
- src/mir/builder/builder_calls.rs: 17 fixes
- src/mir/builder/ops.rs: 8 fixes
- src/mir/builder/stmts.rs: 7 fixes
- src/mir/builder/emission/constant.rs: 6 fixes
- src/mir/builder/rewrite/*.rs: 10 fixes
- + 13 other files

Verification:
- cargo build --release: SUCCESS
- Simple tests with NYASH_VM_VERIFY_MIR=1: Zero undefined errors
- Multi-parameter static methods: All working

Known remaining issue: ValueId(22) in Stage-B (separate issue to investigate)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-17 00:48:18 +09:00


#!/usr/bin/env bash
# hakorune_emit_mir.sh — Emit MIR(JSON) using Hakorune StageB + MirBuilder (Hakofirst)
#
# Usage: tools/hakorune_emit_mir.sh <input.hako> <out.json>
# Notes:
# - Runs the StageB compiler (Hako) to emit Program(JSON v0), then feeds it to MirBuilderBox.emit_from_program_json_v0.
# - Defaults to the Hakorune VM path; no inline Ny compiler; Stage3 enabled.
# - Keeps defaults conservative: no global flips; this is a helper for dev/CI scripts.
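# Example invocation (illustrative paths only; assumes a release build of hakorune/nyash exists):
#   tools/hakorune_emit_mir.sh lang/examples/hello.hako /tmp/hello.mir.json
#   HAKO_SELFHOST_TRACE=1 tools/hakorune_emit_mir.sh lang/examples/hello.hako /tmp/hello.mir.json   # verbose tracing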
set -euo pipefail
if [ "$#" -ne 2 ]; then
echo "Usage: $0 <input.hako> <out.json>" >&2
exit 2
fi
IN="$1"
OUT="$2"
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
if ROOT_GIT=$(git -C "$SCRIPT_DIR" rev-parse --show-toplevel 2>/dev/null); then
ROOT="$ROOT_GIT"
else
ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
fi
# Resolve nyash/hakorune binary via test_runner helper (ensures consistent env)
if [ ! -f "$IN" ]; then
echo "[FAIL] input not found: $IN" >&2
exit 1
fi
# Resolve nyash/hakorune binary (simple detection; test_runner will override later if sourced)
if [ -z "${NYASH_BIN:-}" ]; then
if [ -x "$ROOT/target/release/hakorune" ]; then
export NYASH_BIN="$ROOT/target/release/hakorune"
else
export NYASH_BIN="$ROOT/target/release/nyash"
fi
fi
# Allow legacy 'nyash' binary when invoked via this helper (Stage0 bootstrap).
# This keeps StageB / MirBuilder scripts working while the main CLI migrates to 'hakorune'.
export HAKO_ALLOW_NYASH="${HAKO_ALLOW_NYASH:-1}"
export NYASH_ALLOW_NYASH="${NYASH_ALLOW_NYASH:-1}"
# Store CODE in temp file to avoid subshell expansion issues
CODE_TMP=$(mktemp --suffix=.hako)
trap 'rm -f "$CODE_TMP" || true' EXIT
cp "$IN" "$CODE_TMP"
# Stage1 using resolver: prepare modules list from nyash.toml
if [ -z "${HAKO_STAGEB_MODULES_LIST:-}" ]; then
HAKO_STAGEB_MODULES_LIST=$(
python3 - "$ROOT" <<'PY' 2>/dev/null
import sys
from pathlib import Path
root = Path(sys.argv[1])
toml_path = root / "nyash.toml"
try:
    lines = toml_path.read_text(encoding="utf-8").splitlines()
except Exception:
    print("", end="")
    raise SystemExit(0)
collect = False
entries = []
for raw in lines:
    line = raw.strip()
    if not line or line.startswith("#"):
        continue
    if line.startswith("["):
        collect = (line == "[modules]")
        continue
    if not collect:
        continue
    if "=" not in line:
        continue
    key, value = line.split("=", 1)
    key = key.strip().strip('"')
    value = value.split("#", 1)[0].strip()
    if value.startswith('"') and value.endswith('"'):
        value = value[1:-1]
    if not key or not value:
        continue
    entries.append(f"{key}={value}")
print("|||".join(entries), end="")
PY
) || HAKO_STAGEB_MODULES_LIST=""
export HAKO_STAGEB_MODULES_LIST
fi
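# HAKO_STAGEB_MODULES_LIST carries "name=path" pairs joined by "|||" (see the Python above).
# Illustrative value only (module names and paths here are hypothetical):
#   HAKO_STAGEB_MODULES_LIST='hako.mir.builder=lang/src/mir/builder.hako|||selfhost.shared.json=lang/src/shared/json.hako'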
export HAKO_STAGEB_APPLY_USINGS="${HAKO_STAGEB_APPLY_USINGS:-1}"
# Phase 21.8: Extract using imports and build JSON map for MirBuilder
# This enables MirBuilder to recognize static box references like MatI64, IntArrayCore, MirBuilderBox.
IMPORTS_JSON="{}"
if [ "${NYASH_ENABLE_USING:-1}" = "1" ] || [ "${HAKO_ENABLE_USING:-1}" = "1" ]; then
# Delegate parsing to Python for robustness:
# - Handle `using ns.path.Type as Alias` → Alias
# - Handle alias-less `using ns.path.Type` → Type
# - Ignore path-style using (`"path.hako"`, `./`, `/`, `*.hako`)
IMPORTS_JSON=$(python3 - "$CODE_TMP" <<'PY' 2>/dev/null || echo "{}"
import sys, json
path = sys.argv[1]
try:
    text = open(path, "r", encoding="utf-8").read().splitlines()
except Exception:
    print("{}", end="")
    raise SystemExit(0)
imports = {}
for raw in text:
    line = raw.lstrip()
    if not line.startswith("using "):
        continue
    # Strip trailing line comment
    if "//" in line:
        line = line.split("//", 1)[0]
    line = line.strip()
    if not line.startswith("using "):
        continue
    body = line[len("using "):].strip().rstrip(";").strip()
    if not body:
        continue
    alias = None
    # Split by " as "
    idx = body.find(" as ")
    if idx >= 0:
        target = body[:idx].strip()
        alias = body[idx + 4 :].strip()
    else:
        target = body
    if not target:
        continue
    # Skip path-style targets
    if target.startswith('"') or target.startswith("./") or target.startswith("/") or target.endswith(".hako") or target.endswith(".nyash"):
        continue
    if alias is None:
        # Derive alias from last segment of namespace
        alias = target.split(".")[-1].strip()
    if not alias:
        continue
    # Very small ident check (letters/digits/_)
    if not all((c.isalnum() or c == "_") for c in alias):
        continue
    imports[alias] = alias
print(json.dumps(imports), end="")
PY
)
if [ -z "$IMPORTS_JSON" ]; then
IMPORTS_JSON="{}"
fi
fi
export HAKO_MIRBUILDER_IMPORTS="$IMPORTS_JSON"
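# Illustrative mapping (hypothetical source line): `using lang.compiler.MirBuilderBox as MB`
# would yield HAKO_MIRBUILDER_IMPORTS='{"MB": "MB"}' (aliases map to themselves; see the Python above).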
# Check if FORCE jsonfrag mode is requested (bypasses Stage-B entirely)
if [ "${HAKO_MIR_BUILDER_LOOP_FORCE_JSONFRAG:-0}" = "1" ]; then
# Extract the loop limit (first integer literal) from the source using grep
limit=$(grep -o '[0-9]\+' "$CODE_TMP" | head -1 || echo "10")
# Generate minimal while-form MIR(JSON) directly (executable semantics)
# PHI incoming format: [[value_register, predecessor_block_id], ...]
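# Example reading (taken from the fragment below): {"op": "phi", "dst": 3, "incoming": [[1, 0], [5, 2]]}
# means %3 receives %1 when entering from block 0 and %5 when entering from block 2.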
echo "[emit/jsonfrag] FORCE min-loop MIR (dev-only)" >&2
cat > "$OUT" <<MIRJSON
{
"functions": [{
"name": "main",
"params": [],
"locals": [],
"blocks": [
{
"id": 0,
"instructions": [
{"op": "const", "dst": 1, "value": {"type": "i64", "value": 0}},
{"op": "const", "dst": 2, "value": {"type": "i64", "value": ${limit}}},
{"op": "jump", "target": 1}
]
},
{
"id": 1,
"instructions": [
{"op": "phi", "dst": 6, "incoming": [[2, 0], [6, 2]]},
{"op": "phi", "dst": 3, "incoming": [[1, 0], [5, 2]]},
{"op": "compare", "operation": "<", "lhs": 3, "rhs": 6, "dst": 4},
{"op": "branch", "cond": 4, "then": 2, "else": 3}
]
},
{
"id": 2,
"instructions": [
{"op": "const", "dst": 10, "value": {"type": "i64", "value": 1}},
{"op": "binop", "operation": "+", "lhs": 3, "rhs": 10, "dst": 5},
{"op": "jump", "target": 1}
]
},
{
"id": 3,
"instructions": [
{"op": "ret", "value": 3}
]
}
]
}]
}
MIRJSON
echo "[OK] MIR JSON written (force-jsonfrag): $OUT"
exit 0
fi
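# Dev example (illustrative input path): bypass Stage-B and emit the minimal loop MIR directly:
#   HAKO_MIR_BUILDER_LOOP_FORCE_JSONFRAG=1 tools/hakorune_emit_mir.sh apps/tests/loop10.hako /tmp/loop.mir.json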
# 1) StageB: Hako parser emits Program(JSON v0) to stdout
# Extract Program JSON robustly using Python3 bracket balancing with fallbacks
extract_program_json_py() {
python3 - <<'PYEOF'
import sys
stdin = sys.stdin.read()
# Find the start of Program JSON (look for "kind":"Program")
start = stdin.find('"kind":"Program"')
if start < 0:
    sys.exit(1)
# Walk back to find the opening brace of the object containing "kind":"Program"
pos = start
depth = 0
while pos >= 0:
    if stdin[pos] == '{':
        depth += 1
        if depth == 1:
            # Found the start brace
            break
    elif stdin[pos] == '}':
        depth -= 1
    pos -= 1
if pos < 0:
    sys.exit(1)
# Now walk forward from pos, tracking braces to find the matching closing brace
obj_start = pos
depth = 0
in_string = False
escape = False
i = obj_start
while i < len(stdin):
    ch = stdin[i]
    if escape:
        escape = False
    elif in_string:
        if ch == '\\':
            escape = True
        elif ch == '"':
            in_string = False
    else:
        if ch == '"':
            in_string = True
        elif ch == '{':
            depth += 1
        elif ch == '}':
            depth -= 1
            if depth == 0:
                # Found the matching closing brace
                print(stdin[obj_start:i+1])
                sys.exit(0)
    i += 1
# If we get here, no matching brace found
sys.exit(1)
PYEOF
}
extract_program_json() {
local input="$1"
local result
# Try 1: Python balancer (existing)
result=$(echo "$input" | extract_program_json_py 2>/dev/null || true)
if [ -n "$result" ] && echo "$result" | grep -q '"kind".*"Program"'; then
echo "$result"
return 0
fi
# Try 2: Simple awk fallback
result=$(echo "$input" | awk '/^\{/,/^\}$/')
if [ -n "$result" ] && echo "$result" | grep -q '"kind".*"Program"'; then
echo "$result"
return 0
fi
# Try 3: Ruby fallback (if available)
if command -v ruby >/dev/null 2>&1; then
result=$(echo "$input" | ruby -e 'puts STDIN.read[/\{.*"kind".*"Program".*\}/m]' 2>/dev/null || true)
if [ -n "$result" ]; then
echo "$result"
return 0
fi
fi
# All fallbacks failed
return 1
}
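# Illustrative behaviour (hypothetical Stage-B output): given
#   [stageb] parsing... {"kind":"Program","body":[]} [stageb] done
# extract_program_json prints just the balanced {"kind":"Program","body":[]} object.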
# Helper: surface common StageB / parser / using errors in a concise, friendly way.
diagnose_stageb_failure() {
local raw="$1"
# Stage3 local keyword misconfig
if printf '%s\n' "$raw" | grep -q "Undefined variable: local"; then
echo "[stageb/diagnose] Undefined variable: local" >&2
echo "[stageb/hint] 'local' は Stage3 キーワードだよ。" >&2
echo "[stageb/hint] NYASH_PARSER_STAGE3=1 HAKO_PARSER_STAGE3=1 を有効にするか、tools/hakorune_emit_mir.sh 経由で実行してね。" >&2
fi
# Generic parse error
if printf '%s\n' "$raw" | grep -q "Parse error"; then
echo "[stageb/diagnose] Parse error detected in StageB output." >&2
echo "[stageb/hint] HAKO_SELFHOST_TRACE=1 を立ててもう一度実行すると、最初の 200 文字が表示されるよ。" >&2
fi
# VM execution errors from StageB itself (MIR builder / verifier side)
if printf '%s\n' "$raw" | grep -q "Invalid value: use of undefined value"; then
echo "[stageb/diagnose] VM reported 'use of undefined value' during StageB execution." >&2
echo "[stageb/hint] StageB が生成した MIR に未定義レジスタ (%0 など) が含まれている可能性があるよ。" >&2
echo "[stageb/hint] 詳細を確認するには NYASH_VM_VERIFY_MIR=1 を立てて compiler_stageb.hako を直接実行してね。" >&2
echo "[stageb/hint] 代表的な原因と掘り先は docs/private/roadmap/phases/phase-20.33/DEBUG.md を参照してね (Stage1UsingResolverBox / FuncScannerBox 周り)。" >&2
fi
# Using resolver issues (StageB side)
local using_lines
using_lines=$(printf '%s\n' "$raw" | grep -E "\[using\] not found: " | sort -u || true)
if [ -n "$using_lines" ]; then
echo "[stageb/diagnose] Missing using modules detected (StageB):" >&2
echo "$using_lines" >&2
echo "[stageb/hint] nyash.toml の [modules] に該当 Box を追加するか、HAKO_STAGEB_MODULES_LIST / HAKO_STAGEB_APPLY_USINGS を確認してね。" >&2
fi
}
set +e
if [ "${HAKO_SELFHOST_TRACE:-0}" = "1" ]; then
code_len=$(wc -c < "$CODE_TMP" | tr -d ' ')
echo "[emit:trace] Stage-B: Starting parse of input (${code_len} chars)..." >&2
fi
# Run Stage-B with temp file (avoid subshell CODE variable expansion)
PROG_JSON_RAW=$(cd "$ROOT" && \
NYASH_JSON_ONLY=1 NYASH_DISABLE_NY_COMPILER=1 HAKO_DISABLE_NY_COMPILER=1 \
HAKO_STAGEB_FUNC_SCAN="${HAKO_STAGEB_FUNC_SCAN:-1}" \
NYASH_PARSER_STAGE3=1 HAKO_PARSER_STAGE3=1 NYASH_PARSER_ALLOW_SEMICOLON=1 \
NYASH_ENABLE_USING=${NYASH_ENABLE_USING:-1} HAKO_ENABLE_USING=${HAKO_ENABLE_USING:-1} \
"$NYASH_BIN" --backend vm "$ROOT/lang/src/compiler/entry/compiler_stageb.hako" -- --source "$(cat "$CODE_TMP")" 2>&1)
rc=$?
if [ "${HAKO_SELFHOST_TRACE:-0}" = "1" ]; then
echo "[emit:trace] Stage-B: Raw output length=${#PROG_JSON_RAW} chars, rc=$rc" >&2
fi
# Extract Program JSON from raw output
PROG_JSON_OUT=$(extract_program_json "$PROG_JSON_RAW" 2>/dev/null)
extract_rc=$?
if [ "${HAKO_SELFHOST_TRACE:-0}" = "1" ]; then
if [ $extract_rc -eq 0 ] && [ -n "$PROG_JSON_OUT" ]; then
echo "[emit:trace] Stage-B: SUCCESS - Generated Program(JSON) (${#PROG_JSON_OUT} chars)" >&2
# Show first 200 chars for validation
prog_head=$(printf '%s' "$PROG_JSON_OUT" | head -c 200)
echo "[emit:trace] Stage-B: prog_json_head: $prog_head..." >&2
else
echo "[emit:trace] Stage-B: FAILED - extract_rc=$extract_rc, output_len=${#PROG_JSON_OUT}" >&2
if [ -n "$PROG_JSON_RAW" ]; then
echo "[emit:trace] Stage-B: Raw output first 200 chars:" >&2
printf '%s' "$PROG_JSON_RAW" | head -c 200 >&2
echo "" >&2
fi
fi
fi
# Update rc to reflect extraction result
if [ $extract_rc -ne 0 ] || [ -z "$PROG_JSON_OUT" ]; then
rc=1
fi
set -e
# If Stage-B fails, skip to direct MIR emit paths (provider/legacy)
if [ $rc -ne 0 ] || [ -z "$PROG_JSON_OUT" ]; then
# Diagnose common StageB errors before falling back
diagnose_stageb_failure "$PROG_JSON_RAW"
# Stage-B not available - fall back to legacy CLI path directly
# Skip the intermediate Program(JSON) step and emit MIR directly
if HAKO_STAGEB_FUNC_SCAN="${HAKO_STAGEB_FUNC_SCAN:-1}" \
HAKO_MIR_BUILDER_FUNCS="${HAKO_MIR_BUILDER_FUNCS:-}" \
HAKO_MIR_BUILDER_CALL_RESOLVE="${HAKO_MIR_BUILDER_CALL_RESOLVE:-}" \
NYASH_JSON_SCHEMA_V1=${NYASH_JSON_SCHEMA_V1:-1} \
NYASH_MIR_UNIFIED_CALL=${NYASH_MIR_UNIFIED_CALL:-1} \
"$NYASH_BIN" --emit-mir-json "$OUT" "$IN" >/dev/null 2>&1; then
echo "[OK] MIR JSON written (direct-emit): $OUT"
exit 0
fi
echo "[FAIL] Stage-B and direct MIR emit both failed" >&2
exit 1
fi
# Quick validation for Program(JSON v0)
if ! printf '%s' "$PROG_JSON_OUT" | grep -q '"kind"\s*:\s*"Program"'; then
# Invalid Program JSON - emit quick diagnostics, then fall back to direct emit
diagnose_stageb_failure "$PROG_JSON_RAW"
if HAKO_STAGEB_FUNC_SCAN="${HAKO_STAGEB_FUNC_SCAN:-1}" \
HAKO_MIR_BUILDER_FUNCS="${HAKO_MIR_BUILDER_FUNCS:-}" \
HAKO_MIR_BUILDER_CALL_RESOLVE="${HAKO_MIR_BUILDER_CALL_RESOLVE:-}" \
NYASH_JSON_SCHEMA_V1=${NYASH_JSON_SCHEMA_V1:-1} \
NYASH_MIR_UNIFIED_CALL=${NYASH_MIR_UNIFIED_CALL:-1} \
"$NYASH_BIN" --emit-mir-json "$OUT" "$IN" >/dev/null 2>&1; then
echo "[OK] MIR JSON written (direct-emit-fallback): $OUT"
exit 0
fi
echo "[FAIL] StageB output invalid and direct emit failed" >&2
exit 1
fi
# 2) Convert Program(JSON v0) → MIR(JSON)
# Prefer selfhost builder first when explicitly requested; otherwise use delegate (GateC) for stability.
try_selfhost_builder() {
local prog_json="$1" out_path="$2"
# FORCE=1 direct assembly shortcut (dev toggle, bypasses using resolution)
if [ "${HAKO_MIR_BUILDER_LOOP_FORCE_JSONFRAG:-0}" = "1" ]; then
# Extract limit from Program(JSON) using grep/awk
local limit=$(printf '%s' "$prog_json" | grep -o '"type":"Int","value":[0-9]*' | head -1 | grep -o '[0-9]*$' || echo "10")
# Generate minimal while-form MIR(JSON) directly (executable semantics)
# PHI incoming format: [[value_register, predecessor_block_id], ...]
cat > "$out_path" <<'MIRJSON'
{
"functions": [{
"name": "main",
"params": [],
"locals": [],
"blocks": [
{
"id": 0,
"instructions": [
{"op": "const", "dst": 1, "value": {"type": "i64", "value": 0}},
{"op": "const", "dst": 2, "value": {"type": "i64", "value": LIMIT_PLACEHOLDER}},
{"op": "jump", "target": 1}
]
},
{
"id": 1,
"instructions": [
{"op": "phi", "dst": 6, "incoming": [[2, 0], [6, 2]]},
{"op": "phi", "dst": 3, "incoming": [[1, 0], [5, 2]]},
{"op": "compare", "operation": "<", "lhs": 3, "rhs": 6, "dst": 4},
{"op": "branch", "cond": 4, "then": 2, "else": 3}
]
},
{
"id": 2,
"instructions": [
{"op": "const", "dst": 10, "value": {"type": "i64", "value": 1}},
{"op": "binop", "operation": "+", "lhs": 3, "rhs": 10, "dst": 5},
{"op": "jump", "target": 1}
]
},
{
"id": 3,
"instructions": [
{"op": "ret", "value": 3}
]
}
]
}]
}
MIRJSON
# Provider-first delegate: call env.mirbuilder.emit(prog_json) and capture v1 JSON
try_provider_emit() {
local prog_json="$1" out_path="$2"
local tmp_hako; tmp_hako=$(mktemp --suffix .hako)
cat >"$tmp_hako" <<'HCODE'
using "hako.mir.builder.internal.jsonfrag_normalizer" as NormBox
static box Main { method main(args) {
local p = env.get("HAKO_BUILDER_PROGRAM_JSON")
if p == null { print("[provider/emit:nojson]"); return 1 }
local a = new ArrayBox(); a.push(p)
local out = hostbridge.extern_invoke("env.mirbuilder", "emit", a)
// Optional normalization (dev): apply JsonFrag normalizer/purifier to provider output
{
local nv = env.get("HAKO_MIR_NORMALIZE_PROVIDER")
if nv != null && ("" + nv) == "1" {
local out_s = "" + out
out = NormBox.normalize_all(out_s)
}
}
print("[provider/emit:ok]")
print("[MIR_OUT_BEGIN]")
print("" + out)
print("[MIR_OUT_END]")
return 0
} }
HCODE
local tmp_stdout; tmp_stdout=$(mktemp)
trap 'rm -f "$tmp_hako" "$tmp_stdout" || true' RETURN
set +e
(cd "$ROOT" && \
NYASH_DISABLE_PLUGINS=1 NYASH_FILEBOX_MODE="core-ro" \
NYASH_PARSER_STAGE3=1 HAKO_PARSER_STAGE3=1 NYASH_PARSER_ALLOW_SEMICOLON=1 \
HAKO_BUILDER_PROGRAM_JSON="$prog_json" \
"$NYASH_BIN" --backend vm "$tmp_hako" 2>&1 | tee "$tmp_stdout" >/dev/null)
local rc=$?
set -e
if [ $rc -ne 0 ] || ! grep -q "\[provider/emit:ok\]" "$tmp_stdout"; then
return 1
fi
local mir
mir=$(awk '/\[MIR_OUT_BEGIN\]/{flag=1;next}/\[MIR_OUT_END\]/{flag=0}flag' "$tmp_stdout")
if [ -z "$mir" ]; then return 1; fi
# Write raw MIR JSON first
printf '%s' "$mir" > "$out_path"
# Optional AOT prep stage (run_json; no FileBox)
# Trigger when HAKO_APPLY_AOT_PREP=1 or when fast/hoist/collections_hot are requested.
if [ "${HAKO_APPLY_AOT_PREP:-0}" = "1" ] || [ "${NYASH_AOT_COLLECTIONS_HOT:-0}" = "1" ] || [ "${NYASH_LLVM_FAST:-0}" = "1" ] || [ "${NYASH_MIR_LOOP_HOIST:-0}" = "1" ]; then
if [ "${HAKO_SELFHOST_TRACE:-0}" = "1" ]; then
echo "[provider/emit:trace] Applying AotPrep.run_json to MIR JSON..." >&2
fi
_prep_hako=$(mktemp --suffix .hako)
cat > "$_prep_hako" <<'HAKO'
using selfhost.llvm.ir.aot_prep as AotPrepBox
static box Main { method main(args) {
local src = env.get("HAKO_PREP_INPUT")
if src == null || src == "" { print("[prep:skip:empty]"); return 0 }
local out = AotPrepBox.run_json(src)
print("[PREP_OUT_BEGIN]")
print(out)
print("[PREP_OUT_END]")
return 0
} }
HAKO
# Read MIR JSON and pass via env; capture between markers
_prep_stdout=$(mktemp)
_prep_stderr=$(mktemp)
set +e
# AotPrep(run_json) only walks the JSON once on the self-hosted VM, so it runs with no step
# limit (0) by default. Override HAKO_VM_MAX_STEPS / NYASH_VM_MAX_STEPS explicitly if needed.
HAKO_PREP_INPUT="$(cat "$out_path")" \
NYASH_FILEBOX_MODE=core-ro \
HAKO_VM_MAX_STEPS="${HAKO_VM_MAX_STEPS:-0}" NYASH_VM_MAX_STEPS="${NYASH_VM_MAX_STEPS:-0}" \
NYASH_PARSER_STAGE3=1 HAKO_PARSER_STAGE3=1 NYASH_PARSER_ALLOW_SEMICOLON=1 \
NYASH_ENABLE_USING=1 HAKO_ENABLE_USING=1 HAKO_USING_RESOLVER_FIRST=1 \
NYASH_AOT_COLLECTIONS_HOT=${NYASH_AOT_COLLECTIONS_HOT:-0} NYASH_LLVM_FAST=${NYASH_LLVM_FAST:-0} NYASH_MIR_LOOP_HOIST=${NYASH_MIR_LOOP_HOIST:-0} NYASH_AOT_MAP_KEY_MODE=${NYASH_AOT_MAP_KEY_MODE:-auto} \
NYASH_AOT_NUMERIC_CORE=${NYASH_AOT_NUMERIC_CORE:-0} NYASH_AOT_NUMERIC_CORE_TRACE=${NYASH_AOT_NUMERIC_CORE_TRACE:-0} \
NYASH_JSON_ONLY=${NYASH_JSON_ONLY:-1} \
"$NYASH_BIN" --backend vm "$_prep_hako" >"$_prep_stdout" 2>"$_prep_stderr"
_rc=$?
set -e
if [ "${HAKO_SELFHOST_TRACE:-0}" = "1" ]; then
echo "[provider/emit:trace] AotPrep runner rc=$_rc" >&2
fi
if [ $_rc -eq 0 ] && grep -q "\[PREP_OUT_BEGIN\]" "$_prep_stdout" && grep -q "\[PREP_OUT_END\]" "$_prep_stdout"; then
awk '/\[PREP_OUT_BEGIN\]/{flag=1;next}/\[PREP_OUT_END\]/{flag=0}flag' "$_prep_stdout" > "$out_path"
[ "${HAKO_SELFHOST_TRACE:-0}" = "1" ] && echo "[provider/emit:trace] AotPrep applied (run_json)" >&2
# Optional: surface CollectionsHot trace lines for diagnostics when requested
if [ "${NYASH_AOT_CH_TRACE:-0}" = "1" ]; then
if command -v rg >/dev/null 2>&1; then
rg -n '^\[aot/collections_hot\]' "$_prep_stdout" >&2 || true
else
grep '^\[aot/collections_hot\]' "$_prep_stdout" >&2 || true
fi
fi
# Optional: surface NumericCore trace lines for diagnostics when requested
if [ "${NYASH_AOT_NUMERIC_CORE_TRACE:-0}" = "1" ]; then
if command -v rg >/dev/null 2>&1; then
rg -n '^\[aot/numeric_core\]' "$_prep_stdout" >&2 || true
else
grep '^\[aot/numeric_core\]' "$_prep_stdout" >&2 || true
fi
fi
else
if [ "${HAKO_SELFHOST_TRACE:-0}" = "1" ]; then
echo "[provider/emit:trace] AotPrep skipped or failed (run_json) rc=$_rc" >&2
if [ -s "$_prep_stderr" ]; then
echo "[provider/emit:trace] AotPrep stderr (tail):" >&2
tail -n 60 "$_prep_stderr" >&2 || true
fi
fi
fi
rm -f "$_prep_hako" "$_prep_stdout" "$_prep_stderr" 2>/dev/null || true
fi
echo "[OK] MIR JSON written (delegate:provider): $out_path"
return 0
}
# Replace LIMIT_PLACEHOLDER with actual limit
sed -i "s/LIMIT_PLACEHOLDER/$limit/g" "$out_path"
if [ "${HAKO_SELFHOST_TRACE:-0}" = "1" ]; then
echo "[selfhost-direct:ok] Direct MIR assembly (FORCE=1), limit=$limit" >&2
fi
return 0
fi
# Builder box selection (default: hako.mir.builder)
local builder_box="${HAKO_MIR_BUILDER_BOX:-hako.mir.builder}"
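# Example (dev): select the minimal builder via the env toggle read above:
#   HAKO_MIR_BUILDER_BOX=hako.mir.builder.min tools/hakorune_emit_mir.sh <input.hako> <out.json>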
local tmp_hako; tmp_hako=$(mktemp --suffix .hako)
if [ "$builder_box" = "hako.mir.builder.min" ]; then
cat >"$tmp_hako" <<'HCODE'
using "hako.mir.builder.internal.runner_min" as BuilderRunnerMinBox
static box Main { method main(args) {
local prog_json = env.get("HAKO_BUILDER_PROGRAM_JSON")
if prog_json == null { print("[builder/selfhost-first:fail:nojson]"); return 1 }
local mir_out = BuilderRunnerMinBox.run(prog_json)
if mir_out == null { print("[builder/selfhost-first:fail:emit]"); return 1 }
print("[builder/selfhost-first:ok]")
print("[MIR_OUT_BEGIN]")
print("" + mir_out)
print("[MIR_OUT_END]")
return 0
} }
HCODE
else
cat >"$tmp_hako" <<'HCODE'
using "__BUILDER_BOX__" as MirBuilderBox
static box Main { method main(args) {
local prog_json = env.get("HAKO_BUILDER_PROGRAM_JSON")
if prog_json == null { print("[builder/selfhost-first:fail:nojson]"); return 1 }
local mir_out = MirBuilderBox.emit_from_program_json_v0(prog_json, null)
if mir_out == null { print("[builder/selfhost-first:fail:emit]"); return 1 }
print("[builder/selfhost-first:ok]")
print("[MIR_OUT_BEGIN]")
print("" + mir_out)
print("[MIR_OUT_END]")
return 0
} }
HCODE
sed -i "s|__BUILDER_BOX__|$builder_box|g" "$tmp_hako"
fi
local tmp_stdout; tmp_stdout=$(mktemp)
trap 'rm -f "$tmp_hako" "$tmp_stdout" || true' RETURN
# Trace mode: analyze Program(JSON) before passing to builder
if [ "${HAKO_SELFHOST_TRACE:-0}" = "1" ]; then
local prog_len=${#prog_json}
local loop_count=$(printf '%s' "$prog_json" | grep -o '"type":"Loop"' 2>/dev/null | wc -l | tr -d ' \n')
local cmp_count=$(printf '%s' "$prog_json" | grep -o '"type":"Compare"' 2>/dev/null | wc -l | tr -d ' \n')
local array_count=$(printf '%s' "$prog_json" | grep -o '"type":"MethodCall"' 2>/dev/null | wc -l | tr -d ' \n')
loop_count=${loop_count:-0}
cmp_count=${cmp_count:-0}
array_count=${array_count:-0}
local cwd="$(pwd)"
local toml_status="absent"
if [ -f "$ROOT/nyash.toml" ]; then
toml_status="present"
fi
echo "[builder/selfhost-first:trace] builder_box=$builder_box prog_json_len=$prog_len tokens=Loop:$loop_count,Compare:$cmp_count,MethodCall:$array_count cwd=$cwd nyash.toml=$toml_status" >&2
# Show first 200 chars of Program(JSON) for structural validation
local prog_head=$(printf '%s' "$prog_json" | head -c 200)
echo "[builder/selfhost-first:trace] prog_json_head: $prog_head..." >&2
fi
set +e
# Run from repo root to ensure nyash.toml is available for using resolution
# Capture both stdout and stderr (2>&1) instead of discarding stderr
local tmp_stderr; tmp_stderr=$(mktemp)
trap 'rm -f "$tmp_hako" "$tmp_stdout" "$tmp_stderr" || true' RETURN
if [ "${HAKO_SELFHOST_TRACE:-0}" = "1" ]; then
echo "[builder/selfhost-first:exec] Starting builder execution..." >&2
fi
# During builder execution the step budget defaults to unlimited (0) so that even large
# Program(JSON) inputs are not killed by the cap. To diagnose a suspected infinite loop,
# override HAKO_VM_MAX_STEPS / NYASH_VM_MAX_STEPS explicitly.
(cd "$ROOT" && \
HAKO_SELFHOST_NO_DELEGATE=1 \
HAKO_MIR_BUILDER_INTERNAL=1 HAKO_MIR_BUILDER_REGISTRY=1 \
HAKO_MIR_BUILDER_TRACE="${HAKO_SELFHOST_TRACE:-}" \
HAKO_MIR_BUILDER_LOOP_JSONFRAG="${HAKO_MIR_BUILDER_LOOP_JSONFRAG:-}" \
HAKO_MIR_BUILDER_LOOP_FORCE_JSONFRAG="${HAKO_MIR_BUILDER_LOOP_FORCE_JSONFRAG:-}" \
HAKO_MIR_BUILDER_JSONFRAG_NORMALIZE="${HAKO_MIR_BUILDER_JSONFRAG_NORMALIZE:-}" \
HAKO_MIR_BUILDER_JSONFRAG_PURIFY="${HAKO_MIR_BUILDER_JSONFRAG_PURIFY:-}" \
HAKO_MIR_BUILDER_METHODIZE="${HAKO_MIR_BUILDER_METHODIZE:-}" \
HAKO_MIR_BUILDER_NORMALIZE_TAG="${HAKO_MIR_BUILDER_NORMALIZE_TAG:-}" \
HAKO_MIR_BUILDER_DEBUG="${HAKO_MIR_BUILDER_DEBUG:-}" \
NYASH_DISABLE_PLUGINS="${NYASH_DISABLE_PLUGINS:-0}" NYASH_FILEBOX_MODE="core-ro" HAKO_PROVIDER_POLICY="safe-core-first" \
NYASH_ENABLE_USING=1 HAKO_ENABLE_USING=1 HAKO_USING_RESOLVER_FIRST=1 \
NYASH_PARSER_STAGE3=1 HAKO_PARSER_STAGE3=1 NYASH_PARSER_ALLOW_SEMICOLON=1 \
NYASH_USE_NY_COMPILER=0 HAKO_USE_NY_COMPILER=0 NYASH_DISABLE_NY_COMPILER=1 HAKO_DISABLE_NY_COMPILER=1 \
NYASH_MACRO_DISABLE=1 HAKO_MACRO_DISABLE=1 \
HAKO_VM_MAX_STEPS="${HAKO_VM_MAX_STEPS:-0}" NYASH_VM_MAX_STEPS="${NYASH_VM_MAX_STEPS:-0}" \
HAKO_BUILDER_PROGRAM_JSON="$prog_json" \
NYASH_VM_DUMP_MERGED_HAKO="${NYASH_VM_DUMP_MERGED_HAKO:-${HAKO_SELFHOST_DUMP_MERGED_HAKO:-0}}" \
NYASH_VM_DUMP_MERGED_HAKO_PATH="${NYASH_VM_DUMP_MERGED_HAKO_PATH:-${HAKO_SELFHOST_DUMP_MERGED_HAKO_PATH:-}}" \
"$NYASH_BIN" --backend vm "$tmp_hako" 2>"$tmp_stderr" | tee "$tmp_stdout" >/dev/null)
local rc=$?
set -e
if [ "${HAKO_SELFHOST_TRACE:-0}" = "1" ]; then
echo "[builder/selfhost-first:exec] Builder execution completed with rc=$rc" >&2
fi
# Enhanced failure diagnostics with comprehensive logging
if [ $rc -ne 0 ]; then
if [ "${HAKO_SELFHOST_NO_DELEGATE:-0}" = "1" ] || [ "${HAKO_SELFHOST_TRACE:-0}" = "1" ]; then
echo "[builder/selfhost-first:fail:child:rc=$rc]" >&2
echo "[builder/selfhost-first:fail:detail] First 20 lines of output:" >&2
head -n 20 "$tmp_stdout" >&2 || true
echo "[builder/selfhost-first:fail:detail] Last 80 lines of output:" >&2
tail -n 80 "$tmp_stdout" >&2 || true
if [ -s "$tmp_stderr" ]; then
echo "[builder/selfhost-first:fail:stderr] First 20 lines:" >&2
head -n 20 "$tmp_stderr" >&2 || true
echo "[builder/selfhost-first:fail:stderr] Last 40 lines:" >&2
tail -n 40 "$tmp_stderr" >&2 || true
# Pretty diagnostics for missing using modules
USING_MISSING=$(cat "$tmp_stdout" "$tmp_stderr" 2>/dev/null | grep -Eo "\[using\] not found: '[^']+'" | sort -u || true)
if [ -n "$USING_MISSING" ]; then
echo "[builder/selfhost-first:diagnose] Missing using modules detected:" >&2
echo "$USING_MISSING" >&2
echo "[builder/selfhost-first:diagnose] Hint: enable resolver-first (HAKO_USING_RESOLVER_FIRST=1) and ensure nyash.toml maps these modules." >&2
echo "[builder/selfhost-first:diagnose] Example entries (nyash.toml [modules]):" >&2
echo " \"selfhost.shared.json.core.json_canonical\" = \"lang/src/shared/json/json_canonical_box.hako\"" >&2
echo " \"selfhost.shared.common.common_imports\" = \"lang/src/shared/common/common_imports.hako\"" >&2
fi
fi
fi
# Don't return immediately - check for fallback below
fi
if [ $rc -eq 0 ] && ! grep -q "\[builder/selfhost-first:ok\]" "$tmp_stdout"; then
if [ "${HAKO_SELFHOST_NO_DELEGATE:-0}" = "1" ] || [ "${HAKO_SELFHOST_TRACE:-0}" = "1" ]; then
echo "[builder/selfhost-first:fail:no-ok-marker]" >&2
echo "[builder/selfhost-first:fail:detail] First 20 lines of output:" >&2
head -n 20 "$tmp_stdout" >&2 || true
echo "[builder/selfhost-first:fail:detail] Last 80 lines of output:" >&2
tail -n 80 "$tmp_stdout" >&2 || true
fi
rc=1
fi
# Try min builder fallback if enabled and initial builder failed
if [ "${HAKO_SELFHOST_TRY_MIN:-0}" = "1" ] && [ $rc -ne 0 ] && [ "$builder_box" != "hako.mir.builder.min" ]; then
if [ "${HAKO_SELFHOST_NO_DELEGATE:-0}" = "1" ] || [ "${HAKO_SELFHOST_TRACE:-0}" = "1" ]; then
echo "[builder/selfhost-first:trying-min-fallback]" >&2
fi
# Retry with min builder
HAKO_MIR_BUILDER_BOX="hako.mir.builder.min" try_selfhost_builder "$prog_json" "$out_path"
local fallback_rc=$?
if [ "${HAKO_SELFHOST_TRACE:-0}" = "1" ]; then
echo "[builder/selfhost-first:min-fallback:rc=$fallback_rc]" >&2
fi
return $fallback_rc
fi
# Return original failure if no fallback or if fallback not triggered
if [ $rc -ne 0 ]; then
return 1
fi
local mir
mir=$(awk '/\[MIR_OUT_BEGIN\]/{flag=1;next}/\[MIR_OUT_END\]/{flag=0}flag' "$tmp_stdout")
if [ -z "$mir" ]; then return 1; fi
# Surface key builder tags (e.g. multi-carrier detection) when trace is enabled,
# so that v2 smokes can grep them reliably without streaming the entire raw stdout.
if [ "${HAKO_SELFHOST_TRACE:-0}" = "1" ]; then
if grep -q "\[mirbuilder/internal/loop:multi_carrier:" "$tmp_stdout" 2>/dev/null; then
echo "[builder/selfhost-first:trace:multi_carrier] detected" >&2
grep "\[mirbuilder/internal/loop:multi_carrier:" "$tmp_stdout" >&2 || true
fi
if grep -q "\[funcs/basic:loop.multi_carrier\]" "$tmp_stdout" 2>/dev/null; then
echo "[builder/selfhost-first:trace:funcs:loop.multi_carrier]" >&2
grep "\[funcs/basic:loop.multi_carrier\]" "$tmp_stdout" >&2 || true
fi
fi
printf '%s' "$mir" > "$out_path"
echo "[OK] MIR JSON written (selfhost-first): $out_path"
return 0
}
# Provider-first delegate: call env.mirbuilder.emit(prog_json) and capture v1 JSON
try_provider_emit() {
local prog_json="$1" out_path="$2"
local tmp_hako; tmp_hako=$(mktemp --suffix .hako)
cat >"$tmp_hako" <<'HCODE'
static box Main { method main(args) {
local p = env.get("HAKO_BUILDER_PROGRAM_JSON")
if p == null { print("[provider/emit:nojson]"); return 1 }
local a = new ArrayBox(); a.push(p)
local out = hostbridge.extern_invoke("env.mirbuilder", "emit", a)
print("[provider/emit:ok]")
print("[MIR_OUT_BEGIN]")
print("" + out)
print("[MIR_OUT_END]")
return 0
} }
HCODE
local tmp_stdout; tmp_stdout=$(mktemp)
local tmp_stderr; tmp_stderr=$(mktemp)
trap 'rm -f "$tmp_hako" "$tmp_stdout" "$tmp_stderr" || true' RETURN
if [ "${HAKO_SELFHOST_TRACE:-0}" = "1" ]; then
echo "[provider/emit:trace] Starting provider emit..." >&2
fi
set +e
(cd "$ROOT" && \
NYASH_DISABLE_PLUGINS="${NYASH_DISABLE_PLUGINS:-0}" NYASH_FILEBOX_MODE="core-ro" \
NYASH_PARSER_STAGE3=1 HAKO_PARSER_STAGE3=1 NYASH_PARSER_ALLOW_SEMICOLON=1 \
HAKO_BUILDER_PROGRAM_JSON="$prog_json" \
NYASH_ENABLE_USING=1 HAKO_ENABLE_USING=1 HAKO_USING_RESOLVER_FIRST=1 \
"$NYASH_BIN" --backend vm "$tmp_hako" 2>"$tmp_stderr" | tee "$tmp_stdout" >/dev/null)
local rc=$?
set -e
if [ "${HAKO_SELFHOST_TRACE:-0}" = "1" ]; then
echo "[provider/emit:trace] Provider execution completed with rc=$rc" >&2
fi
if [ $rc -ne 0 ] || ! grep -q "\[provider/emit:ok\]" "$tmp_stdout"; then
if [ "${HAKO_SELFHOST_TRACE:-0}" = "1" ]; then
echo "[provider/emit:fail:rc=$rc]" >&2
if [ -s "$tmp_stderr" ]; then
echo "[provider/emit:fail:stderr] First 20 lines:" >&2
head -n 20 "$tmp_stderr" >&2 || true
echo "[provider/emit:fail:stderr] Last 40 lines:" >&2
tail -n 40 "$tmp_stderr" >&2 || true
fi
echo "[provider/emit:fail:stdout] Last 40 lines:" >&2
tail -n 40 "$tmp_stdout" >&2 || true
USING_MISSING=$(cat "$tmp_stdout" "$tmp_stderr" 2>/dev/null | grep -Eo "\[using\] not found: '[^']+'" | sort -u || true)
if [ -n "$USING_MISSING" ]; then
echo "[provider/emit:diagnose] Missing using modules detected:" >&2
echo "$USING_MISSING" >&2
echo "[provider/emit:diagnose] Hint: enable resolver-first (HAKO_USING_RESOLVER_FIRST=1) and ensure nyash.toml maps these modules." >&2
fi
fi
return 1
fi
local mir
mir=$(awk '/\[MIR_OUT_BEGIN\]/{flag=1;next}/\[MIR_OUT_END\]/{flag=0}flag' "$tmp_stdout")
if [ -z "$mir" ]; then
if [ "${HAKO_SELFHOST_TRACE:-0}" = "1" ]; then
echo "[provider/emit:fail:no-mir-output]" >&2
fi
return 1
fi
# Write raw MIR JSON first
printf '%s' "$mir" > "$out_path"
# Apply AotPrep via run_json when enabled
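# Example (illustrative): force the prep pass for a single emit without touching other defaults:
#   HAKO_APPLY_AOT_PREP=1 tools/hakorune_emit_mir.sh <input.hako> <out.json>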
if [ "${HAKO_APPLY_AOT_PREP:-0}" = "1" ] || [ "${NYASH_AOT_COLLECTIONS_HOT:-0}" = "1" ] || [ "${NYASH_LLVM_FAST:-0}" = "1" ] || [ "${NYASH_MIR_LOOP_HOIST:-0}" = "1" ]; then
[ "${HAKO_SELFHOST_TRACE:-0}" = "1" ] && echo "[provider/emit:trace] Applying AotPrep(run_json)..." >&2
local aot_runner; aot_runner=$(mktemp --suffix=.hako)
cat > "$aot_runner" << 'EOF'
using selfhost.llvm.ir.aot_prep as AotPrepBox
static box Main { method main(args) {
local src = env.get("HAKO_PREP_INPUT")
if src == null || src == "" { print("[prep:skip:empty]"); return 0 }
local out = AotPrepBox.run_json(src)
print("[PREP_OUT_BEGIN]")
print(out)
print("[PREP_OUT_END]")
return 0
} }
EOF
local aot_rc=0
local prep_stdout; prep_stdout=$(mktemp)
local prep_stderr; prep_stderr=$(mktemp)
set +e
HAKO_PREP_INPUT="$(cat "$out_path")" \
NYASH_FILEBOX_MODE=core-ro \
NYASH_PARSER_STAGE3=1 HAKO_PARSER_STAGE3=1 NYASH_PARSER_ALLOW_SEMICOLON=1 \
NYASH_ENABLE_USING=1 HAKO_ENABLE_USING=1 HAKO_USING_RESOLVER_FIRST=1 \
NYASH_AOT_COLLECTIONS_HOT=${NYASH_AOT_COLLECTIONS_HOT:-0} NYASH_LLVM_FAST=${NYASH_LLVM_FAST:-0} NYASH_MIR_LOOP_HOIST=${NYASH_MIR_LOOP_HOIST:-0} NYASH_AOT_MAP_KEY_MODE=${NYASH_AOT_MAP_KEY_MODE:-auto} \
NYASH_AOT_NUMERIC_CORE=${NYASH_AOT_NUMERIC_CORE:-0} NYASH_AOT_NUMERIC_CORE_TRACE=${NYASH_AOT_NUMERIC_CORE_TRACE:-0} \
HAKO_VM_MAX_STEPS="${HAKO_VM_MAX_STEPS:-0}" NYASH_VM_MAX_STEPS="${NYASH_VM_MAX_STEPS:-0}" \
"$NYASH_BIN" --backend vm "$aot_runner" >"$prep_stdout" 2>"$prep_stderr"
aot_rc=$?
set -e
if [ $aot_rc -eq 0 ] && grep -q "\[PREP_OUT_BEGIN\]" "$prep_stdout" && grep -q "\[PREP_OUT_END\]" "$prep_stdout"; then
awk '/\[PREP_OUT_BEGIN\]/{flag=1;next}/\[PREP_OUT_END\]/{flag=0}flag' "$prep_stdout" > "$out_path"
[ "${HAKO_SELFHOST_TRACE:-0}" = "1" ] && echo "[prep:ok] AotPrep applied (run_json)" >&2
# Surface numeric_core trace lines when requested (make [aot/numeric_core] visible on the provider path too)
if [ "${NYASH_AOT_NUMERIC_CORE_TRACE:-0}" = "1" ]; then
if command -v rg >/dev/null 2>&1; then
rg -n '^\[aot/numeric_core\]' "$prep_stdout" >&2 || true
else
grep '^\[aot/numeric_core\]' "$prep_stdout" >&2 || true
fi
fi
# Optional strict post-check: after AotPrep(run_json) the MIR JSON is expected
# to have MatI64.mul_naive lowered to Call when NYASH_AOT_NUMERIC_CORE=1.
if [ "${NYASH_AOT_NUMERIC_CORE_STRICT:-0}" = "1" ]; then
if rg -q '"op":"boxcall".*"method":"mul_naive"' "$out_path"; then
echo "[prep/numeric_core/strict] NYASH_AOT_NUMERIC_CORE_STRICT=1 but boxcall(\"mul_naive\") remains after AotPrep.run_json; inspect numeric_core.hako patterns." >&2
return 1
fi
fi
else
if [ "${HAKO_SELFHOST_TRACE:-0}" = "1" ]; then
echo "[prep:warn] AotPrep failed (rc=$aot_rc), using original MIR" >&2
if [ -s "$prep_stderr" ]; then
echo "[prep:stderr:tail]" >&2
tail -n 60 "$prep_stderr" >&2 || true
fi
fi
fi
rm -f "$aot_runner" "$prep_stdout" "$prep_stderr" 2>/dev/null || true
fi
echo "[OK] MIR JSON written (delegate:provider): $out_path"
return 0
}
# When forcing JSONFrag loop, default-enable normalize+purify (dev-only, no default changes)
if [ "${HAKO_MIR_BUILDER_LOOP_FORCE_JSONFRAG:-0}" = "1" ]; then
export HAKO_MIR_BUILDER_JSONFRAG_NORMALIZE="${HAKO_MIR_BUILDER_JSONFRAG_NORMALIZE:-1}"
export HAKO_MIR_BUILDER_JSONFRAG_PURIFY="${HAKO_MIR_BUILDER_JSONFRAG_PURIFY:-1}"
fi
if [ "${HAKO_SELFHOST_BUILDER_FIRST:-0}" = "1" ]; then
# Prefer Stage1 CLI (hakorune) when available to avoid Stage-3 parser issues
stage1_cli_bin="${HAKO_STAGE1_CLI:-$ROOT/lang/bin/hakorune}"
if [ -x "$stage1_cli_bin" ]; then
prog_tmp=$(mktemp --suffix .json)
printf '%s' "$PROG_JSON_OUT" > "$prog_tmp"
if "$stage1_cli_bin" emit mir-json --from-program-json "$prog_tmp" -o "$OUT" --quiet >/dev/null 2>&1 && [ -s "$OUT" ]; then
rm -f "$prog_tmp" || true
echo "[OK] MIR JSON written (selfhost-first:stage1-cli): $OUT"
exit 0
fi
rm -f "$prog_tmp" || true
if [ "${HAKO_SELFHOST_TRACE:-0}" = "1" ]; then
echo "[selfhost-first:stage1-cli] failed (binary: $stage1_cli_bin), falling back to legacy builder" >&2
fi
fi
if try_selfhost_builder "$PROG_JSON_OUT" "$OUT"; then
exit 0
fi
# Phase 25.1b: in selfhost-first mode, falling back from MirBuilder is forbidden.
# When the selfhost builder fails, exit non-zero and do not delegate to the provider path.
echo "[FAIL] selfhost-first failed (MirBuilder) and delegate is disabled in HAKO_SELFHOST_BUILDER_FIRST=1 mode" >&2
exit 1
fi
# Dev: force JsonFrag minimal loop even on provider-first path
if [ "${HAKO_MIR_BUILDER_LOOP_FORCE_JSONFRAG:-0}" = "1" ]; then
# Extract limit from Program(JSON) or source file
limit=$(printf '%s' "$PROG_JSON_OUT" | grep -o '"type":"Int","value":[0-9]*' | head -1 | grep -o '[0-9]*$' || cat "$CODE_TMP" | grep -o '[0-9]\+' | head -1 || echo "10")
echo "[emit/jsonfrag] provider-force min-loop MIR (dev-only)" >&2
cat > "$OUT" <<MIRJSON
{
"functions": [{
"name": "main",
"params": [],
"locals": [],
"blocks": [
{ "id": 0, "instructions": [
{"op":"const","dst":1,"value":{"type":"i64","value":0}},
{"op":"const","dst":2,"value":{"type":"i64","value": ${limit} }},
{"op":"jump","target":1}
]},
{ "id": 1, "instructions": [
{"op":"phi","dst":6,"incoming":[[2,0],[6,2]]},
{"op":"phi","dst":3,"incoming":[[1,0],[5,2]]},
{"op":"compare","operation":"<","lhs":3,"rhs":6,"dst":4},
{"op":"branch","cond":4,"then":2,"else":3}
]},
{ "id": 2, "instructions": [
{"op":"const","dst":10,"value":{"type":"i64","value":1}},
{"op":"binop","operation":"+","lhs":3,"rhs":10,"dst":5},
{"op":"jump","target":1}
]},
{ "id": 3, "instructions": [
{"op":"ret","value":3}
]}
]
}]
}
MIRJSON
echo "[OK] MIR JSON written (provider-force-jsonfrag): $OUT"
exit 0
fi
tmp_prog="/tmp/hako_emit_prog_$$.json"
trap 'rm -f "$tmp_prog" || true' EXIT
printf '%s' "$PROG_JSON_OUT" > "$tmp_prog"
# Provider-first delegate (pinned to v1): use env.mirbuilder.emit
if try_provider_emit "$PROG_JSON_OUT" "$OUT"; then
exit 0
fi
# Final fallback: legacy CLI conversion (env vars nudge v1 output)
if HAKO_STAGEB_FUNC_SCAN="${HAKO_STAGEB_FUNC_SCAN:-1}" \
HAKO_MIR_BUILDER_FUNCS="${HAKO_MIR_BUILDER_FUNCS:-}" \
HAKO_MIR_BUILDER_CALL_RESOLVE="${HAKO_MIR_BUILDER_CALL_RESOLVE:-}" \
NYASH_JSON_SCHEMA_V1=${NYASH_JSON_SCHEMA_V1:-1} \
NYASH_MIR_UNIFIED_CALL=${NYASH_MIR_UNIFIED_CALL:-1} \
"$NYASH_BIN" --program-json-to-mir "$OUT" --json-file "$tmp_prog" >/dev/null 2>&1; then
echo "[OK] MIR JSON written (delegate-legacy): $OUT"
exit 0
fi
echo "[FAIL] Program→MIR delegate failed (provider+legacy)" >&2
exit 1