Files
hakorune/tools/hako_check.sh
nyash-codex e948bcb4f8 feat(hako_check): Phase 154-170 hako_check dead code detection & JSON library design
🎉 Phases 154-170 complete! Dead code detection integrated into hako_check

📋 Phase 154: MIR CFG integration & HC020 design
- DeadBlockAnalyzerBox (570 lines) - box-based modularization
- Design for block-level unreachable detection (see the sketch after this list)
- 4 test cases plus a smoke test
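The core of the HC020 check is plain CFG reachability: any block not reachable from the entry block is dead. A minimal Rust sketch of that idea follows; the Block representation and names are assumptions for illustration, not the DeadBlockAnalyzerBox data model.

use std::collections::HashSet;

// Hypothetical block representation for illustration: an id plus successor ids.
struct Block {
    id: usize,
    successors: Vec<usize>,
}

// Walk the CFG from the entry block; any block never visited is unreachable.
fn dead_blocks(blocks: &[Block], entry: usize) -> Vec<usize> {
    let mut reachable = HashSet::new();
    let mut stack = vec![entry];
    while let Some(id) = stack.pop() {
        if reachable.insert(id) {
            if let Some(b) = blocks.iter().find(|b| b.id == id) {
                stack.extend(&b.successors);
            }
        }
    }
    blocks.iter().map(|b| b.id).filter(|id| !reachable.contains(id)).collect()
}

fn main() {
    // Block 2 has no path from entry block 0, so it is reported as dead.
    let cfg = vec![
        Block { id: 0, successors: vec![1] },
        Block { id: 1, successors: vec![] },
        Block { id: 2, successors: vec![1] },
    ];
    println!("{:?}", dead_blocks(&cfg, 0)); // prints [2]
}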

🔧 Phase 155: MIR CFG data bridge (MVP)
- src/runner/mir_json_emit.rs: add CFG data to the emitted MIR JSON (an illustrative shape is sketched after this list)
- tools/hako_check/analysis_consumer.hako: add an empty CFG structure
- Groundwork for Phase 156 complete
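For orientation only, here is the kind of CFG section such a bridge could add to the MIR JSON, sketched with Rust's serde_json; the actual field names emitted by mir_json_emit.rs are not reproduced here and may differ.

use serde_json::json;

fn main() {
    // Assumed shape: one CFG per function, each block listing its successor ids.
    let cfg_section = json!({
        "cfg": {
            "functions": [{
                "name": "main",
                "entry": 0,
                "blocks": [
                    { "id": 0, "successors": [1] },
                    { "id": 1, "successors": [] }
                ]
            }]
        }
    });
    println!("{}", serde_json::to_string_pretty(&cfg_section).unwrap());
}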

🌉 Phase 156: hako_check MIR pipeline integration
- hako_check.sh: add a MIR JSON generation step
- cli.hako: handle the --mir-json-content argument
- analysis_consumer.hako: ~320-line hand-written JSON parser
- HC020 now actually detects blocks!

📐 Phase 170: .hako JSON library design
- Inventory of existing JSON usage sites (Program/MIR/CFG)
- Detailed analysis of the Phase 156 JSON parser (289 lines)
- JsonParserBox API draft finalized (see the sketch after this list)
- Mapping of planned usage sites (estimated 96% reduction)
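The goal for the call sites is "parse once, then navigate with a few accessors" instead of a hand-rolled scanner. The JsonParserBox draft itself is a .hako design; the Rust sketch below (serde_json, with assumed field names) only illustrates why a CFG consumer can shrink to roughly ten lines once a shared JSON parser exists.

use serde_json::Value;

// Collect the block ids of the first function from a MIR JSON document.
// The field names ("cfg", "functions", "blocks", "id") are assumptions.
fn block_ids(mir_json: &str) -> Vec<u64> {
    let doc: Value = match serde_json::from_str(mir_json) {
        Ok(v) => v,
        Err(_) => return Vec::new(),
    };
    doc["cfg"]["functions"][0]["blocks"]
        .as_array()
        .map(|blocks| blocks.iter().filter_map(|b| b["id"].as_u64()).collect())
        .unwrap_or_default()
}

fn main() {
    let json = r#"{"cfg":{"functions":[{"blocks":[{"id":0},{"id":1}]}]}}"#;
    println!("{:?}", block_ids(json)); // prints [0, 1]
}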

🏗️ Design principles:
- Box-based modularization pattern (carried over from Phase 153)
- Minimize changes to the Rust layer (prefer .hako + shell scripts)
- Designed with self-hosting in mind

📊 Expected impact:
- hako_check JSON parser: 289 lines → ~10 lines (96% reduction)
- JsonParserBox as a shared library (Phase 171+)
- Shared use across hako_check, selfhost, and the debug tools

🤖 Generated with Claude Code

Co-Authored-By: Claude <noreply@anthropic.com>
2025-12-04 16:16:56 +09:00

143 lines
4.2 KiB
Bash

#!/usr/bin/env bash
set -euo pipefail
ROOT="$(cd "$(dirname "$0")/.." && pwd)"
BIN="${NYASH_BIN:-$ROOT/target/release/hakorune}"
if [ ! -x "$BIN" ]; then
  echo "[ERROR] hakorune binary not found: $BIN" >&2
  echo "Run: cargo build --release" >&2
  exit 2
fi
if [ $# -lt 1 ]; then
  echo "Usage: $0 [--format text|dot|json-lsp] <file-or-dir> [more...]" >&2
  exit 2
fi
fail=0
FORMAT="text"
if [ "${1:-}" = "--format" ] && [ -n "${2:-}" ]; then
FORMAT="$2"; shift 2 || true
fi
list_targets() {
  local p="$1"
  if [ -d "$p" ]; then
    find "$p" -type f -name '*.hako' | sort
  else
    echo "$p"
  fi
}
run_one() {
  local f="$1"
  # Run analyzer main with inlined source text to avoid FileBox dependency
  local text
  text="$(sed 's/\r$//' "$f")"
  # Phase 156: Generate MIR JSON for CFG-based analysis and pass inline
  local mir_json_path="/tmp/hako_check_mir_$$.json"
  local mir_json_content=""
  if [ -x "$ROOT/tools/hakorune_emit_mir.sh" ]; then
    "$ROOT/tools/hakorune_emit_mir.sh" "$f" "$mir_json_path" >/dev/null 2>&1 || true
    if [ -f "$mir_json_path" ]; then
      mir_json_content="$(cat "$mir_json_path")"
    fi
  fi
  # Build args array with optional MIR JSON
  local args_arr=("--source-file" "$f" "$text")
  if [ -n "$mir_json_content" ]; then
    args_arr+=("--mir-json-content" "$mir_json_content")
  fi
  NYASH_DISABLE_PLUGINS=1 \
    NYASH_BOX_FACTORY_POLICY=builtin_first \
    NYASH_DISABLE_NY_COMPILER=1 \
    HAKO_DISABLE_NY_COMPILER=1 \
    NYASH_FEATURES="${NYASH_FEATURES:-stage3}" \
    NYASH_PARSER_SEAM_TOLERANT=1 \
    HAKO_PARSER_SEAM_TOLERANT=1 \
    NYASH_PARSER_ALLOW_SEMICOLON=1 \
    NYASH_ENABLE_USING=1 \
    HAKO_ENABLE_USING=1 \
    NYASH_USING_AST=1 \
    NYASH_NY_COMPILER_TIMEOUT_MS="${NYASH_NY_COMPILER_TIMEOUT_MS:-8000}" \
    "$BIN" --backend vm "$ROOT/tools/hako_check/cli.hako" -- "${args_arr[@]}" \
    >"/tmp/hako_lint_out_$$.log" 2>&1 || true
  local out rc
  out="$(cat "/tmp/hako_lint_out_$$.log")"; rc=0
  # Extract RC: the analyzer reports its result as an "RC: <n>" line on stdout;
  # a missing RC line is treated as a failure.
  if echo "$out" | grep -q '^RC: '; then
    rc="$(echo "$out" | sed -n 's/^RC: //p' | tail -n1)"
  else
    rc=1
  fi
  if [ "$rc" != "0" ]; then
    echo "$out" | sed -n '1,200p'
    fail=$((fail+1))
  fi
  rm -f "/tmp/hako_lint_out_$$.log" "$mir_json_path"
}
if [ "$FORMAT" = "dot" ]; then
# Aggregate all targets and render DOT once
TMP_LIST="/tmp/hako_targets_$$.txt"; : >"$TMP_LIST"
for p in "$@"; do list_targets "$p" >>"$TMP_LIST"; done
mapfile -t FILES <"$TMP_LIST"
rm -f "$TMP_LIST"
NYASH_DISABLE_PLUGINS=1 \
NYASH_BOX_FACTORY_POLICY=builtin_first \
NYASH_DISABLE_NY_COMPILER=1 \
HAKO_DISABLE_NY_COMPILER=1 \
NYASH_FEATURES="${NYASH_FEATURES:-stage3}" \
NYASH_PARSER_SEAM_TOLERANT=1 \
HAKO_PARSER_SEAM_TOLERANT=1 \
NYASH_PARSER_ALLOW_SEMICOLON=1 \
NYASH_ENABLE_USING=1 \
HAKO_ENABLE_USING=1 \
NYASH_USING_AST=1 \
NYASH_JSON_ONLY=1 \
NYASH_NY_COMPILER_TIMEOUT_MS="${NYASH_NY_COMPILER_TIMEOUT_MS:-8000}" \
"$BIN" --backend vm "$ROOT/tools/hako_check/cli.hako" -- --format dot "${FILES[@]}" \
>"/tmp/hako_lint_out_$$.log" 2>/tmp/hako_lint_err_$$.log || true
rc=$?
# Only print DOT graph body to STDOUT
awk '/^digraph /, /^}/' "/tmp/hako_lint_out_$$.log"
rm -f "/tmp/hako_lint_out_$$.log" "/tmp/hako_lint_err_$$.log"
exit $([ "$rc" -eq 0 ] && echo 0 || echo 1)
elif [ "$FORMAT" = "json-lsp" ]; then
# Aggregate and emit pure JSON (no summaries). Exit code = findings count.
TMP_LIST="/tmp/hako_targets_$$.txt"; : >"$TMP_LIST"
for p in "$@"; do list_targets "$p" >>"$TMP_LIST"; done
mapfile -t FILES <"$TMP_LIST"
rm -f "$TMP_LIST"
NYASH_DISABLE_PLUGINS=1 \
NYASH_BOX_FACTORY_POLICY=builtin_first \
NYASH_DISABLE_NY_COMPILER=1 \
HAKO_DISABLE_NY_COMPILER=1 \
NYASH_FEATURES="${NYASH_FEATURES:-stage3}" \
NYASH_PARSER_SEAM_TOLERANT=1 \
HAKO_PARSER_SEAM_TOLERANT=1 \
NYASH_PARSER_ALLOW_SEMICOLON=1 \
NYASH_ENABLE_USING=1 \
HAKO_ENABLE_USING=1 \
NYASH_USING_AST=1 \
NYASH_JSON_ONLY=1 \
NYASH_NY_COMPILER_TIMEOUT_MS="${NYASH_NY_COMPILER_TIMEOUT_MS:-8000}" \
"$BIN" --backend vm "$ROOT/tools/hako_check/cli.hako" -- --format json-lsp "${FILES[@]}"
exit $?
else
  for p in "$@"; do
    while IFS= read -r f; do run_one "$f"; done < <(list_targets "$p")
  done
fi
if [ $fail -ne 0 ]; then
  echo "[lint/summary] failures: $fail" >&2
  exit 1
fi
echo "[lint/summary] all clear" >&2
exit 0