chore: Phase 25.1 完了 - LoopForm v2/Stage1 CLI/環境変数削減 + Phase 26-D からの変更

Phase 25.1 完了成果:
-  LoopForm v2 テスト・ドキュメント・コメント完備
  - 4ケース(A/B/C/D)完全テストカバレッジ
  - 最小再現ケース作成(SSAバグ調査用)
  - SSOT文書作成(loopform_ssot.md)
  - 全ソースに [LoopForm] コメントタグ追加

-  Stage-1 CLI デバッグ環境構築
  - stage1_cli.hako 実装
  - stage1_bridge.rs ブリッジ実装
  - デバッグツール作成(stage1_debug.sh/stage1_minimal.sh)
  - アーキテクチャ改善提案文書

-  環境変数削減計画策定
  - 25変数の完全調査・分類
  - 6段階削減ロードマップ(25→5、80%削減)
  - 即時削除可能変数特定(NYASH_CONFIG/NYASH_DEBUG)

Phase 26-D からの累積変更:
- PHI実装改善(ExitPhiBuilder/HeaderPhiBuilder等)
- MIRビルダーリファクタリング
- 型伝播・最適化パス改善
- その他約300ファイルの累積変更

🎯 技術的成果:
- SSAバグ根本原因特定(条件分岐内loop変数変更)
- Region+next_iパターン適用完了(UsingCollectorBox等)
- LoopFormパターン文書化・テスト化完了
- セルフホスティング基盤強化

Co-Authored-By: Claude <noreply@anthropic.com>
Co-Authored-By: ChatGPT <noreply@openai.com>
Co-Authored-By: Task Assistant <task@anthropic.com>
This commit is contained in:
nyash-codex
2025-11-21 06:25:17 +09:00
parent baf028a94f
commit f9d100ce01
366 changed files with 14322 additions and 5236 deletions

View File

@ -122,15 +122,22 @@ fn methodize_calls(root: &mut Value) -> Result<bool, String> {
for inst in insts.iter() {
if let Some(obj) = inst.as_object() {
if obj.get("op").and_then(Value::as_str) == Some("const") {
if let (Some(dst), Some(val)) = (obj.get("dst").and_then(Value::as_i64), obj.get("value")) {
if let (Some(dst), Some(val)) =
(obj.get("dst").and_then(Value::as_i64), obj.get("value"))
{
let mut s: Option<String> = None;
if let Some(st) = val.as_str() { s = Some(st.to_string()); }
else if let Some(vobj) = val.as_object() {
if let Some(Value::String(st)) = vobj.get("value") { s = Some(st.clone()); }
if let Some(st) = val.as_str() {
s = Some(st.to_string());
} else if let Some(vobj) = val.as_object() {
if let Some(Value::String(st)) = vobj.get("value") {
s = Some(st.clone());
}
}
if let Some(name) = s {
// Accept only names with dot separator
if name.contains('.') { reg_name.insert(dst, name); }
if name.contains('.') {
reg_name.insert(dst, name);
}
}
}
}
@ -138,14 +145,28 @@ fn methodize_calls(root: &mut Value) -> Result<bool, String> {
}
// Second pass: rewrite calls
for inst in insts.iter_mut() {
let Some(obj) = inst.as_object_mut() else { continue };
if obj.get("op").and_then(Value::as_str) != Some("call") { continue; }
let Some(func_reg) = obj.get("func").and_then(Value::as_i64) else { continue };
let Some(name) = reg_name.get(&func_reg).cloned() else { continue };
let Some(obj) = inst.as_object_mut() else {
continue;
};
if obj.get("op").and_then(Value::as_str) != Some("call") {
continue;
}
let Some(func_reg) = obj.get("func").and_then(Value::as_i64) else {
continue;
};
let Some(name) = reg_name.get(&func_reg).cloned() else {
continue;
};
// Split Box.method[/N]
let mut parts = name.split('.');
let box_name = match parts.next() { Some(x) => x, None => continue };
let rest = match parts.next() { Some(x) => x, None => continue };
let box_name = match parts.next() {
Some(x) => x,
None => continue,
};
let rest = match parts.next() {
Some(x) => x,
None => continue,
};
let method = rest.split('/').next().unwrap_or(rest);
// Build mir_call object
@ -163,7 +184,9 @@ fn methodize_calls(root: &mut Value) -> Result<bool, String> {
mir_call.insert("effects".to_string(), Value::Array(vec![]));
obj.insert("op".to_string(), Value::String("mir_call".into()));
if let Some(d) = dst { obj.insert("dst".to_string(), d); }
if let Some(d) = dst {
obj.insert("dst".to_string(), d);
}
obj.remove("func");
obj.insert("mir_call".to_string(), Value::Object(mir_call));
changed = true;

View File

@ -1,5 +1,8 @@
//! Common diagnostics helpers (concise, centralized)
use crate::parser::ParseError;
use crate::tokenizer::TokenizeError;
/// Whether provider logs should be emitted under current policy.
/// quiet_pipe usually reflects NYASH_JSON_ONLY; allowing override with HAKO_PROVIDER_TRACE=1.
pub fn provider_log_enabled(quiet_pipe: bool) -> bool {
@ -15,14 +18,102 @@ pub fn provider_log_info(msg: &str) {
pub fn provider_log_select(box_name: &str, ring: &str, source: &str, caps: Option<&str>) {
match caps {
Some(c) if !c.is_empty() => {
eprintln!("[provider/select:{} ring={} src={} caps={}]", box_name, ring, source, c);
eprintln!(
"[provider/select:{} ring={} src={} caps={}]",
box_name, ring, source, c
);
}
_ => {
eprintln!("[provider/select:{} ring={} src={}]", box_name, ring, source);
eprintln!(
"[provider/select:{} ring={} src={}]",
box_name, ring, source
);
}
}
}
/// Emit a Fail-Fast tag for provider fallback/selection errors.
pub fn failfast_provider(reason: &str) { eprintln!("[failfast/provider/{}]", reason); }
pub fn failfast_provider(reason: &str) {
eprintln!("[failfast/provider/{}]", reason);
}
/// Print a parse error with enriched context (source excerpt + caret + origin mapping).
pub fn print_parse_error_with_context(filename: &str, src: &str, err: &ParseError) {
    eprintln!("❌ Parse error in {}: {}", filename, err);
    let (line_opt, col_opt) = extract_line_col(err);
    // Without a line number there is nothing more to show.
    let Some(line) = line_opt else { return };
    print_source_snippet(filename, src, line, col_opt);
    // If the file was produced by a prelude/main text merge, report where the
    // merged line originally came from (only when it is a different file).
    let mapped = crate::runner::modes::common_util::resolve::map_merged_line_to_origin(line);
    if let Some((origin_file, origin_line)) = mapped {
        if origin_file != filename {
            eprintln!(
                "[parse/context] merged origin: {}:{} (from merged line {})",
                origin_file, origin_line, line
            );
        }
    }
}
/// Extract a (line, column) pair from a parse error, when the variant carries one.
/// Returns (None, None) for errors with no positional information.
fn extract_line_col(err: &ParseError) -> (Option<usize>, Option<usize>) {
    match err {
        // Variants that carry a line but no column.
        ParseError::UnexpectedToken { line, .. }
        | ParseError::InvalidExpression { line }
        | ParseError::InvalidStatement { line }
        | ParseError::UnsupportedIdentifier { line, .. }
        | ParseError::InfiniteLoop { line, .. }
        | ParseError::TransparencySystemRemoved { line, .. }
        | ParseError::UnsupportedNamespace { line, .. }
        | ParseError::ExpectedIdentifier { line } => (Some(*line), None),
        // Variants with no positional information at all.
        ParseError::UnexpectedEOF | ParseError::CircularDependency { .. } => (None, None),
        // Tokenizer errors: only UnexpectedCharacter knows its column.
        ParseError::TokenizeError(te) => match te {
            TokenizeError::UnexpectedCharacter { line, column, .. } => (Some(*line), Some(*column)),
            TokenizeError::UnterminatedString { line }
            | TokenizeError::InvalidNumber { line }
            | TokenizeError::UnterminatedComment { line } => (Some(*line), None),
        },
    }
}
/// Print a short excerpt of `src` around 1-based `line` to stderr, with a `>`
/// marker on the offending line and, when `col` is known, a caret underneath.
///
/// Silently does nothing for empty sources or out-of-range lines.
/// Fix: removed the dead local `idx`, which was assigned on every loop
/// iteration but never read (dead-code warning, no behavioral role).
fn print_source_snippet(filename: &str, src: &str, line: usize, col: Option<usize>) {
    if src.is_empty() {
        return;
    }
    let lines: Vec<&str> = src.lines().collect();
    if line == 0 || line > lines.len() {
        return;
    }
    // Show up to two lines of leading context and one trailing line.
    let start = line.saturating_sub(2).max(1);
    let end = (line + 1).min(lines.len());
    eprintln!("[parse/context] in {}", filename);
    for ln in start..=end {
        let text = lines[ln - 1];
        let marker = if ln == line { ">" } else { " " };
        eprintln!("{} {:6} | {}", marker, ln, text);
    }
    if let Some(col) = col {
        // Redundant with the range check above, kept as a cheap safety guard.
        if line <= lines.len() {
            let text = lines[line - 1];
            let mut underline = String::new();
            for (i, ch) in text.chars().enumerate() {
                if i + 1 >= col {
                    break;
                }
                // Preserve tabs visually; spaces elsewhere
                underline.push(if ch == '\t' { '\t' } else { ' ' });
            }
            let pad = " "; // align under " LNNNNN |" (NOTE(review): width may have been lost in rendering — confirm against original)
            eprintln!(" {}{}^", pad, underline);
        }
    }
}

View File

@ -52,11 +52,7 @@ pub fn llvmlite_emit_object(
// Verify output
match std::fs::metadata(out_path) {
Ok(meta) if meta.len() > 0 => {
crate::cli_v!(
"[LLVM] object emitted: {} ({} bytes)",
out_path,
meta.len()
);
crate::cli_v!("[LLVM] object emitted: {} ({} bytes)", out_path, meta.len());
Ok(())
}
_ => Err(format!("harness output not found or empty: {}", out_path)),
@ -67,7 +63,13 @@ pub fn llvmlite_emit_object(
fn resolve_ny_llvmc() -> std::path::PathBuf {
std::env::var("NYASH_NY_LLVM_COMPILER")
.ok()
.and_then(|s| if !s.is_empty() { Some(std::path::PathBuf::from(s)) } else { None })
.and_then(|s| {
if !s.is_empty() {
Some(std::path::PathBuf::from(s))
} else {
None
}
})
.or_else(|| which::which("ny-llvmc").ok())
.unwrap_or_else(|| std::path::PathBuf::from("target/release/ny-llvmc"))
}
@ -103,9 +105,24 @@ pub fn ny_llvmc_emit_exe_lib(
.arg("exe")
.arg("--out")
.arg(exe_out);
let default_nyrt = std::env::var("NYASH_EMIT_EXE_NYRT") .ok() .or_else(|| std::env::var("NYASH_ROOT").ok().map(|r| format!("{}/target/release", r))) .unwrap_or_else(|| "target/release".to_string());
if let Some(dir) = nyrt_dir { cmd.arg("--nyrt").arg(dir); } else { cmd.arg("--nyrt").arg(default_nyrt); }
if let Some(flags) = extra_libs { if !flags.trim().is_empty() { cmd.arg("--libs").arg(flags); } }
let default_nyrt = std::env::var("NYASH_EMIT_EXE_NYRT")
.ok()
.or_else(|| {
std::env::var("NYASH_ROOT")
.ok()
.map(|r| format!("{}/target/release", r))
})
.unwrap_or_else(|| "target/release".to_string());
if let Some(dir) = nyrt_dir {
cmd.arg("--nyrt").arg(dir);
} else {
cmd.arg("--nyrt").arg(default_nyrt);
}
if let Some(flags) = extra_libs {
if !flags.trim().is_empty() {
cmd.arg("--libs").arg(flags);
}
}
let status = cmd.status().map_err(|e| {
let prog_path = std::path::Path::new(cmd.get_program());
format!(
@ -147,9 +164,24 @@ pub fn ny_llvmc_emit_exe_bin(
.arg("exe")
.arg("--out")
.arg(exe_out);
let default_nyrt = std::env::var("NYASH_EMIT_EXE_NYRT") .ok() .or_else(|| std::env::var("NYASH_ROOT").ok().map(|r| format!("{}/target/release", r))) .unwrap_or_else(|| "target/release".to_string());
if let Some(dir) = nyrt_dir { cmd.arg("--nyrt").arg(dir); } else { cmd.arg("--nyrt").arg(default_nyrt); }
if let Some(flags) = extra_libs { if !flags.trim().is_empty() { cmd.arg("--libs").arg(flags); } }
let default_nyrt = std::env::var("NYASH_EMIT_EXE_NYRT")
.ok()
.or_else(|| {
std::env::var("NYASH_ROOT")
.ok()
.map(|r| format!("{}/target/release", r))
})
.unwrap_or_else(|| "target/release".to_string());
if let Some(dir) = nyrt_dir {
cmd.arg("--nyrt").arg(dir);
} else {
cmd.arg("--nyrt").arg(default_nyrt);
}
if let Some(flags) = extra_libs {
if !flags.trim().is_empty() {
cmd.arg("--libs").arg(flags);
}
}
let status = cmd.status().map_err(|e| {
let prog_path = std::path::Path::new(cmd.get_program());
format!(
@ -176,9 +208,11 @@ pub fn run_executable(
timeout_ms: u64,
) -> Result<(i32, bool, String), String> {
let mut cmd = std::process::Command::new(exe_path);
for a in args { cmd.arg(a); }
let out = super::io::spawn_with_timeout(cmd, timeout_ms)
.map_err(|e| format!("spawn exe: {}", e))?;
for a in args {
cmd.arg(a);
}
let out =
super::io::spawn_with_timeout(cmd, timeout_ms).map_err(|e| format!("spawn exe: {}", e))?;
let code = out.exit_code.unwrap_or(1);
let stdout_text = String::from_utf8_lossy(&out.stdout).into_owned();
Ok((code, out.timed_out, stdout_text))

View File

@ -22,13 +22,15 @@ pub fn strip_local_decl(s: &str) -> String {
for line in s.lines() {
let bytes = line.as_bytes();
let mut i = 0;
while i < bytes.len() && (bytes[i] == b' ' || bytes[i] == b'\t') { i += 1; }
while i < bytes.len() && (bytes[i] == b' ' || bytes[i] == b'\t') {
i += 1;
}
let mut stripped = false;
// Only strip `local ` if it's at the very beginning (i == 0)
// Keep `local ` inside blocks (i > 0) to preserve variable declarations
if i == 0 && i + 6 <= bytes.len() && &bytes[i..i+6] == b"local " {
if i == 0 && i + 6 <= bytes.len() && &bytes[i..i + 6] == b"local " {
out.push_str(&line[..i]);
out.push_str(&line[i+6..]);
out.push_str(&line[i + 6..]);
out.push('\n');
stripped = true;
}
@ -45,7 +47,10 @@ pub fn strip_local_decl(s: &str) -> String {
pub fn fail_fast_on_hako() -> bool {
// Default: OFF仕様不変拡張子だけで拒否しない
// 明示時のみ ONbring-up やデバッグ用途)。
match std::env::var("HAKO_FAIL_FAST_ON_HAKO_IN_NYASH_VM").ok().as_deref() {
match std::env::var("HAKO_FAIL_FAST_ON_HAKO_IN_NYASH_VM")
.ok()
.as_deref()
{
Some("1") | Some("true") | Some("on") => true,
_ => false,
}

View File

@ -24,7 +24,10 @@ pub fn spawn_with_timeout(mut cmd: Command, timeout_ms: u64) -> std::io::Result<
let mut exit_status: Option<std::process::ExitStatus> = None;
loop {
match child.try_wait()? {
Some(status) => { exit_status = Some(status); break },
Some(status) => {
exit_status = Some(status);
break;
}
None => {
if start.elapsed() >= Duration::from_millis(timeout_ms) {
let _ = child.kill();
@ -46,7 +49,9 @@ pub fn spawn_with_timeout(mut cmd: Command, timeout_ms: u64) -> std::io::Result<
}
let (status_ok, exit_code) = if let Some(st) = exit_status {
(st.success(), st.code())
} else { (false, None) };
} else {
(false, None)
};
Ok(ChildOutput {
stdout: out_buf,
stderr: err_buf,

View File

@ -4,14 +4,14 @@
* Minimal extraction to reduce duplication and prepare for full split.
*/
pub mod pyvm;
pub mod selfhost_exe;
pub mod io;
pub mod selfhost;
pub mod resolve;
pub mod exec;
pub mod core_bridge;
pub mod diag;
pub mod exec;
pub mod hako;
pub mod io;
pub mod plugin_guard;
pub mod provider_registry;
pub mod diag;
pub mod pyvm;
pub mod resolve;
pub mod selfhost;
pub mod selfhost_exe;

View File

@ -31,8 +31,7 @@ static PROVIDER_FACTORIES: OnceLock<Mutex<HashMap<String, Vec<Arc<dyn ProviderFa
/// Register a provider factory (called by builtin/dynamic loaders)
pub fn register_provider_factory(factory: Arc<dyn ProviderFactory>) {
let registry = PROVIDER_FACTORIES
.get_or_init(|| Mutex::new(HashMap::new()));
let registry = PROVIDER_FACTORIES.get_or_init(|| Mutex::new(HashMap::new()));
let mut guard = registry.lock().unwrap();
let key = factory.box_name().to_string();
guard.entry(key).or_default().push(factory);
@ -42,16 +41,23 @@ pub fn register_provider_factory(factory: Arc<dyn ProviderFactory>) {
struct CoreRoFileProviderFactory;
impl ProviderFactory for CoreRoFileProviderFactory {
fn box_name(&self) -> &str { "FileBox" }
fn create_provider(&self) -> Arc<dyn FileIo> { Arc::new(CoreRoFileIo::new()) }
fn is_available(&self) -> bool { true }
fn priority(&self) -> i32 { -100 } // ring1: lower than any plugin/provider
fn box_name(&self) -> &str {
"FileBox"
}
fn create_provider(&self) -> Arc<dyn FileIo> {
Arc::new(CoreRoFileIo::new())
}
fn is_available(&self) -> bool {
true
}
fn priority(&self) -> i32 {
-100
} // ring1: lower than any plugin/provider
}
/// Ensure ring1 (corero) provider is present in the registry
fn ensure_builtin_file_provider_registered() {
let reg = PROVIDER_FACTORIES
.get_or_init(|| Mutex::new(HashMap::new()));
let reg = PROVIDER_FACTORIES.get_or_init(|| Mutex::new(HashMap::new()));
let mut guard = reg.lock().unwrap();
let list = guard.entry("FileBox".to_string()).or_default();
// keep ring1 present for safety; avoid duplicates by checking any corero present by priority
@ -63,7 +69,9 @@ fn ensure_builtin_file_provider_registered() {
/// Backward-compat public readers for existing callers (if any)
#[allow(dead_code)]
pub fn read_filebox_mode_from_env() -> FileBoxMode { provider_env::filebox_mode_from_env() }
pub fn read_filebox_mode_from_env() -> FileBoxMode {
provider_env::filebox_mode_from_env()
}
/// Select provider based on mode and registered factories (SSOT)
#[allow(dead_code)]
@ -93,7 +101,10 @@ pub fn select_file_provider(mode: FileBoxMode) -> Arc<dyn FileIo> {
ProviderPolicy::StrictPluginFirst => {
if let Some(factory) = factories.first() {
if diag::provider_log_enabled(quiet_pipe) {
diag::provider_log_info(&format!("FileBox: using registered provider (priority={})", factory.priority()));
diag::provider_log_info(&format!(
"FileBox: using registered provider (priority={})",
factory.priority()
));
diag::provider_log_select("FileBox", "plugin", "dynamic", None);
}
return factory.create_provider();
@ -111,7 +122,10 @@ pub fn select_file_provider(mode: FileBoxMode) -> Arc<dyn FileIo> {
// Fallback to first available (plugin)
if let Some(factory) = factories.first() {
if diag::provider_log_enabled(quiet_pipe) {
diag::provider_log_info(&format!("FileBox: using registered provider (priority={})", factory.priority()));
diag::provider_log_info(&format!(
"FileBox: using registered provider (priority={})",
factory.priority()
));
diag::provider_log_select("FileBox", "plugin", "dynamic", None);
}
return factory.create_provider();
@ -155,7 +169,10 @@ pub fn select_file_provider(mode: FileBoxMode) -> Arc<dyn FileIo> {
if let Some(factory) = factories.first() {
if diag::provider_log_enabled(quiet_pipe) {
diag::provider_log_info(&format!("FileBox: using plugin-only provider (priority={})", factory.priority()));
diag::provider_log_info(&format!(
"FileBox: using plugin-only provider (priority={})",
factory.priority()
));
diag::provider_log_select("FileBox", "plugin", "dynamic", None);
}
return factory.create_provider();
@ -179,8 +196,8 @@ pub fn select_file_provider(mode: FileBoxMode) -> Arc<dyn FileIo> {
#[derive(Clone, Debug)]
struct ProviderDescriptor {
box_name: &'static str,
ring: &'static str, // "0" | "1" | "plugin"
source: &'static str, // "static" | "dynamic"
ring: &'static str, // "0" | "1" | "plugin"
source: &'static str, // "static" | "dynamic"
capabilities: &'static [&'static str], // e.g., ["read"]
priority: i32,
}

View File

@ -9,18 +9,26 @@ pub fn run_pyvm_harness(module: &crate::mir::MirModule, tag: &str) -> Result<i32
if !runner_buf.exists() {
if let Ok(root) = std::env::var("NYASH_ROOT") {
let alt = std::path::Path::new(&root).join("tools/pyvm_runner.py");
if alt.exists() { runner_buf = alt; }
if alt.exists() {
runner_buf = alt;
}
}
}
if !runner_buf.exists() {
return Err(format!("PyVM runner not found: tools/pyvm_runner.py (cwd) or $NYASH_ROOT/tools/pyvm_runner.py"));
return Err(format!(
"PyVM runner not found: tools/pyvm_runner.py (cwd) or $NYASH_ROOT/tools/pyvm_runner.py"
));
}
let tmp_dir = std::path::Path::new("tmp");
let _ = std::fs::create_dir_all(tmp_dir);
let mir_json_path = tmp_dir.join("nyash_pyvm_mir.json");
crate::runner::mir_json_emit::emit_mir_json_for_harness_bin(module, &mir_json_path)
.map_err(|e| format!("PyVM MIR JSON emit error: {}", e))?;
crate::cli_v!("[ny-compiler] using PyVM ({} ) → {}", tag, mir_json_path.display());
crate::cli_v!(
"[ny-compiler] using PyVM ({} ) → {}",
tag,
mir_json_path.display()
);
// Determine entry function (prefer Main.main; top-level main only if allowed)
let allow_top = crate::config::env::entry_allow_toplevel_main();
let entry = if module.functions.contains_key("Main.main") {
@ -71,18 +79,26 @@ pub fn run_pyvm_harness_lib(module: &nyash_rust::mir::MirModule, tag: &str) -> R
if !runner_buf.exists() {
if let Ok(root) = std::env::var("NYASH_ROOT") {
let alt = std::path::Path::new(&root).join("tools/pyvm_runner.py");
if alt.exists() { runner_buf = alt; }
if alt.exists() {
runner_buf = alt;
}
}
}
if !runner_buf.exists() {
return Err(format!("PyVM runner not found: tools/pyvm_runner.py (cwd) or $NYASH_ROOT/tools/pyvm_runner.py"));
return Err(format!(
"PyVM runner not found: tools/pyvm_runner.py (cwd) or $NYASH_ROOT/tools/pyvm_runner.py"
));
}
let tmp_dir = std::path::Path::new("tmp");
let _ = std::fs::create_dir_all(tmp_dir);
let mir_json_path = tmp_dir.join("nyash_pyvm_mir.json");
crate::runner::mir_json_emit::emit_mir_json_for_harness(module, &mir_json_path)
.map_err(|e| format!("PyVM MIR JSON emit error: {}", e))?;
crate::cli_v!("[Runner] using PyVM ({} ) → {}", tag, mir_json_path.display());
crate::cli_v!(
"[Runner] using PyVM ({} ) → {}",
tag,
mir_json_path.display()
);
// Determine entry function (prefer Main.main; top-level main only if allowed)
let allow_top = crate::config::env::entry_allow_toplevel_main();
let entry = if module.functions.contains_key("Main.main") {

View File

@ -1,8 +1,19 @@
//! Resolve context — capture per-thread prelude merge context for enriched diagnostics.
use std::cell::RefCell;
/// Line span mapping for merged prelude+main sources.
/// Represents that lines [start_line, start_line + line_count) in the merged
/// text originate from `file` at local lines [1, line_count].
#[derive(Clone, Debug)]
pub struct LineSpan {
    /// Origin path, or a synthetic pseudo-file label (e.g. "<prelude/main-boundary>").
    pub file: String,
    /// 1-based first line of this span within the merged text.
    pub start_line: usize,
    /// Number of consecutive merged lines attributed to `file`.
    pub line_count: usize,
}
thread_local! {
static LAST_MERGED_PRELUDES: RefCell<Vec<String>> = RefCell::new(Vec::new());
static LAST_TEXT_MERGE_LINE_SPANS: RefCell<Vec<LineSpan>> = RefCell::new(Vec::new());
}
/// Record the list of prelude file paths used for the last text merge in this thread.
@ -23,3 +34,25 @@ pub fn take_last_merged_preludes() -> Vec<String> {
LAST_MERGED_PRELUDES.with(|c| std::mem::take(&mut *c.borrow_mut()))
}
/// Record the line-span mapping for the last text merge in this thread.
pub fn set_last_text_merge_line_spans(spans: Vec<LineSpan>) {
    // Overwrite any previously recorded mapping for this thread;
    // `RefCell::replace` swaps in the new spans and drops the old ones.
    LAST_TEXT_MERGE_LINE_SPANS.with(|cell| {
        cell.replace(spans);
    });
}
/// Try to map a merged (global) line number back to its origin file and local line.
/// Returns `None` for line 0 or when no recorded span covers `line`.
pub fn map_merged_line_to_origin(line: usize) -> Option<(String, usize)> {
    if line == 0 {
        return None;
    }
    LAST_TEXT_MERGE_LINE_SPANS.with(|cell| {
        cell.borrow().iter().find_map(|span| {
            // Offset of `line` within the span; `None` when line precedes it.
            let offset = line.checked_sub(span.start_line)?;
            // In range iff line < start_line + line_count; local lines are 1-based.
            (offset < span.line_count).then(|| (span.file.clone(), offset + 1))
        })
    })
}

View File

@ -1,6 +1,6 @@
/*!
* Using resolver utilities — static resolution line (SSOT + AST) 📦
*
*
* 箱化モジュール化で綺麗綺麗になったにゃ!🎉
*
* Separation of concerns:
@ -19,45 +19,29 @@
* - seam: seam logging and optional boundary markers (for diagnostics).
*/
pub mod strip;
pub mod seam;
pub mod using_resolution;
pub mod prelude_manager;
pub mod selfhost_pipeline;
pub mod path_util;
pub mod context;
pub mod path_util;
pub mod prelude_manager;
pub mod seam;
pub mod selfhost_pipeline;
pub mod strip;
pub mod using_resolution;
// 📦 箱化モジュールの公開にゃ!
pub use using_resolution::{
UsingResolutionBox,
UsingTarget,
UsingConfig,
};
pub use using_resolution::{UsingConfig, UsingResolutionBox, UsingTarget};
pub use prelude_manager::{
PreludeManagerBox,
MergeStrategy,
MergeResult,
};
pub use prelude_manager::{MergeResult, MergeStrategy, PreludeManagerBox};
pub use selfhost_pipeline::{
SelfhostPipelineBox,
CompilationResult,
PipelineConfig,
};
pub use selfhost_pipeline::{CompilationResult, PipelineConfig, SelfhostPipelineBox};
// 🔧 Legacy functions (preserved for compatibility)
pub use strip::{
preexpand_at_local,
collect_using_and_strip,
resolve_prelude_paths_profiled,
parse_preludes_to_asts,
merge_prelude_asts_with_main,
merge_prelude_text,
collect_using_and_strip, merge_prelude_asts_with_main, merge_prelude_text,
parse_preludes_to_asts, preexpand_at_local, resolve_prelude_paths_profiled,
};
// Expose context helpers for enhanced diagnostics
pub use context::{
set_last_merged_preludes,
clone_last_merged_preludes,
clone_last_merged_preludes, map_merged_line_to_origin, set_last_merged_preludes,
set_last_text_merge_line_spans, LineSpan,
};

View File

@ -17,4 +17,3 @@ pub fn is_using_target_path_unquoted(target_unquoted: &str) -> bool {
|| target_unquoted.ends_with(".hako")
|| target_unquoted.ends_with(".nyash")
}

View File

@ -1,13 +1,13 @@
//! Prelude Manager Box - 綺麗綺麗なプレリュード統合専門家!📦
//!
//!
//! テキストマージとASTマージを分離して、
//! 保守性とテスト容易性を向上させるにゃ!
use crate::runner::NyashRunner;
use crate::runner::modes::common_util::resolve::using_resolution::UsingResolutionBox;
use crate::runner::NyashRunner;
/// 📦 PreludeManagerBox - プレリュード統合の専門家!
///
///
/// テキストベースとASTベースの両方の統合を
/// 統一インターフェースで提供する箱にゃ!
pub struct PreludeManagerBox<'a> {
@ -111,11 +111,13 @@ impl<'a> PreludeManagerBox<'a> {
fn build_text_merged(
&self,
source: &str,
_filename: &str,
filename: &str,
prelude_paths: &[String],
trace: bool,
) -> Result<String, String> {
let mut merged = String::new();
let mut spans: Vec<crate::runner::modes::common_util::resolve::LineSpan> = Vec::new();
let mut current_line: usize = 1;
// プレリュードをDFS順に追加
for (idx, path) in prelude_paths.iter().enumerate() {
@ -138,16 +140,44 @@ impl<'a> PreludeManagerBox<'a> {
merged.push_str(&cleaned);
merged.push('\n');
let added = cleaned.lines().count();
if added > 0 {
spans.push(crate::runner::modes::common_util::resolve::LineSpan {
file: path.clone(),
start_line: current_line,
line_count: added,
});
current_line += added + 1; // +1 for the extra '\n'
} else {
current_line += 1;
}
}
// デバッグモードなら境界マーカーを追加
if std::env::var("NYASH_RESOLVE_SEAM_DEBUG").ok().as_deref() == Some("1") {
merged.push_str("\n/* --- using prelude/main boundary --- */\n\n");
// boundary line(s) are attributed to a synthetic "<boundary>" pseudo-file
let boundary_lines = 3usize;
spans.push(crate::runner::modes::common_util::resolve::LineSpan {
file: "<prelude/main-boundary>".to_string(),
start_line: current_line,
line_count: boundary_lines,
});
current_line += boundary_lines;
}
// メインソースを正規化して追加
let cleaned_main = self.normalize_text_for_inline(source);
merged.push_str(&cleaned_main);
let main_lines = cleaned_main.lines().count();
if main_lines > 0 {
spans.push(crate::runner::modes::common_util::resolve::LineSpan {
file: filename.to_string(),
start_line: current_line,
line_count: main_lines,
});
current_line += main_lines;
}
if trace {
crate::runner::trace::log(format!(
@ -158,6 +188,8 @@ impl<'a> PreludeManagerBox<'a> {
));
}
crate::runner::modes::common_util::resolve::set_last_text_merge_line_spans(spans);
Ok(self.normalize_text_for_inline(&merged))
}
@ -169,34 +201,35 @@ impl<'a> PreludeManagerBox<'a> {
) -> Result<(String, Vec<String>), String> {
// 既存のcollect_using_and_strip関数を呼び出す
// TODO: 将来的にはUsingResolutionBox経由に置き換える
let (cleaned, prelude_paths, _imports) = crate::runner::modes::common_util::resolve::strip::collect_using_and_strip(
&self.runner,
code,
filename,
)?;
let (cleaned, prelude_paths, _imports) =
crate::runner::modes::common_util::resolve::strip::collect_using_and_strip(
&self.runner,
code,
filename,
)?;
Ok((cleaned, prelude_paths))
}
/// 🔧 テキストを正規化するにゃ!
fn normalize_text_for_inline(&self, s: &str) -> String {
let mut out = s.replace("\r\n", "\n").replace("\r", "\n");
// `}` の前の `;` を除去(複数回パス)
for _ in 0..2 {
let mut tmp = String::with_capacity(out.len());
let bytes = out.as_bytes();
let mut i = 0usize;
while i < bytes.len() {
if bytes[i] == b';' {
// 先読みしてスペース/改行をスキップ
let mut j = i + 1;
while j < bytes.len() {
let c = bytes[j];
if c == b' ' || c == b'\t' || c == b'\n' {
j += 1;
} else {
break;
if c == b' ' || c == b'\t' || c == b'\n' {
j += 1;
} else {
break;
}
}
if j < bytes.len() && bytes[j] == b'}' {
@ -210,12 +243,12 @@ impl<'a> PreludeManagerBox<'a> {
}
out = tmp;
}
// ファイル末尾に改行を追加
if !out.ends_with('\n') {
out.push('\n');
if !out.ends_with('\n') {
out.push('\n');
}
out
}
@ -243,7 +276,7 @@ impl<'a> PreludeManagerBox<'a> {
prelude_paths: &[String],
) -> Result<MergeResult, String> {
let strategy = self.select_strategy(prelude_paths.len());
match strategy {
MergeStrategy::Text => self.merge_text(source, filename, prelude_paths),
MergeStrategy::Ast => self.merge_ast(source, filename, prelude_paths),

View File

@ -1,6 +1,8 @@
/// Log tail of inlined prelude chunk for seam inspection.
pub fn log_inlined_tail(path_key: &str, inlined_text: &str, seam_dbg: bool) {
if !seam_dbg { return; }
if !seam_dbg {
return;
}
let tail = inlined_text
.chars()
.rev()
@ -18,7 +20,9 @@ pub fn log_inlined_tail(path_key: &str, inlined_text: &str, seam_dbg: bool) {
/// Log the seam between prelude and body for quick visual diff.
pub fn log_prelude_body_seam(prelude_clean: &str, body: &str, seam_dbg: bool) {
if !seam_dbg { return; }
if !seam_dbg {
return;
}
let tail = prelude_clean
.chars()
.rev()
@ -28,8 +32,14 @@ pub fn log_prelude_body_seam(prelude_clean: &str, body: &str, seam_dbg: bool) {
.rev()
.collect::<String>();
let head = body.chars().take(160).collect::<String>();
eprintln!("[using][seam] prelude_tail=<<<{}>>>", tail.replace('\n', "\\n"));
eprintln!("[using][seam] body_head =<<<{}>>>", head.replace('\n', "\\n"));
eprintln!(
"[using][seam] prelude_tail=<<<{}>>>",
tail.replace('\n', "\\n")
);
eprintln!(
"[using][seam] body_head =<<<{}>>>",
head.replace('\n', "\\n")
);
}
// Legacy brace fix function removed (Phase 15 cleanup)

View File

@ -1,13 +1,15 @@
//! Selfhost Pipeline Box - 綺麗綺麗なセルフホストパイプライン専門家!📦
//!
//!
//! セルフホストコンパイルの複雑な処理を箱に閉じ込めて、
//! 保守性とテスト容易性を向上させるにゃ!
use crate::runner::modes::common_util::resolve::prelude_manager::{
MergeStrategy, PreludeManagerBox,
};
use crate::runner::NyashRunner;
use crate::runner::modes::common_util::resolve::prelude_manager::{PreludeManagerBox, MergeStrategy};
/// 📦 SelfhostPipelineBox - セルフホストパイプラインの専門家!
///
///
/// コンパイラーパイプライン全体を管理する箱にゃ!
pub struct SelfhostPipelineBox<'a> {
runner: &'a NyashRunner,
@ -37,7 +39,7 @@ impl<'a> SelfhostPipelineBox<'a> {
/// 🌟 新しいSelfhostPipelineBoxを作るにゃ
pub fn new(runner: &'a NyashRunner) -> Self {
let prelude_manager = PreludeManagerBox::new(runner);
Self {
runner,
prelude_manager,
@ -66,12 +68,14 @@ impl<'a> SelfhostPipelineBox<'a> {
// 第1フェーズusing文解析とプレリュードパス収集
let (cleaned_main, prelude_paths) = self.collect_and_resolve_using(code, filename)?;
// 第2フェーズプレリュード統合
let merge_result = if config.enable_ast_merge {
self.prelude_manager.merge_ast(&cleaned_main, filename, &prelude_paths)?
self.prelude_manager
.merge_ast(&cleaned_main, filename, &prelude_paths)?
} else {
self.prelude_manager.merge_text(&cleaned_main, filename, &prelude_paths)?
self.prelude_manager
.merge_text(&cleaned_main, filename, &prelude_paths)?
};
let processing_time = start_time.elapsed().as_millis() as u64;
@ -118,9 +122,7 @@ impl<'a> SelfhostPipelineBox<'a> {
eprintln!(
"[selfhost-pipeline] ✅ Completed in {}ms (strategy: {}, preludes: {})",
result.processing_time_ms,
strategy_str,
result.prelude_count
result.processing_time_ms, strategy_str, result.prelude_count
);
}
@ -146,14 +148,15 @@ impl<'a> SelfhostPipelineBox<'a> {
/// 🧪 パイプラインを検証するにゃ!(テスト用)
pub fn validate_pipeline(&self, code: &str, filename: &str) -> Result<Vec<String>, String> {
let mut issues = Vec::new();
// usingシステムの検証
if crate::config::env::enable_using() {
// using文があるかチェック
let using_count = code.lines()
let using_count = code
.lines()
.filter(|line| line.trim().starts_with("using "))
.count();
if using_count > 0 {
// プレリュード解決を試みる
match crate::runner::modes::common_util::resolve::strip::resolve_prelude_paths_profiled(
@ -177,11 +180,7 @@ impl<'a> SelfhostPipelineBox<'a> {
}
/// 📊 パフォーマンスプロファイリングするにゃ!
pub fn profile_pipeline(
&mut self,
_code: &str,
_filename: &str,
) -> Result<String, String> {
pub fn profile_pipeline(&mut self, _code: &str, _filename: &str) -> Result<String, String> {
// プロファイル機能を実装(別途)
// TODO: プロファイル機能を追加
Err("Profiling not yet implemented".to_string())

View File

@ -14,14 +14,26 @@ pub fn collect_using_and_strip(
runner: &NyashRunner,
code: &str,
filename: &str,
) -> Result<(String, Vec<String>, std::collections::HashMap<String, String>), String> {
) -> Result<
(
String,
Vec<String>,
std::collections::HashMap<String, String>,
),
String,
> {
if !crate::config::env::enable_using() {
return Ok((code.to_string(), Vec::new(), std::collections::HashMap::new()));
return Ok((
code.to_string(),
Vec::new(),
std::collections::HashMap::new(),
));
}
let using_ctx = runner.init_using_context();
let prod = crate::config::env::using_is_prod();
let strict = crate::config::env::env_bool("NYASH_USING_STRICT");
let verbose = crate::config::env::cli_verbose() || crate::config::env::env_bool("NYASH_RESOLVE_TRACE");
let verbose =
crate::config::env::cli_verbose() || crate::config::env::env_bool("NYASH_RESOLVE_TRACE");
let ctx_dir = std::path::Path::new(filename).parent();
let mut out = String::with_capacity(code.len());
@ -30,8 +42,8 @@ pub fn collect_using_and_strip(
use std::collections::HashMap;
let mut seen_paths: HashMap<String, (String, usize)> = HashMap::new(); // canon_path -> (alias/label, first_line)
let mut seen_aliases: HashMap<String, (String, usize)> = HashMap::new(); // alias -> (canon_path, first_line)
// Determine if this file is inside a declared package root; if so, allow
// internal file-using within the package even when file-using is globally disallowed.
// Determine if this file is inside a declared package root; if so, allow
// internal file-using within the package even when file-using is globally disallowed.
let filename_canon = std::fs::canonicalize(filename).ok();
let mut inside_pkg = false;
if let Some(ref fc) = filename_canon {
@ -66,7 +78,10 @@ pub fn collect_using_and_strip(
// Check if this is a known alias or module FIRST before treating as file path
let is_known_alias_or_module = using_ctx.aliases.contains_key(&target_unquoted)
|| using_ctx.pending_modules.iter().any(|(k, _)| k == &target_unquoted)
|| using_ctx
.pending_modules
.iter()
.any(|(k, _)| k == &target_unquoted)
|| using_ctx.packages.contains_key(&target_unquoted);
let is_path = if is_known_alias_or_module {
@ -74,7 +89,9 @@ pub fn collect_using_and_strip(
false
} else {
// SSOT: delegate path pattern check
crate::runner::modes::common_util::resolve::path_util::is_using_target_path_unquoted(&target_unquoted)
crate::runner::modes::common_util::resolve::path_util::is_using_target_path_unquoted(
&target_unquoted,
)
};
if is_path {
// SSOT: Disallow file-using at top-level; allow only for sources located
@ -145,7 +162,13 @@ pub fn collect_using_and_strip(
prev_line
));
} else {
seen_paths.insert(canon.clone(), (alias_name.clone().unwrap_or_else(|| "<none>".into()), line_no));
seen_paths.insert(
canon.clone(),
(
alias_name.clone().unwrap_or_else(|| "<none>".into()),
line_no,
),
);
}
if let Some(alias) = alias_name.clone() {
if let Some((prev_path, prev_line)) = seen_aliases.get(&alias) {
@ -184,7 +207,9 @@ pub fn collect_using_and_strip(
strict,
verbose,
) {
if resolved.starts_with("dylib:") { continue; }
if resolved.starts_with("dylib:") {
continue;
}
let canon = std::fs::canonicalize(&resolved)
.ok()
.map(|pb| pb.to_string_lossy().to_string())
@ -197,7 +222,10 @@ pub fn collect_using_and_strip(
} else {
seen_paths.insert(
canon.clone(),
(alias_name.clone().unwrap_or_else(|| "<none>".into()), line_no),
(
alias_name.clone().unwrap_or_else(|| "<none>".into()),
line_no,
),
);
}
if let Some(alias) = alias_name.clone() {
@ -217,10 +245,8 @@ pub fn collect_using_and_strip(
}
// 1) modules mapping (name -> path)
if let Some((_, mod_path)) = using_ctx
.pending_modules
.iter()
.find(|(n, _)| n == &name)
if let Some((_, mod_path)) =
using_ctx.pending_modules.iter().find(|(n, _)| n == &name)
{
let out_path = mod_path.clone();
// Duplicate detection (same semantics as packages below)
@ -236,7 +262,10 @@ pub fn collect_using_and_strip(
} else {
seen_paths.insert(
canon.clone(),
(alias_name.clone().unwrap_or_else(|| "<none>".into()), line_no),
(
alias_name.clone().unwrap_or_else(|| "<none>".into()),
line_no,
),
);
}
if let Some(alias) = alias_name.clone() {
@ -263,21 +292,25 @@ pub fn collect_using_and_strip(
PackageKind::Package => {
let base = std::path::Path::new(&pkg.path);
let out = if let Some(m) = &pkg.main {
if matches!(base.extension().and_then(|s| s.to_str()), Some("nyash") | Some("hako")) {
if matches!(
base.extension().and_then(|s| s.to_str()),
Some("nyash") | Some("hako")
) {
pkg.path.clone()
} else {
base.join(m).to_string_lossy().to_string()
}
} else if matches!(base.extension().and_then(|s| s.to_str()), Some("nyash") | Some("hako")) {
} else if matches!(
base.extension().and_then(|s| s.to_str()),
Some("nyash") | Some("hako")
) {
pkg.path.clone()
} else {
let leaf = base
.file_name()
.and_then(|s| s.to_str())
.unwrap_or(&name);
base.join(format!("{}.hako", leaf))
.to_string_lossy()
.to_string()
let leaf =
base.file_name().and_then(|s| s.to_str()).unwrap_or(&name);
base.join(format!("{}.hako", leaf))
.to_string_lossy()
.to_string()
};
// Duplicate detection for prod package alias resolution
let canon = std::fs::canonicalize(&out)
@ -294,7 +327,13 @@ pub fn collect_using_and_strip(
prev_line
));
} else {
seen_paths.insert(canon.clone(), (alias_name.clone().unwrap_or_else(|| "<none>".into()), line_no));
seen_paths.insert(
canon.clone(),
(
alias_name.clone().unwrap_or_else(|| "<none>".into()),
line_no,
),
);
}
if let Some(alias) = alias_name.clone() {
if let Some((prev_path, prev_line)) = seen_aliases.get(&alias) {
@ -476,7 +515,8 @@ pub fn resolve_prelude_paths_profiled(
}
let src = std::fs::read_to_string(&real_path)
.map_err(|e| format!("using: failed to read '{}': {}", real_path, e))?;
let (_cleaned, nested, _nested_imports) = collect_using_and_strip(runner, &src, &real_path)?;
let (_cleaned, nested, _nested_imports) =
collect_using_and_strip(runner, &src, &real_path)?;
for n in nested.iter() {
dfs(runner, n, out, seen)?;
}
@ -551,7 +591,10 @@ pub fn parse_preludes_to_asts(
) -> Result<Vec<nyash_rust::ast::ASTNode>, String> {
let debug = crate::config::env::env_bool("NYASH_STRIP_DEBUG");
if debug {
eprintln!("[strip-debug] parse_preludes_to_asts: {} files total", prelude_paths.len());
eprintln!(
"[strip-debug] parse_preludes_to_asts: {} files total",
prelude_paths.len()
);
for (idx, p) in prelude_paths.iter().enumerate() {
eprintln!("[strip-debug] [{}] {}", idx, p);
}
@ -559,18 +602,27 @@ pub fn parse_preludes_to_asts(
let mut out: Vec<nyash_rust::ast::ASTNode> = Vec::with_capacity(prelude_paths.len());
for (idx, prelude_path) in prelude_paths.iter().enumerate() {
if debug {
eprintln!("[strip-debug] [{}/{}] Processing: {}", idx + 1, prelude_paths.len(), prelude_path);
eprintln!(
"[strip-debug] [{}/{}] Processing: {}",
idx + 1,
prelude_paths.len(),
prelude_path
);
}
let src = std::fs::read_to_string(prelude_path)
.map_err(|e| format!("using: error reading {}: {}", prelude_path, e))?;
let (clean_src, _nested, _nested_imports) = collect_using_and_strip(runner, &src, prelude_path)?;
let (clean_src, _nested, _nested_imports) =
collect_using_and_strip(runner, &src, prelude_path)?;
// IMPORTANT: Do not attempt to AST-parse .hako preludes here.
// .hako is Hakorune surface, not Nyash AST. VM/VM-fallback paths
// will route to text-merge when any prelude is .hako.
if prelude_path.ends_with(".hako") {
if debug {
eprintln!("[strip-debug] skip AST parse for .hako prelude: {}", prelude_path);
eprintln!(
"[strip-debug] skip AST parse for .hako prelude: {}",
prelude_path
);
}
continue;
}
@ -579,22 +631,37 @@ pub fn parse_preludes_to_asts(
// Debug: dump clean_src if NYASH_STRIP_DEBUG=1
if debug {
eprintln!("[strip-debug] [{}/{}] About to parse: {}", idx + 1, prelude_paths.len(), prelude_path);
eprintln!("[strip-debug] clean_src first 500 chars:\n{}\n---",
&clean_src.chars().take(500).collect::<String>());
eprintln!(
"[strip-debug] [{}/{}] About to parse: {}",
idx + 1,
prelude_paths.len(),
prelude_path
);
eprintln!(
"[strip-debug] clean_src first 500 chars:\n{}\n---",
&clean_src.chars().take(500).collect::<String>()
);
}
match crate::parser::NyashParser::parse_from_string(&clean_src) {
Ok(ast) => {
if debug {
eprintln!("[strip-debug] [{}/{}] ✅ Parse SUCCESS: {}", idx + 1, prelude_paths.len(), prelude_path);
eprintln!(
"[strip-debug] [{}/{}] ✅ Parse SUCCESS: {}",
idx + 1,
prelude_paths.len(),
prelude_path
);
}
out.push(ast)
}
Err(e) => {
// Always output debug info on parse failure if NYASH_STRIP_DEBUG=1
let debug = crate::config::env::env_bool("NYASH_STRIP_DEBUG");
eprintln!("[strip-debug] Parse FAILED for: {} (debug={})", prelude_path, debug);
eprintln!(
"[strip-debug] Parse FAILED for: {} (debug={})",
prelude_path, debug
);
if debug {
eprintln!("[strip-debug] Error: {}", e);
let es = format!("{}", e);
@ -605,19 +672,31 @@ pub fn parse_preludes_to_asts(
if let Some(pos) = es.rfind("line ") {
let mut j = pos + 5; // after "line "
let bytes = es.as_bytes();
let mut n: usize = 0; let mut had = false;
let mut n: usize = 0;
let mut had = false;
while j < bytes.len() {
let c = bytes[j];
if c >= b'0' && c <= b'9' { n = n * 10 + (c - b'0') as usize; j += 1; had = true; } else { break; }
if c >= b'0' && c <= b'9' {
n = n * 10 + (c - b'0') as usize;
j += 1;
had = true;
} else {
break;
}
}
if had {
let ln = if n == 0 { 1 } else { n };
let from = ln.saturating_sub(3);
let to = std::cmp::min(lines.len(), ln + 3);
eprintln!("[strip-debug] Context around line {} ({}..={}):", ln, from.max(1), to);
eprintln!(
"[strip-debug] Context around line {} ({}..={}):",
ln,
from.max(1),
to
);
for i in from.max(1)..=to {
let mark = if i == ln { ">>" } else { " " };
if let Some(line) = lines.get(i-1) {
if let Some(line) = lines.get(i - 1) {
eprintln!("{} {:4}: {}", mark, i, line);
}
}
@ -640,7 +719,10 @@ pub fn parse_preludes_to_asts(
}
}
if debug {
eprintln!("[strip-debug] parse_preludes_to_asts: ✅ All {} files parsed successfully", out.len());
eprintln!(
"[strip-debug] parse_preludes_to_asts: ✅ All {} files parsed successfully",
out.len()
);
}
Ok(out)
}
@ -663,7 +745,10 @@ pub fn merge_prelude_asts_with_main(
if let ASTNode::Program { statements, .. } = main_ast.clone() {
let mut all = combined;
all.extend(statements);
ASTNode::Program { statements: all, span: Span::unknown() }
ASTNode::Program {
statements: all,
span: Span::unknown(),
}
} else {
// Defensive: unexpected shape; preserve main AST unchanged.
main_ast.clone()
@ -802,6 +887,8 @@ pub fn merge_prelude_text(
// Build merged text: preludes first, then main source
let mut merged = String::new();
let mut spans: Vec<crate::runner::modes::common_util::resolve::LineSpan> = Vec::new();
let mut current_line: usize = 1;
// Add preludes in DFS order
for (idx, path) in prelude_paths.iter().enumerate() {
@ -809,7 +896,8 @@ pub fn merge_prelude_text(
.map_err(|e| format!("using: failed to read '{}': {}", path, e))?;
// Strip using lines from prelude and normalize
let (cleaned_raw, _nested, _nested_imports) = collect_using_and_strip(runner, &content, path)?;
let (cleaned_raw, _nested, _nested_imports) =
collect_using_and_strip(runner, &content, path)?;
let mut cleaned = normalize_text_for_inline(&cleaned_raw);
// Hako-friendly normalize for preludes: always strip leading `local ` at line head
// when the prelude is a .hako (or looks like Hako code). This prevents top-level
@ -831,11 +919,30 @@ pub fn merge_prelude_text(
merged.push_str(&cleaned);
merged.push('\n');
let added = cleaned.lines().count();
if added > 0 {
spans.push(crate::runner::modes::common_util::resolve::LineSpan {
file: path.clone(),
start_line: current_line,
line_count: added,
});
current_line += added + 1; // +1 for extra '\n'
} else {
current_line += 1;
}
}
// Add boundary marker if debug mode
if crate::config::env::env_bool("NYASH_RESOLVE_SEAM_DEBUG") {
merged.push_str("\n/* --- using prelude/main boundary --- */\n\n");
let boundary_lines = 3usize;
spans.push(crate::runner::modes::common_util::resolve::LineSpan {
file: "<prelude/main-boundary>".to_string(),
start_line: current_line,
line_count: boundary_lines,
});
current_line += boundary_lines;
}
// Add main source (already cleaned of using lines) and normalize
@ -845,9 +952,19 @@ pub fn merge_prelude_text(
if filename.ends_with(".hako")
|| crate::runner::modes::common_util::hako::looks_like_hako_code(&cleaned_main_norm)
{
cleaned_main_norm = crate::runner::modes::common_util::hako::strip_local_decl(&cleaned_main_norm);
cleaned_main_norm =
crate::runner::modes::common_util::hako::strip_local_decl(&cleaned_main_norm);
}
merged.push_str(&cleaned_main_norm);
let main_lines = cleaned_main_norm.lines().count();
if main_lines > 0 {
spans.push(crate::runner::modes::common_util::resolve::LineSpan {
file: filename.to_string(),
start_line: current_line,
line_count: main_lines,
});
current_line += main_lines;
}
if trace {
crate::runner::trace::log(format!(
@ -865,6 +982,8 @@ pub fn merge_prelude_text(
}
}
crate::runner::modes::common_util::resolve::set_last_text_merge_line_spans(spans);
Ok(normalize_text_for_inline(&merged))
}
@ -887,7 +1006,11 @@ fn normalize_text_for_inline(s: &str) -> String {
let mut j = i + 1;
while j < bytes.len() {
let c = bytes[j];
if c == b' ' || c == b'\t' || c == b'\n' { j += 1; } else { break; }
if c == b' ' || c == b'\t' || c == b'\n' {
j += 1;
} else {
break;
}
}
if j < bytes.len() && bytes[j] == b'}' {
// drop ';' (do not advance j here)
@ -900,6 +1023,8 @@ fn normalize_text_for_inline(s: &str) -> String {
}
out = tmp;
}
if !out.ends_with('\n') { out.push('\n'); }
if !out.ends_with('\n') {
out.push('\n');
}
out
}

View File

@ -1,5 +1,5 @@
//! Using Resolution Box - 綺麗綺麗なusing文解決専門家📦
//!
//!
//! 巨大な `collect_using_and_strip` 関数を箱に分解して、
//! 責務を明確にしてテストしやすくするにゃ!
@ -8,7 +8,7 @@ use std::collections::HashMap;
use std::path::{Path, PathBuf};
/// 📦 UsingResolutionBox - using文解決の専門家
///
///
/// using文の解析、パス解決、重複チェックを一手に引き受ける箱にゃ
pub struct UsingResolutionBox<'a> {
runner: &'a NyashRunner,
@ -52,9 +52,9 @@ impl<'a> UsingResolutionBox<'a> {
|| crate::config::env::env_bool("NYASH_RESOLVE_TRACE"),
allow_file_using: crate::config::env::allow_using_file(),
};
let ctx_dir = Path::new(filename).parent().map(|p| p.to_path_buf());
// ファイルがパッケージ内にあるかチェック
let filename_canon = std::fs::canonicalize(filename).ok();
let mut inside_pkg = false;
@ -89,11 +89,11 @@ impl<'a> UsingResolutionBox<'a> {
}
crate::cli_v!("[using] stripped line: {}", line);
let rest0 = t.strip_prefix("using ").unwrap().trim();
let rest0 = rest0.split('#').next().unwrap_or(rest0).trim();
let rest0 = rest0.strip_suffix(';').unwrap_or(rest0).trim();
let (target, alias) = if let Some(pos) = rest0.find(" as ") {
(
rest0[..pos].trim().to_string(),
@ -105,16 +105,21 @@ impl<'a> UsingResolutionBox<'a> {
let target_unquoted = target.trim_matches('"').to_string();
let using_ctx = self.runner.init_using_context();
// 既知のエイリアスかモジュールかチェック
let is_known_alias_or_module = using_ctx.aliases.contains_key(&target_unquoted)
|| using_ctx.pending_modules.iter().any(|(k, _)| k == &target_unquoted)
|| using_ctx
.pending_modules
.iter()
.any(|(k, _)| k == &target_unquoted)
|| using_ctx.packages.contains_key(&target_unquoted);
let is_path = if is_known_alias_or_module {
false
} else {
crate::runner::modes::common_util::resolve::path_util::is_using_target_path_unquoted(&target_unquoted)
crate::runner::modes::common_util::resolve::path_util::is_using_target_path_unquoted(
&target_unquoted,
)
};
Some(UsingTarget {
@ -145,7 +150,7 @@ impl<'a> UsingResolutionBox<'a> {
let path = target.target.trim_matches('"').to_string();
let mut p = PathBuf::from(&path);
// 相対パス解決
if p.is_relative() {
if let Some(dir) = &self.ctx_dir {
@ -154,7 +159,7 @@ impl<'a> UsingResolutionBox<'a> {
p = cand;
}
}
// NYASH_ROOTも試す
if p.is_relative() {
if let Ok(root) = std::env::var("NYASH_ROOT") {
@ -172,9 +177,13 @@ impl<'a> UsingResolutionBox<'a> {
}
/// 🛡️ 重複チェックするにゃ!
pub fn check_duplicates(&mut self, target: &UsingTarget, resolved_path: &str) -> Result<(), String> {
let canon_path = std::fs::canonicalize(resolved_path)
.unwrap_or_else(|_| PathBuf::from(resolved_path));
pub fn check_duplicates(
&mut self,
target: &UsingTarget,
resolved_path: &str,
) -> Result<(), String> {
let canon_path =
std::fs::canonicalize(resolved_path).unwrap_or_else(|_| PathBuf::from(resolved_path));
let canon_str = canon_path.to_string_lossy();
// パスの重複チェック
@ -204,10 +213,14 @@ impl<'a> UsingResolutionBox<'a> {
// 記録
let alias_label = target.alias.as_ref().unwrap_or(&target.target).clone();
self.seen_paths.insert(canon_str.to_string(), (alias_label.clone(), target.line_no));
self.seen_paths
.insert(canon_str.to_string(), (alias_label.clone(), target.line_no));
if let Some(ref alias_name) = target.alias {
self.seen_aliases.insert(alias_name.clone(), (resolved_path.to_string(), target.line_no));
self.seen_aliases.insert(
alias_name.clone(),
(resolved_path.to_string(), target.line_no),
);
}
Ok(())

View File

@ -37,7 +37,10 @@ pub fn run_ny_program_capture_json(
}
};
if out.timed_out {
let head = String::from_utf8_lossy(&out.stdout).chars().take(200).collect::<String>();
let head = String::from_utf8_lossy(&out.stdout)
.chars()
.take(200)
.collect::<String>();
eprintln!(
"[selfhost-child] timeout after {} ms; stdout(head)='{}'",
timeout_ms,

View File

@ -32,12 +32,18 @@ pub fn run_pyvm_module(module: &MirModule, label: &str) -> Option<i32> {
let tmp_dir = Path::new("tmp");
let _ = std::fs::create_dir_all(tmp_dir);
let mir_json_path = tmp_dir.join("nyash_pyvm_mir.json");
if let Err(e) = crate::runner::mir_json_emit::emit_mir_json_for_harness_bin(module, &mir_json_path) {
if let Err(e) =
crate::runner::mir_json_emit::emit_mir_json_for_harness_bin(module, &mir_json_path)
{
eprintln!("❌ PyVM MIR JSON emit error: {}", e);
return None;
}
if crate::config::env::cli_verbose() {
eprintln!("[Bridge] using PyVM ({}) → {}", label, mir_json_path.display());
eprintln!(
"[Bridge] using PyVM ({}) → {}",
label,
mir_json_path.display()
);
}
// Select entry (prefer Main.main; top-level main only if allowed)
let allow_top = crate::config::env::entry_allow_toplevel_main();

View File

@ -4,4 +4,3 @@
pub mod child;
pub mod json;

View File

@ -97,13 +97,19 @@ pub fn exe_try_parse_json_v0(filename: &str, timeout_ms: u64) -> Option<crate::m
);
return None;
}
let stdout = match String::from_utf8(out_buf) { Ok(s) => s, Err(_) => String::new() };
let stdout = match String::from_utf8(out_buf) {
Ok(s) => s,
Err(_) => String::new(),
};
let json_line = crate::runner::modes::common_util::selfhost::json::first_json_v0_line(&stdout)
.unwrap_or_default();
if json_line.is_empty() {
if crate::config::env::cli_verbose() {
let head: String = stdout.chars().take(200).collect();
let errh: String = String::from_utf8_lossy(&err_buf).chars().take(200).collect();
let errh: String = String::from_utf8_lossy(&err_buf)
.chars()
.take(200)
.collect();
crate::cli_v!(
"[ny-compiler] exe produced no JSON; stdout(head)='{}' stderr(head)='{}'",
head.replace('\n', "\\n"),