phase-20.45: PRIMARY no-fallback reps + MIR v0 shape fixes

- Fix MIR v0 shape in lowers: functions[] + name="main" + blocks.id (see the shape sketch below)
  * lower_return_int_box.hako
  * lower_return_binop_box.hako
- runner_min: adopt LowerReturnBinOpBox before ReturnInt
- Add PRIMARY no-fallback canaries (all PASS):
  * return-binop / array-size / load-store / return-logical (OR)
- Fix phase2043 runner_min canary alias (Runner -> BuilderRunnerMinBox)
- Update docs: phase-20.45 README (PRIMARY reps), CURRENT_TASK progress

Ancillary: keep builder/provider/canary files in sync; no unrelated behavior changes.
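A minimal sketch of the MIR v0 module shape the first bullet describes, assuming a JSON encoding built with serde_json; only functions[], name="main", and blocks[].id come from the commit message, while the instruction payload (insts/op/value) is an illustrative assumption:

use serde_json::json;

// Hedged sketch of the MIR v0 shape; field names beyond functions[],
// name, and blocks[].id are hypothetical.
fn mir_v0_shape_sketch() -> serde_json::Value {
    json!({
        "functions": [{
            "name": "main",          // entry function is named "main"
            "blocks": [{
                "id": 0,             // each block carries an explicit id
                "insts": [{ "op": "ret", "value": 42 }]  // assumed encoding
            }]
        }]
    })
}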
Author: nyash-codex
Date:   2025-11-05 18:57:03 +09:00
Parent: 0996090d6d
Commit: 96ea3892af

119 changed files with 4746 additions and 316 deletions

View File

@@ -0,0 +1,48 @@
/*!
* Hako-like source detection and minimal normalization helpers.
*
* - looks_like_hako_code: heuristics to detect Hako surface in Nyash path
* - strip_local_decl: drop leading `local ` at line head for Nyash parser compatibility
* - fail_fast_on_hako: env-gated policy (default ON) to fail fast on Hako-like source in Nyash VM path
*/
/// Heuristic detection of Hako-like source (development-only convenience)
pub fn looks_like_hako_code(s: &str) -> bool {
    s.contains("using selfhost.")
        || s.contains("using hakorune.")
        || s.lines().any(|l| l.trim_start().starts_with("local "))
}
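// Illustrative checks (not part of this diff; shown for clarity):
//   looks_like_hako_code("using selfhost.vm")    // -> true
//   looks_like_hako_code("    local x = 1")      // -> true (leading `local `)
//   looks_like_hako_code("static box Main {}")   // -> false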
/// Remove leading `local ` declarations at line head to keep Nyash parser stable
pub fn strip_local_decl(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for line in s.lines() {
        let leading = line.len() - line.trim_start().len();
        let (indent, rest) = line.split_at(leading);
        if rest.starts_with("local ") || rest.starts_with("local\t") {
            let bytes = rest.as_bytes();
            let mut i = 5; // after 'local'
            // skip all whitespace between `local` and the identifier
            while i < bytes.len() && (bytes[i] == b' ' || bytes[i] == b'\t') {
                i += 1;
            }
            out.push_str(indent);
            out.push_str(&rest[i..]);
            out.push('\n');
        } else {
            out.push_str(line);
            out.push('\n');
        }
    }
    out
}
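// Illustrative behavior (not part of this diff): indentation is preserved,
// only the `local ` keyword and the whitespace after it are dropped, and
// every emitted line gains a trailing '\n':
//   strip_local_decl("  local x = 1")  // -> "  x = 1\n"
//   strip_local_decl("print(y)")       // -> "print(y)\n"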
/// Policy toggle: fail fast when Hako-like code enters Nyash VM path
/// Default: ON (true)
pub fn fail_fast_on_hako() -> bool {
    match std::env::var("HAKO_FAIL_FAST_ON_HAKO_IN_NYASH_VM").ok().as_deref() {
        Some("0") | Some("false") | Some("off") => false,
        _ => true,
    }
}
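// Illustrative toggle (not part of this diff): any of "0" / "false" / "off"
// disables the fail-fast policy; every other value, or an unset variable,
// leaves it ON:
//   HAKO_FAIL_FAST_ON_HAKO_IN_NYASH_VM=0  -> fail_fast_on_hako() == false
//   (unset)                               -> fail_fast_on_hako() == true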

View File

@@ -11,3 +11,4 @@ pub mod selfhost;
pub mod resolve;
pub mod exec;
pub mod core_bridge;
pub mod hako;

View File

@@ -359,11 +359,12 @@ pub fn resolve_prelude_paths_profiled(
) -> Result<(String, Vec<String>), String> {
    // First pass: strip using from the main source and collect direct prelude paths
    let (cleaned, direct) = collect_using_and_strip(runner, code, filename)?;
    // When AST using is enabled, recursively collect nested preludes in DFS order
    // Recursively collect nested preludes (DFS) for both AST/text merges.
    // Rationale: even when we merge via text, nested `using` inside preludes
    // must be discovered so that their definitions are present at runtime
    // (e.g., runner_min -> lower_* boxes). Previously this only ran when
    // NYASH_USING_AST=1, which caused unresolved calls in inline flows.
    let ast_on = std::env::var("NYASH_USING_AST").ok().as_deref() == Some("1");
    if !ast_on {
        return Ok((cleaned, direct));
    }
    let mut out: Vec<String> = Vec::new();
    let mut seen: std::collections::HashSet<String> = std::collections::HashSet::new();
    fn normalize_path(path: &str) -> (String, String) {
@@ -451,6 +452,9 @@
            }
        }
    }
    // If AST merge is disabled, still return the discovered nested prelude list
    // so that the text merger can inline all dependencies. This keeps behavior
    // consistent across strategies and fixes nested `using` resolution.
    Ok((cleaned, out))
}
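// Worked example (hypothetical file names, for illustration only):
//   main.hako       : using runner_min
//   runner_min.hako : using lower_return_int_box
// With the NYASH_USING_AST gate no longer cutting discovery short, the
// nested prelude (lower_return_int_box) is returned even in the text-merge
// flow, so its box definitions are present at runtime.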
@@ -659,6 +663,40 @@ pub fn merge_prelude_text(
    // First pass: collect and resolve prelude paths
    let (cleaned_main, prelude_paths) = resolve_prelude_paths_profiled(runner, source, filename)?;
    // Expand nested preludes for text-merge too (DFS) so that any `using`
    // inside prelude files (e.g., runner_min -> lower_* boxes) are also
    // included even when NYASH_USING_AST is OFF.
    let mut expanded: Vec<String> = Vec::new();
    let mut seen: std::collections::HashSet<String> = std::collections::HashSet::new();
    fn canonize(p: &str) -> String {
        std::fs::canonicalize(p)
            .ok()
            .map(|pb| pb.to_string_lossy().to_string())
            .unwrap_or_else(|| p.to_string())
    }
    fn dfs_text(
        runner: &NyashRunner,
        path: &str,
        out: &mut Vec<String>,
        seen: &mut std::collections::HashSet<String>,
    ) -> Result<(), String> {
        let key = canonize(path);
        if !seen.insert(key.clone()) {
            return Ok(());
        }
        let src = std::fs::read_to_string(path)
            .map_err(|e| format!("using: failed to read '{}': {}", path, e))?;
        let (_cleaned, nested) = collect_using_and_strip(runner, &src, path)?;
        for n in nested.iter() {
            dfs_text(runner, n, out, seen)?;
        }
        out.push(key);
        Ok(())
    }
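    // Note: dfs_text pushes a path only after recursing into its nested
    // preludes (post-order), so the deepest dependencies appear first in
    // `expanded`, and duplicates are skipped via the canonicalized `seen` set.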
    for p in prelude_paths.iter() {
        dfs_text(runner, p, &mut expanded, &mut seen)?;
    }
    let prelude_paths = &expanded;
    if prelude_paths.is_empty() {
        // No using statements, return original