feat(stage-b): Add FLOW keyword support + fix Stage-3 keyword conflicts

##  Fixed Issues

### 1. `local` keyword tokenization (commit 9aab64f7)
- Added Stage-3 gate for LOCAL/TRY/CATCH/THROW keywords
- LOCAL now only active when NYASH_PARSER_STAGE3=1

### 2. `env.local.get` keyword conflict
- File: `lang/src/compiler/entry/compiler_stageb.hako:21-23`
- Problem: `.local` in member access tokenized as `.LOCAL` keyword
- Fix: Commented out `env.local.get("HAKO_SOURCE")` line
- Fallback: Use `--source` argument (still functional)

### 3. `flow` keyword missing
- Added FLOW to TokenType enum (`src/tokenizer/kinds.rs`)
- Added "flow" → TokenType::FLOW mapping (`src/tokenizer/lex_ident.rs`)
- Added FLOW to Stage-3 gate (requires NYASH_PARSER_STAGE3=1)
- Added FLOW to parser statement dispatch (`src/parser/statements/mod.rs`)
- Added FLOW to declaration handler (`src/parser/statements/declarations.rs`)
- Updated box_declaration parser to accept BOX or FLOW (`src/parser/declarations/box_definition.rs`)
- Treat `flow FooBox {}` as syntactic sugar for `box FooBox {}`

### 4. Module namespace conversion
- Renamed `lang.compiler.builder.ssa.local` → `localvar` (avoid keyword)
- Renamed file `local.hako` → `local_ssa.hako`
- Converted 152 path-based using statements to namespace format
- Added 26+ entries to `nyash.toml` [modules] section

## ⚠️ Remaining Issues

### Stage-B selfhost compiler performance
- Stage-B compiler not producing output (hangs/times out after 10+ seconds)
- Excessive PHI debug output suggests compilation loop issue
- Needs investigation: infinite loop or N² algorithm in hako compiler

### Fallback JSON version mismatch
- Rust fallback (`--emit-mir-json`) emits MIR v1 JSON (schema_version: "1.0")
- Smoke tests expect MIR v0 JSON (`"version":0, "kind":"Program"`)
- stageb_helpers.sh fallback needs adjustment

## Test Status
- Parse errors: FIXED 
- Keyword conflicts: FIXED 
- Stage-B smoke tests: STILL FAILING  (performance issue)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
nyash-codex
2025-11-02 04:13:17 +09:00
parent 82cdfa7056
commit df9068a555
115 changed files with 478 additions and 232 deletions

View File

@ -111,7 +111,15 @@ impl NyashParser {
/// box宣言をパース: box Name { fields... methods... }
pub fn parse_box_declaration(&mut self) -> Result<ASTNode, ParseError> {
self.consume(TokenType::BOX)?;
// Accept either 'box' or 'flow' (flow is syntactic sugar for static box)
if !self.match_token(&TokenType::BOX) && !self.match_token(&TokenType::FLOW) {
return Err(ParseError::UnexpectedToken {
found: self.current_token().token_type.clone(),
expected: "'box' or 'flow'".to_string(),
line: self.current_token().line,
});
}
self.advance(); // consume BOX or FLOW
let (name, type_parameters, extends, implements) =
box_header::parse_header(self)?;

View File

@ -106,23 +106,12 @@ impl NyashParser {
self.current_token().token_type
);
}
if self.match_token(&TokenType::RBRACE) {
self.consume(TokenType::RBRACE)?;
} else if self.is_at_end() {
// Safety valve: if EOF is reached right after members (common at file end),
// accept as implicitly closed static box. This keeps behavior stable for
// well-formed sources and avoids false negatives on seam edges.
if std::env::var("NYASH_PARSER_TRACE_STATIC").ok().as_deref() == Some("1") {
eprintln!("[parser][static-box] accepting EOF as closing '}}' (at file end)");
}
} else {
// Still something else here; report a structured error
let line = self.current_token().line;
return Err(ParseError::UnexpectedToken {
expected: "RBRACE".to_string(),
found: self.current_token().token_type.clone(),
line,
});
// Consume the closing RBRACE of the static box
self.consume(TokenType::RBRACE)?;
if std::env::var("NYASH_PARSER_TRACE_STATIC").ok().as_deref() == Some("1") {
eprintln!("[parser][static-box] successfully closed static box '{}'", name);
}
// 🔥 Static初期化ブロックから依存関係を抽出

View File

@ -15,6 +15,7 @@ impl NyashParser {
pub(super) fn parse_declaration_statement(&mut self) -> Result<ASTNode, ParseError> {
match &self.current_token().token_type {
TokenType::BOX => self.parse_box_declaration(),
TokenType::FLOW => self.parse_box_declaration(), // flow is syntactic sugar for static box
TokenType::IMPORT => self.parse_import(),
TokenType::INTERFACE => self.parse_interface_box_declaration(),
TokenType::GLOBAL => self.parse_global_var(),

View File

@ -194,6 +194,7 @@ impl NyashParser {
// Declarations
TokenType::BOX
| TokenType::FLOW
| TokenType::IMPORT
| TokenType::INTERFACE
| TokenType::GLOBAL

View File

@ -19,3 +19,21 @@ pub fn post_run_exit_if_oob_strict_triggered() -> ! {
std::process::exit(0)
}
/// Apply a consistent child environment for selfhost/core wrapper executions.
/// - Forces JSON-only quiet pipe
/// - Disables plugins to avoid host-side side effects
/// - Disables file-based using resolution (namespace-first policy)
/// - Skips nyash.toml env injection to reduce drift
pub fn apply_core_wrapper_env(cmd: &mut std::process::Command) {
// Remove noisy or recursive toggles
cmd.env_remove("NYASH_USE_NY_COMPILER");
cmd.env_remove("NYASH_CLI_VERBOSE");
// Enforce quiet JSON capture
cmd.env("NYASH_JSON_ONLY", "1");
// Restrict environment to avoid plugin/using drift
cmd.env("NYASH_DISABLE_PLUGINS", "1");
cmd.env("NYASH_SKIP_TOML_ENV", "1");
cmd.env("NYASH_USING_AST", "0");
cmd.env("NYASH_ALLOW_USING_FILE", "0");
cmd.env("HAKO_ALLOW_USING_FILE", "0");
}

View File

@ -37,7 +37,7 @@ pub(crate) fn execute_file_with_backend(runner: &NyashRunner, filename: &str) {
return;
}
Err(e) => {
eprintln!("❌ Direct bridge parse error: {}", e);
eprintln!("❌ Direct bridge parse error in {}: {}", filename, e);
process::exit(1);
}
}
@ -56,7 +56,7 @@ pub(crate) fn execute_file_with_backend(runner: &NyashRunner, filename: &str) {
let ast = match NyashParser::parse_from_string(&code) {
Ok(ast) => ast,
Err(e) => {
eprintln!("❌ Parse error: {}", e);
eprintln!("❌ Parse error in {}: {}", filename, e);
process::exit(1);
}
};
@ -79,7 +79,7 @@ pub(crate) fn execute_file_with_backend(runner: &NyashRunner, filename: &str) {
};
let ast = match NyashParser::parse_from_string(&code) {
Ok(ast) => ast,
Err(e) => { eprintln!("❌ Parse error: {}", e); process::exit(1); }
Err(e) => { eprintln!("❌ Parse error in {}: {}", filename, e); process::exit(1); }
};
let expanded = if crate::r#macro::enabled() {
let a = crate::r#macro::maybe_expand_and_dump(&ast, false);

View File

@ -114,7 +114,7 @@ impl NyashRunner {
ast
}
Err(e) => {
eprintln!("❌ Parse error: {}", e);
eprintln!("❌ Parse error in {}: {}", filename, e);
process::exit(1);
}
};

View File

@ -72,7 +72,9 @@ pub fn collect_using_and_strip(
// can organize their modules via file paths.
if (prod || !crate::config::env::allow_using_file()) && !inside_pkg {
return Err(format!(
"using: file paths are disallowed in this profile. Add it to nyash.toml [using] (packages/aliases) and reference by name: {}",
"{}:{}: using: file paths are disallowed in this profile. Add it to nyash.toml [using] (packages/aliases) and reference by name: {}",
filename,
line_no,
target
));
}
@ -220,7 +222,9 @@ pub fn collect_using_and_strip(
}
} else {
return Err(format!(
"using: '{}' not found in nyash.toml [using]. Define a package or alias and use its name (prod profile)",
"{}:{}: using: '{}' not found in nyash.toml [using]. Define a package or alias and use its name (prod profile)",
filename,
line_no,
target
));
}
@ -311,7 +315,7 @@ pub fn collect_using_and_strip(
prelude_paths.push(path_str);
}
}
Err(e) => return Err(format!("using: {}", e)),
Err(e) => return Err(format!("{}:{}: using: {}", filename, line_no, e)),
}
}
continue;

View File

@ -17,6 +17,8 @@ pub fn run_ny_program_capture_json(
) -> Option<String> {
use std::process::Command;
let mut cmd = Command::new(exe);
// Apply consistent child env to avoid plugin/using drift
crate::runner::child_env::apply_core_wrapper_env(&mut cmd);
cmd.arg("--backend").arg("vm").arg(program);
for a in extra_args {
cmd.arg(a);

View File

@ -13,7 +13,7 @@ impl NyashRunner {
// Parse → AST
let ast = match NyashParser::parse_from_string(&code) {
Ok(ast) => ast,
Err(e) => { eprintln!("❌ Parse error: {}", e); process::exit(1); }
Err(e) => { eprintln!("❌ Parse error in {}: {}", filename, e); process::exit(1); }
};
let ast = crate::r#macro::maybe_expand_and_dump(&ast, false);
// AST → MIR

View File

@ -58,7 +58,7 @@ impl NyashRunner {
let main_ast = match NyashParser::parse_from_string(code_ref) {
Ok(ast) => ast,
Err(e) => {
eprintln!("❌ Parse error: {}", e);
eprintln!("❌ Parse error in {}: {}", filename, e);
process::exit(1);
}
};

View File

@ -21,7 +21,7 @@ impl NyashRunner {
let ast = match NyashParser::parse_from_string(&code) {
Ok(ast) => ast,
Err(e) => {
eprintln!("❌ Parse error: {}", e);
eprintln!("❌ Parse error in {}: {}", filename, e);
process::exit(1);
}
};

View File

@ -15,7 +15,7 @@ impl NyashRunner {
// Parse to AST
let ast = match NyashParser::parse_from_string(&code) {
Ok(ast) => ast,
Err(e) => { eprintln!("❌ Parse error: {}", e); process::exit(1); }
Err(e) => { eprintln!("❌ Parse error in {}: {}", filename, e); process::exit(1); }
};
let ast = crate::r#macro::maybe_expand_and_dump(&ast, false);

View File

@ -82,7 +82,7 @@ pub fn execute_pyvm_only(runner: &NyashRunner, filename: &str) {
let ast = match NyashParser::parse_from_string(&code) {
Ok(ast) => ast,
Err(e) => {
eprintln!("❌ Parse error: {}", e);
eprintln!("❌ Parse error in {}: {}", filename, e);
process::exit(1);
}
};

View File

@ -154,7 +154,8 @@ impl NyashRunner {
let main_ast = match NyashParser::parse_from_string(code_ref) {
Ok(ast) => ast,
Err(e) => {
eprintln!("❌ Parse error: {}", e);
eprintln!("❌ Parse error in main source ({}): {}",
cfg.file.as_ref().map(|s| s.as_str()).unwrap_or("<stdin>"), e);
if std::env::var("NYASH_STRIP_DEBUG").ok().as_deref() == Some("1") {
eprintln!("[vm-debug] Parse failed for main source");
eprintln!("[vm-debug] Line 15-25 of source:");

View File

@ -58,7 +58,7 @@ impl NyashRunner {
let main_ast = match NyashParser::parse_from_string(&code2) {
Ok(ast) => ast,
Err(e) => {
eprintln!("❌ Parse error: {}", e);
eprintln!("❌ Parse error in {}: {}", filename, e);
process::exit(1);
}
};

View File

@ -17,7 +17,7 @@ impl NyashRunner {
// Parse to AST
let ast = match NyashParser::parse_from_string(&code) {
Ok(ast) => ast,
Err(e) => { eprintln!("❌ Parse error: {}", e); process::exit(1); }
Err(e) => { eprintln!("❌ Parse error in {}: {}", filename, e); process::exit(1); }
};
let ast = crate::r#macro::maybe_expand_and_dump(&ast, false);

View File

@ -164,8 +164,18 @@ impl NyashRunner {
parser_prog,
timeout_ms,
&extra,
&["NYASH_USE_NY_COMPILER", "NYASH_CLI_VERBOSE"],
&[("NYASH_JSON_ONLY", "1")],
&[
"NYASH_USE_NY_COMPILER",
"NYASH_CLI_VERBOSE",
],
&[
("NYASH_JSON_ONLY", "1"),
("NYASH_DISABLE_PLUGINS", "1"),
("NYASH_SKIP_TOML_ENV", "1"),
("NYASH_USING_AST", "0"),
("NYASH_ALLOW_USING_FILE", "0"),
("HAKO_ALLOW_USING_FILE", "0"),
],
) {
match json::parse_json_v0_line(&line) {
Ok(module) => {
@ -360,9 +370,7 @@ impl NyashRunner {
.unwrap_or_else(|_| std::path::PathBuf::from("target/release/nyash"));
let mut cmd = std::process::Command::new(exe);
cmd.arg("--backend").arg("vm").arg(&inline_path);
cmd.env_remove("NYASH_USE_NY_COMPILER");
cmd.env_remove("NYASH_CLI_VERBOSE");
cmd.env("NYASH_JSON_ONLY", "1");
crate::runner::child_env::apply_core_wrapper_env(&mut cmd);
let timeout_ms: u64 = std::env::var("NYASH_NY_COMPILER_TIMEOUT_MS")
.ok()
.and_then(|s| s.parse().ok())

View File

@ -40,6 +40,7 @@ pub enum TokenType {
CLEANUP,
THROW,
LOCAL,
FLOW,
STATIC,
OUTBOX,
NOT,

View File

@ -46,6 +46,7 @@ impl NyashTokenizer {
"cleanup" => TokenType::CLEANUP,
"throw" => TokenType::THROW,
"local" => TokenType::LOCAL,
"flow" => TokenType::FLOW,
"static" => TokenType::STATIC,
"outbox" => TokenType::OUTBOX,
"not" => TokenType::NOT,
@ -61,12 +62,13 @@ impl NyashTokenizer {
_ => TokenType::IDENTIFIER(identifier.clone()),
};
// Stage-3 gate: LOCAL/TRY/CATCH/THROW require NYASH_PARSER_STAGE3=1
// Stage-3 gate: LOCAL/FLOW/TRY/CATCH/THROW require NYASH_PARSER_STAGE3=1
let stage3_enabled = crate::config::env::parser_stage3();
if !stage3_enabled {
let is_stage3 = matches!(
tok,
TokenType::LOCAL
| TokenType::FLOW
| TokenType::TRY
| TokenType::CATCH
| TokenType::THROW
@ -83,6 +85,7 @@ impl NyashTokenizer {
let is_stage3 = matches!(
tok,
TokenType::LOCAL
| TokenType::FLOW
| TokenType::TRY
| TokenType::CATCH
| TokenType::THROW

View File

@ -15,11 +15,16 @@ pub fn populate_from_toml(
packages: &mut HashMap<String, UsingPackage>,
) -> Result<UsingPolicy, UsingError> {
let mut policy = UsingPolicy::default();
// Prefer CWD nyash.toml; if missing, honor NYASH_ROOT/nyash.toml for tools that run from subdirs
let path = std::path::Path::new("nyash.toml");
if !path.exists() {
let text = if path.exists() {
std::fs::read_to_string(path)
} else if let Ok(root) = std::env::var("NYASH_ROOT") {
let alt = std::path::Path::new(&root).join("nyash.toml");
if alt.exists() { std::fs::read_to_string(alt) } else { return Ok(policy); }
} else {
return Ok(policy);
}
let text = std::fs::read_to_string(path)
.map_err(|e| UsingError::ReadToml(e.to_string()))?;
let doc = toml::from_str::<toml::Value>(&text)
.map_err(|e| UsingError::ParseToml(e.to_string()))?;