runner/env: centralize CLI/env getters; parser expr split (call/primary); verifier utils direct; optimizer: boxfield peephole; LLVM: branch cond normalize hook; add trace macro scaffolding; refactor common.rs verbose checks
@@ -23,6 +23,12 @@ What Changed (today)
 - Environment-variable access consolidated into `config::env` (`mir_no_phi()` / `verify_allow_no_phi()` / `llvm_use_harness()`).
 - Added the dev profile `tools/dev_env.sh phi_off` and the root-cleanup utility `tools/clean_root_artifacts.sh`.
 - CI (GH Actions) reworked to run the curated LLVM suite (PHI-on/off); the old JIT job was stopped.
+- Verifier: removed the compute_* wrappers inside verification.rs; every call site now references `verification::utils::*` directly.
+- Parser: in addition to `bit_or/xor/and` and `equality/comparison/range/term/shift/factor`, `call/primary` were also split out into `parser/expr/`. `expressions.rs` shrinks to delegation wrappers only (compatibility preserved).
+- Optimizer (BoxField): added a lightweight peephole that replaces a get immediately following a set on the same box+index within a block with a Copy (load-after-store short-circuit).
+- LLVM (select/terminators): `function.rs` keeps using `instructions::term_emit_*`, and the flow now applies `normalize_branch_condition()` right before each branch (a pre-stage hook for truthy normalization).
+- Runner/env consolidation: added getters for CLI / self-host / VM settings to `src/config/env.rs` (`cli_verbose()` / `enable_using()` / `vm_use_py()` / `ny_compiler_*()`, etc.). Hot paths in `runner/selfhost.rs`, `runner/pipe_io.rs`, and `runner/modes/common.rs` now reference the getters (phased rollout); a call-site sketch follows this excerpt.
+- VM dispatch: the implementation is already centralized through `backend::dispatch`; added a getter for the `NYASH_VM_USE_DISPATCH` flag (for future backend selection).

 Refactor Progress (2025‑09‑16, end of day)
 - Runner: split header-directive scanning and trace output into `runner/cli_directives.rs` and `runner/trace.rs`; consolidated the using-resolution logging.
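To make the Runner/env consolidation concrete, here is a minimal call-site sketch. The local `config` module below mirrors the getter added to `src/config/env.rs` so the snippet compiles on its own; the `log_if_verbose_*` functions and their message are hypothetical illustrations, not code from the repository.

// Stand-alone sketch: this module copies the cli_verbose() getter body so the
// example runs outside the nyash crate.
mod config {
    pub mod env {
        pub fn cli_verbose() -> bool {
            std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1")
        }
    }
}

// Before: hot paths read the variable directly (hypothetical call site).
fn log_if_verbose_before(msg: &str) {
    if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") {
        eprintln!("[runner] {}", msg);
    }
}

// After: the same check goes through the centralized getter, so accepted
// values and defaults live in exactly one place.
fn log_if_verbose_after(msg: &str) {
    if config::env::cli_verbose() {
        eprintln!("[runner] {}", msg);
    }
}

fn main() {
    log_if_verbose_before("resolved using targets");
    log_if_verbose_after("resolved using targets");
}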
@@ -7,10 +7,22 @@
 */

 use crate::mir::{function::MirFunction, ValueId};
+use crate::mir::MirType;
+use super::super::types; // access mapping helpers if needed later

 /// Normalize a branch condition if needed (scaffolding).
 /// Currently returns the input unchanged; provides a single place
 /// to adjust semantics later (e.g., truthy rules, short-circuit pre-pass).
-pub(crate) fn normalize_branch_condition(_func: &MirFunction, cond: &ValueId) -> ValueId {
+pub(crate) fn normalize_branch_condition(func: &MirFunction, cond: &ValueId) -> ValueId {
+    // Minimal truthy normalization hook.
+    // Strategy (no new instructions here):
+    // - If we have a recorded type for `cond` and it is a boolean-like i1/i64 (0/1), return as-is.
+    // - Otherwise, return the original cond and let flow/emit handle `!= 0` lowering as today.
+    if let Some(ty) = func.metadata.value_types.get(cond) {
+        match ty {
+            MirType::I1 | MirType::I64 | MirType::Bool => { return *cond; }
+            _ => {}
+        }
+    }
     *cond
 }
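The intended call pattern, per the changelog above, is to run the hook immediately before a conditional branch is emitted. The sketch below is illustrative only: `Emitter` and `cond_br` are hypothetical stand-ins for the `instructions::term_emit_*` helpers, and the local `normalize_branch_condition` is a simplified copy of the hook's identity behaviour.

struct Emitter;
impl Emitter {
    fn cond_br(&mut self, cond: u32, then_bb: u32, else_bb: u32) {
        println!("br i1 %{cond}, label %bb{then_bb}, label %bb{else_bb}");
    }
}

// Stand-in for the hook: identity today, a single seam for truthy rules later.
fn normalize_branch_condition(cond: u32) -> u32 { cond }

fn main() {
    let mut e = Emitter;
    let cond = normalize_branch_condition(7); // normalize first ...
    e.cond_br(cond, 1, 2);                    // ... then emit the terminator
}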
@@ -166,3 +166,22 @@ pub fn vm_pic_threshold() -> u32 {

 /// Route VM ExternCall via name→slot handlers when available
 pub fn extern_route_slots() -> bool { std::env::var("NYASH_EXTERN_ROUTE_SLOTS").ok().as_deref() == Some("1") }
+
+// ---- Runner/CLI common toggles (hot-path centralization)
+pub fn cli_verbose() -> bool { std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") }
+pub fn enable_using() -> bool { std::env::var("NYASH_ENABLE_USING").ok().as_deref() == Some("1") }
+pub fn vm_use_py() -> bool { std::env::var("NYASH_VM_USE_PY").ok().as_deref() == Some("1") }
+pub fn pipe_use_pyvm() -> bool { std::env::var("NYASH_PIPE_USE_PYVM").ok().as_deref() == Some("1") }
+pub fn vm_use_dispatch() -> bool { std::env::var("NYASH_VM_USE_DISPATCH").ok().as_deref() == Some("1") }
+
+// Self-host compiler knobs
+pub fn ny_compiler_timeout_ms() -> u64 { std::env::var("NYASH_NY_COMPILER_TIMEOUT_MS").ok().and_then(|s| s.parse().ok()).unwrap_or(2000) }
+pub fn ny_compiler_emit_only() -> bool { std::env::var("NYASH_NY_COMPILER_EMIT_ONLY").unwrap_or_else(|_| "1".to_string()) == "1" }
+pub fn ny_compiler_skip_py() -> bool { std::env::var("NYASH_NY_COMPILER_SKIP_PY").ok().as_deref() == Some("1") }
+pub fn use_ny_compiler_exe() -> bool { std::env::var("NYASH_USE_NY_COMPILER_EXE").ok().as_deref() == Some("1") }
+pub fn ny_compiler_exe_path() -> Option<String> { std::env::var("NYASH_NY_COMPILER_EXE_PATH").ok() }
+pub fn ny_compiler_min_json() -> bool { std::env::var("NYASH_NY_COMPILER_MIN_JSON").ok().as_deref() == Some("1") }
+pub fn selfhost_read_tmp() -> bool { std::env::var("NYASH_SELFHOST_READ_TMP").ok().as_deref() == Some("1") }
+pub fn ny_compiler_stage3() -> bool { std::env::var("NYASH_NY_COMPILER_STAGE3").ok().as_deref() == Some("1") }
+pub fn ny_compiler_child_args() -> Option<String> { std::env::var("NYASH_NY_COMPILER_CHILD_ARGS").ok() }
+pub fn ny_compiler_use_tmp_only() -> bool { std::env::var("NYASH_NY_COMPILER_USE_TMP_ONLY").ok().as_deref() == Some("1") }
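Two defaults above differ from the rest: `ny_compiler_emit_only()` defaults to true when the variable is unset (the Err case is mapped to "1"), and `ny_compiler_timeout_ms()` falls back to 2000 ms when unset or unparsable. A self-contained restatement of just those two decision rules (stand-alone functions, not the repository's test suite):

fn emit_only_default(raw: Result<String, std::env::VarError>) -> bool {
    // Mirrors ny_compiler_emit_only(): missing variable behaves like "1".
    raw.unwrap_or_else(|_| "1".to_string()) == "1"
}

fn timeout_default(raw: Option<String>) -> u64 {
    // Mirrors ny_compiler_timeout_ms(): missing or unparsable input -> 2000.
    raw.and_then(|s| s.parse().ok()).unwrap_or(2000)
}

fn main() {
    assert!(emit_only_default(Err(std::env::VarError::NotPresent))); // unset => true
    assert!(!emit_only_default(Ok("0".to_string())));                // "0"   => false
    assert_eq!(timeout_default(None), 2000);                         // unset => 2000 ms
    assert_eq!(timeout_default(Some("abc".to_string())), 2000);      // junk  => 2000 ms
    assert_eq!(timeout_default(Some("500".to_string())), 500);
}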
@@ -1,16 +1,46 @@
-use crate::mir::MirModule;
+use crate::mir::{MirModule, MirInstruction as I, EffectMask};
 use crate::mir::optimizer::MirOptimizer;
 use crate::mir::optimizer_stats::OptimizationStats;

 /// Optimize BoxField operations (scaffolding)
 pub fn optimize_boxfield_operations(opt: &mut MirOptimizer, module: &mut MirModule) -> OptimizationStats {
     let mut stats = OptimizationStats::new();
-    for (func_name, _function) in &mut module.functions {
-        if opt.debug_enabled() {
-            println!("  📦 BoxField optimization in function: {}", func_name);
-        }
-        // Placeholder: no transformation yet; maintain existing behavior
+    for (func_name, function) in &mut module.functions {
+        if opt.debug_enabled() { println!("  📦 BoxField optimization in function: {}", func_name); }
+        for (_bb_id, block) in &mut function.blocks {
+            let mut changed = 0usize;
+            let mut out: Vec<I> = Vec::with_capacity(block.instructions.len());
+            let mut i = 0usize;
+            while i < block.instructions.len() {
+                // Look ahead for simple store-followed-by-load on same box/index
+                if i + 1 < block.instructions.len() {
+                    match (&block.instructions[i], &block.instructions[i + 1]) {
+                        (
+                            I::BoxCall { box_val: b1, method: m1, args: a1, .. },
+                            I::BoxCall { dst: Some(dst2), box_val: b2, method: m2, args: a2, .. },
+                        ) if (m1 == "set" || m1 == "setField") && (m2 == "get" || m2 == "getField") => {
+                            // set(arg0=index/key, arg1=value), then get(arg0=index/key)
+                            if b1 == b2 && a1.len() == 2 && a2.len() == 1 && a1[0] == a2[0] {
+                                // Replace the second with Copy from just-stored value
+                                let src_val = a1[1];
+                                out.push(block.instructions[i].clone());
+                                out.push(I::Copy { dst: *dst2, src: src_val });
+                                changed += 1;
+                                i += 2;
+                                continue;
+                            }
+                        }
+                        _ => {}
+                    }
+                }
+                out.push(block.instructions[i].clone());
+                i += 1;
+            }
+            if changed > 0 {
+                block.instructions = out;
+                stats.boxfield_optimizations += changed;
+            }
+        }
     }
     stats
 }
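A minimal, self-contained model of this peephole is sketched below. The `Instr` enum and its fields are simplified stand-ins for the project's MirInstruction/BoxCall, introduced only to show the rewrite; the real pass additionally matches on the method names ("set"/"setField" followed by "get"/"getField") and preserves the set.

#[derive(Clone, Debug, PartialEq)]
enum Instr {
    Set { boxv: u32, key: u32, val: u32 },   // stand-in for BoxCall "set"/"setField"
    Get { dst: u32, boxv: u32, key: u32 },   // stand-in for BoxCall "get"/"getField"
    Copy { dst: u32, src: u32 },
}

/// Rewrite a get immediately after a set on the same box/key into a Copy of
/// the value that was just stored (load-after-store short-circuit).
fn peephole(block: &[Instr]) -> Vec<Instr> {
    let mut out = Vec::with_capacity(block.len());
    let mut i = 0;
    while i < block.len() {
        if let (Some(Instr::Set { boxv: b1, key: k1, val }), Some(Instr::Get { dst, boxv: b2, key: k2 })) =
            (block.get(i), block.get(i + 1))
        {
            if b1 == b2 && k1 == k2 {
                out.push(block[i].clone());                      // keep the store
                out.push(Instr::Copy { dst: *dst, src: *val });  // replace the load
                i += 2;
                continue;
            }
        }
        out.push(block[i].clone());
        i += 1;
    }
    out
}

fn main() {
    let block = vec![
        Instr::Set { boxv: 1, key: 0, val: 9 },
        Instr::Get { dst: 2, boxv: 1, key: 0 },
    ];
    assert_eq!(peephole(&block)[1], Instr::Copy { dst: 2, src: 9 });
}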
@@ -223,8 +223,8 @@ impl MirVerifier
         let mut errors = Vec::new();

         // Build def -> block map and dominators
-        let def_block = self.compute_def_blocks(function);
-        let dominators = self.compute_dominators(function);
+        let def_block = utils::compute_def_blocks(function);
+        let dominators = utils::compute_dominators(function);

         for (use_block_id, block) in &function.blocks {
             for instruction in block.all_instructions() {
@@ -267,7 +267,7 @@ impl MirVerifier
         }

         // Check that all blocks are reachable from entry
-        let reachable = self.compute_reachable_blocks(function);
+        let reachable = utils::compute_reachable_blocks(function);
         for block_id in function.blocks.keys() {
             if !reachable.contains(block_id) && *block_id != function.entry_block {
                 errors.push(VerificationError::UnreachableBlock {
@@ -292,9 +292,9 @@ impl MirVerifier
             return Ok(());
         }
         let mut errors = Vec::new();
-        let preds = self.compute_predecessors(function);
-        let def_block = self.compute_def_blocks(function);
-        let dominators = self.compute_dominators(function);
+        let preds = utils::compute_predecessors(function);
+        let def_block = utils::compute_def_blocks(function);
+        let dominators = utils::compute_dominators(function);
         // Helper: collect phi dsts in a block
         let mut phi_dsts_in_block: std::collections::HashMap<BasicBlockId, std::collections::HashSet<ValueId>> = std::collections::HashMap::new();
         for (bid, block) in &function.blocks {
@@ -334,11 +334,6 @@ impl MirVerifier
         if errors.is_empty() { Ok(()) } else { Err(errors) }
     }

-    /// Compute reachable blocks from entry
-    fn compute_reachable_blocks(&self, function: &MirFunction) -> HashSet<BasicBlockId> {
-        utils::compute_reachable_blocks(function)
-    }
-
     /// Get all verification errors from the last run
     pub fn get_errors(&self) -> &[VerificationError] {
         &self.errors
@@ -349,20 +344,7 @@ impl MirVerifier
         self.errors.clear();
     }

-    /// Build predecessor map for all blocks
-    fn compute_predecessors(&self, function: &MirFunction) -> HashMap<BasicBlockId, Vec<BasicBlockId>> {
-        utils::compute_predecessors(function)
-    }
-
-    /// Build a map from ValueId to its defining block
-    fn compute_def_blocks(&self, function: &MirFunction) -> HashMap<ValueId, BasicBlockId> {
-        utils::compute_def_blocks(function)
-    }
-
-    /// Compute dominator sets per block using standard iterative algorithm
-    fn compute_dominators(&self, function: &MirFunction) -> HashMap<BasicBlockId, HashSet<BasicBlockId>> {
-        utils::compute_dominators(function)
-    }
+    // Wrapper helpers removed; use verification::utils directly at call sites
 }

 impl Default for MirVerifier {
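For context on what verification::utils provides, here is a self-contained sketch of the "standard iterative algorithm" that the removed compute_dominators wrapper referred to. The CFG representation (plain u32 block ids and a predecessor map) is a simplification for illustration, not the project's MirFunction/BasicBlockId API.

use std::collections::{HashMap, HashSet};

/// Iterative dominators: dom(entry) = {entry}; for every other block b,
/// dom(b) = {b} ∪ ⋂ dom(p) over predecessors p, repeated to a fixed point.
fn compute_dominators(blocks: &[u32], entry: u32, preds: &HashMap<u32, Vec<u32>>) -> HashMap<u32, HashSet<u32>> {
    let all: HashSet<u32> = blocks.iter().copied().collect();
    let mut dom: HashMap<u32, HashSet<u32>> = blocks
        .iter()
        .map(|&b| if b == entry { (b, HashSet::from([entry])) } else { (b, all.clone()) })
        .collect();
    let mut changed = true;
    while changed {
        changed = false;
        for &b in blocks.iter().filter(|&&b| b != entry) {
            let mut new_set: Option<HashSet<u32>> = None;
            for p in preds.get(&b).into_iter().flatten() {
                let d = &dom[p];
                new_set = Some(match new_set {
                    None => d.clone(),
                    Some(acc) => acc.intersection(d).copied().collect(),
                });
            }
            let mut new_set = new_set.unwrap_or_default();
            new_set.insert(b);
            if new_set != dom[&b] {
                dom.insert(b, new_set);
                changed = true;
            }
        }
    }
    dom
}

fn main() {
    // CFG: 0 -> 1; 1 -> 2; 1 -> 3; 2 -> 4; 3 -> 4
    let preds = HashMap::from([(1, vec![0]), (2, vec![1]), (3, vec![1]), (4, vec![2, 3])]);
    let dom = compute_dominators(&[0, 1, 2, 3, 4], 0, &preds);
    assert!(dom[&4].contains(&1));  // block 1 dominates block 4
    assert!(!dom[&4].contains(&2)); // block 2 does not
}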
src/parser/expr/bit.rs (new file, +40)
@@ -0,0 +1,40 @@
use crate::parser::{NyashParser, ParseError};
use crate::parser::common::ParserUtils;
use crate::tokenizer::TokenType;
use crate::ast::{ASTNode, BinaryOperator, Span};

impl NyashParser {
    pub(crate) fn expr_parse_bit_or(&mut self) -> Result<ASTNode, ParseError> {
        let mut expr = self.expr_parse_bit_xor()?;
        while self.match_token(&TokenType::BitOr) {
            let operator = BinaryOperator::BitOr;
            self.advance();
            let right = self.expr_parse_bit_xor()?;
            expr = ASTNode::BinaryOp { operator, left: Box::new(expr), right: Box::new(right), span: Span::unknown() };
        }
        Ok(expr)
    }

    pub(crate) fn expr_parse_bit_xor(&mut self) -> Result<ASTNode, ParseError> {
        let mut expr = self.expr_parse_bit_and()?;
        while self.match_token(&TokenType::BitXor) {
            let operator = BinaryOperator::BitXor;
            self.advance();
            let right = self.expr_parse_bit_and()?;
            expr = ASTNode::BinaryOp { operator, left: Box::new(expr), right: Box::new(right), span: Span::unknown() };
        }
        Ok(expr)
    }

    pub(crate) fn expr_parse_bit_and(&mut self) -> Result<ASTNode, ParseError> {
        let mut expr = self.expr_parse_equality()?;
        while self.match_token(&TokenType::BitAnd) {
            let operator = BinaryOperator::BitAnd;
            self.advance();
            let right = self.expr_parse_equality()?;
            expr = ASTNode::BinaryOp { operator, left: Box::new(expr), right: Box::new(right), span: Span::unknown() };
        }
        Ok(expr)
    }
}
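Each split level parser above (and the ones in the files that follow) repeats the same shape: parse the next-tighter level, then loop while this level's operator is present, folding left-associatively. A generic sketch of that shape, with closure parameters as hypothetical stand-ins rather than the project's concrete methods:

// Illustrative pattern only; NyashParser keeps the levels as separate methods
// instead of a generic helper like this.
fn parse_left_assoc<T, E>(
    mut parse_operand: impl FnMut() -> Result<T, E>,
    mut next_operator: impl FnMut() -> Option<fn(T, T) -> T>,
) -> Result<T, E> {
    let mut expr = parse_operand()?;
    while let Some(combine) = next_operator() {
        let right = parse_operand()?;
        expr = combine(expr, right); // left-associative fold
    }
    Ok(expr)
}

fn main() {
    // Toy usage: fold "1 - 2 - 3" left-associatively.
    let mut nums = vec![1i64, 2, 3].into_iter();
    let mut ops = 2; // two '-' operators remain
    let result: Result<i64, ()> = parse_left_assoc(
        || nums.next().ok_or(()),
        || if ops > 0 { ops -= 1; Some((|a, b| a - b) as fn(i64, i64) -> i64) } else { None },
    );
    assert_eq!(result, Ok(-4)); // (1 - 2) - 3
}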
src/parser/expr/call.rs (new file, +136)
@@ -0,0 +1,136 @@
use crate::parser::{NyashParser, ParseError};
use crate::parser::common::ParserUtils;
use crate::tokenizer::TokenType;
use crate::ast::{ASTNode, Span};
use crate::must_advance;

#[inline]
fn is_sugar_enabled() -> bool { crate::parser::sugar_gate::is_enabled() }

impl NyashParser {
    pub(crate) fn expr_parse_call(&mut self) -> Result<ASTNode, ParseError> {
        let mut expr = self.expr_parse_primary()?;

        loop {
            if self.match_token(&TokenType::DOT) {
                self.advance(); // consume '.'

                if let TokenType::IDENTIFIER(method_name) = &self.current_token().token_type {
                    let method_name = method_name.clone();
                    self.advance();

                    if self.match_token(&TokenType::LPAREN) {
                        // Method call: obj.method(args)
                        self.advance(); // consume '('
                        let mut arguments = Vec::new();
                        let mut _arg_count = 0;

                        while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
                            must_advance!(self, _unused, "method call argument parsing");

                            arguments.push(self.parse_expression()?);
                            _arg_count += 1;

                            if self.match_token(&TokenType::COMMA) {
                                self.advance();
                            }
                        }

                        self.consume(TokenType::RPAREN)?;

                        expr = ASTNode::MethodCall {
                            object: Box::new(expr),
                            method: method_name,
                            arguments,
                            span: Span::unknown(),
                        };
                    } else {
                        // Field access: obj.field
                        expr = ASTNode::FieldAccess {
                            object: Box::new(expr),
                            field: method_name,
                            span: Span::unknown(),
                        };
                    }
                } else {
                    let line = self.current_token().line;
                    return Err(ParseError::UnexpectedToken {
                        found: self.current_token().token_type.clone(),
                        expected: "identifier".to_string(),
                        line,
                    });
                }
            } else if self.match_token(&TokenType::QmarkDot) {
                if !is_sugar_enabled() {
                    let line = self.current_token().line;
                    return Err(ParseError::UnexpectedToken {
                        found: self.current_token().token_type.clone(),
                        expected: "enable NYASH_SYNTAX_SUGAR_LEVEL=basic|full for '?.'".to_string(),
                        line,
                    });
                }
                self.advance(); // consume '?.'
                // ident then optional call
                let name = match &self.current_token().token_type {
                    TokenType::IDENTIFIER(s) => { let v = s.clone(); self.advance(); v }
                    _ => {
                        let line = self.current_token().line;
                        return Err(ParseError::UnexpectedToken { found: self.current_token().token_type.clone(), expected: "identifier after '?.'".to_string(), line });
                    }
                };
                let access = if self.match_token(&TokenType::LPAREN) {
                    // method call
                    self.advance();
                    let mut arguments = Vec::new();
                    while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
                        must_advance!(self, _unused, "safe method call arg parsing");
                        arguments.push(self.parse_expression()?);
                        if self.match_token(&TokenType::COMMA) { self.advance(); }
                    }
                    self.consume(TokenType::RPAREN)?;
                    ASTNode::MethodCall { object: Box::new(expr.clone()), method: name, arguments, span: Span::unknown() }
                } else {
                    // field access
                    ASTNode::FieldAccess { object: Box::new(expr.clone()), field: name, span: Span::unknown() }
                };

                // Wrap with peek: peek expr { null => null, else => access(expr) }
                expr = ASTNode::PeekExpr {
                    scrutinee: Box::new(expr.clone()),
                    arms: vec![(crate::ast::LiteralValue::Null, ASTNode::Literal { value: crate::ast::LiteralValue::Null, span: Span::unknown() })],
                    else_expr: Box::new(access),
                    span: Span::unknown(),
                };

            } else if self.match_token(&TokenType::LPAREN) {
                // Function call: function(args), or general callee call: (callee)(args)
                self.advance(); // consume '('
                let mut arguments = Vec::new();
                while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
                    must_advance!(self, _unused, "function call argument parsing");
                    arguments.push(self.parse_expression()?);
                    if self.match_token(&TokenType::COMMA) { self.advance(); }
                }
                self.consume(TokenType::RPAREN)?;

                if let ASTNode::Variable { name, .. } = expr.clone() {
                    expr = ASTNode::FunctionCall { name, arguments, span: Span::unknown() };
                } else {
                    expr = ASTNode::Call { callee: Box::new(expr), arguments, span: Span::unknown() };
                }
            } else if self.match_token(&TokenType::QUESTION) {
                let nt = self.peek_token();
                let is_ender = matches!(nt,
                    TokenType::NEWLINE | TokenType::EOF | TokenType::RPAREN | TokenType::COMMA | TokenType::RBRACE
                );
                if !is_ender { break; }
                self.advance();
                expr = ASTNode::QMarkPropagate { expression: Box::new(expr), span: Span::unknown() };
            } else {
                break;
            }
        }

        Ok(expr)
    }
}
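Two behaviours of this file are worth spelling out. First, the QUESTION branch only consumes a postfix '?' (QMarkPropagate) when the next token is an expression terminator (NEWLINE/EOF/')'/','/'}'), which is how it avoids clashing with the ternary '?:'; this mirrors the comments in the old expressions.rs body that this commit removes. Second, per the inline comment, '?.' desugars safe navigation into a peek expression: roughly, `a?.b` becomes `peek a { null => null, else => a.b }` (the surface syntax here is inferred from that comment, not a verified Nyash sample). A toy model of the rewrite, with a simplified AST enum standing in for ASTNode:

#[derive(Clone, Debug, PartialEq)]
enum Ast {
    Var(String),
    Null,
    Field { object: Box<Ast>, field: String },
    // peek scrutinee { null => on_null, else => otherwise }
    Peek { scrutinee: Box<Ast>, on_null: Box<Ast>, otherwise: Box<Ast> },
}

/// Desugar `object?.field` into a null-guarded field access.
fn desugar_safe_field(object: Ast, field: &str) -> Ast {
    let access = Ast::Field { object: Box::new(object.clone()), field: field.to_string() };
    Ast::Peek { scrutinee: Box::new(object), on_null: Box::new(Ast::Null), otherwise: Box::new(access) }
}

fn main() {
    let node = desugar_safe_field(Ast::Var("a".into()), "b");
    assert!(matches!(&node, Ast::Peek { .. }));
    println!("{:?}", node);
}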
src/parser/expr/compare.rs (new file, +48)
@@ -0,0 +1,48 @@
use crate::parser::{NyashParser, ParseError};
use crate::parser::common::ParserUtils;
use crate::tokenizer::TokenType;
use crate::ast::{ASTNode, BinaryOperator, Span};

impl NyashParser {
    pub(crate) fn expr_parse_equality(&mut self) -> Result<ASTNode, ParseError> {
        let mut expr = self.expr_parse_comparison()?;
        while self.match_token(&TokenType::EQUALS) || self.match_token(&TokenType::NotEquals) {
            let operator = match &self.current_token().token_type {
                TokenType::EQUALS => BinaryOperator::Equal,
                TokenType::NotEquals => BinaryOperator::NotEqual,
                _ => unreachable!(),
            };
            self.advance();
            let right = self.expr_parse_comparison()?;
            if std::env::var("NYASH_GRAMMAR_DIFF").ok().as_deref() == Some("1") {
                let name = match operator { BinaryOperator::Equal=>"eq", BinaryOperator::NotEqual=>"ne", _=>"cmp" };
                let ok = crate::grammar::engine::get().syntax_is_allowed_binop(name);
                if !ok { eprintln!("[GRAMMAR-DIFF][Parser] binop '{}' not allowed by syntax rules", name); }
            }
            expr = ASTNode::BinaryOp { operator, left: Box::new(expr), right: Box::new(right), span: Span::unknown() };
        }
        Ok(expr)
    }

    pub(crate) fn expr_parse_comparison(&mut self) -> Result<ASTNode, ParseError> {
        let mut expr = self.expr_parse_range()?;
        while self.match_token(&TokenType::LESS)
            || self.match_token(&TokenType::LessEquals)
            || self.match_token(&TokenType::GREATER)
            || self.match_token(&TokenType::GreaterEquals)
        {
            let operator = match &self.current_token().token_type {
                TokenType::LESS => BinaryOperator::Less,
                TokenType::LessEquals => BinaryOperator::LessEqual,
                TokenType::GREATER => BinaryOperator::Greater,
                TokenType::GreaterEquals => BinaryOperator::GreaterEqual,
                _ => unreachable!(),
            };
            self.advance();
            let right = self.expr_parse_range()?;
            expr = ASTNode::BinaryOp { operator, left: Box::new(expr), right: Box::new(right), span: Span::unknown() };
        }
        Ok(expr)
    }
}
src/parser/expr/factor.rs (new file, +31)
@@ -0,0 +1,31 @@
use crate::parser::{NyashParser, ParseError};
use crate::parser::common::ParserUtils;
use crate::tokenizer::TokenType;
use crate::ast::{ASTNode, BinaryOperator, Span};

impl NyashParser {
    pub(crate) fn expr_parse_factor(&mut self) -> Result<ASTNode, ParseError> {
        let mut expr = self.parse_unary()?;
        while self.match_token(&TokenType::MULTIPLY)
            || self.match_token(&TokenType::DIVIDE)
            || self.match_token(&TokenType::MODULO)
        {
            let operator = match &self.current_token().token_type {
                TokenType::MULTIPLY => BinaryOperator::Multiply,
                TokenType::DIVIDE => BinaryOperator::Divide,
                TokenType::MODULO => BinaryOperator::Modulo,
                _ => unreachable!(),
            };
            self.advance();
            let right = self.parse_unary()?;
            if std::env::var("NYASH_GRAMMAR_DIFF").ok().as_deref() == Some("1") {
                let name = match operator { BinaryOperator::Multiply=>"mul", BinaryOperator::Divide=>"div", _=>"mod" };
                let ok = crate::grammar::engine::get().syntax_is_allowed_binop(name);
                if !ok { eprintln!("[GRAMMAR-DIFF][Parser] binop '{}' not allowed by syntax rules", name); }
            }
            expr = ASTNode::BinaryOp { operator, left: Box::new(expr), right: Box::new(right), span: Span::unknown() };
        }
        Ok(expr)
    }
}
@@ -20,11 +20,11 @@ impl NyashParser
     }

     pub(crate) fn expr_parse_and(&mut self) -> Result<ASTNode, ParseError> {
-        let mut expr = self.parse_bit_or()?;
+        let mut expr = self.expr_parse_bit_or()?;
         while self.match_token(&TokenType::AND) {
             let operator = BinaryOperator::And;
             self.advance();
-            let right = self.parse_equality()?;
+            let right = self.expr_parse_equality()?;
             if std::env::var("NYASH_GRAMMAR_DIFF").ok().as_deref() == Some("1") {
                 let ok = crate::grammar::engine::get().syntax_is_allowed_binop("and");
                 if !ok { eprintln!("[GRAMMAR-DIFF][Parser] binop 'and' not allowed by syntax rules"); }
@@ -1,4 +1,11 @@
 pub(crate) mod ternary;
 pub(crate) mod coalesce;
 pub(crate) mod logic;
+pub(crate) mod bit;
+pub(crate) mod compare;
+pub(crate) mod range;
+pub(crate) mod term;
+pub(crate) mod shift;
+pub(crate) mod factor;
+pub(crate) mod call;
+pub(crate) mod primary;
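Read together, the split modules form one precedence chain: each level delegates to the next-tighter one (and -> bit_or -> bit_xor -> bit_and -> equality -> comparison -> range -> term -> shift -> factor -> unary, with call and primary at the tight end). The summary below is derived from the delegation calls visible in this commit; the exact hand-off from unary to call happens inside parse_unary in expressions.rs, which this diff only shows partially, so that step is an inference.

/// Binding levels, loosest to tightest, as read off the split modules.
const EXPR_LEVELS: &[&str] = &[
    "and/or (logic.rs)",
    "bit_or / bit_xor / bit_and (bit.rs)",
    "equality / comparison (compare.rs)",
    "range (range.rs)",
    "term (term.rs)",
    "shift (shift.rs)",
    "factor (factor.rs)",
    "unary (expressions.rs)",
    "call (call.rs)",
    "primary (primary.rs)",
];

fn main() {
    for (depth, level) in EXPR_LEVELS.iter().enumerate() {
        println!("{}{}", "  ".repeat(depth), level);
    }
}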
src/parser/expr/primary.rs (new file, +152)
@@ -0,0 +1,152 @@
use crate::parser::{NyashParser, ParseError};
use crate::parser::common::ParserUtils;
use crate::tokenizer::TokenType;
use crate::ast::{ASTNode, Span, LiteralValue};

impl NyashParser {
    pub(crate) fn expr_parse_primary(&mut self) -> Result<ASTNode, ParseError> {
        match &self.current_token().token_type {
            TokenType::LBRACK => {
                let sugar_on = crate::parser::sugar_gate::is_enabled()
                    || std::env::var("NYASH_ENABLE_ARRAY_LITERAL").ok().as_deref() == Some("1");
                if !sugar_on {
                    let line = self.current_token().line;
                    return Err(ParseError::UnexpectedToken {
                        found: self.current_token().token_type.clone(),
                        expected: "enable NYASH_SYNTAX_SUGAR_LEVEL=basic|full or NYASH_ENABLE_ARRAY_LITERAL=1".to_string(),
                        line,
                    });
                }
                self.advance();
                let mut elems: Vec<ASTNode> = Vec::new();
                while !self.match_token(&TokenType::RBRACK) && !self.is_at_end() {
                    crate::must_advance!(self, _unused, "array literal element parsing");
                    let el = self.parse_expression()?;
                    elems.push(el);
                    if self.match_token(&TokenType::COMMA) { self.advance(); }
                }
                self.consume(TokenType::RBRACK)?;
                Ok(ASTNode::ArrayLiteral { elements: elems, span: Span::unknown() })
            }
            TokenType::LBRACE => {
                let sugar_on = crate::parser::sugar_gate::is_enabled()
                    || std::env::var("NYASH_ENABLE_MAP_LITERAL").ok().as_deref() == Some("1");
                if !sugar_on {
                    let line = self.current_token().line;
                    return Err(ParseError::UnexpectedToken { found: self.current_token().token_type.clone(), expected: "enable NYASH_SYNTAX_SUGAR_LEVEL=basic|full or NYASH_ENABLE_MAP_LITERAL=1".to_string(), line });
                }
                self.advance();
                let mut entries: Vec<(String, ASTNode)> = Vec::new();
                let sugar_level = std::env::var("NYASH_SYNTAX_SUGAR_LEVEL").ok();
                let ident_key_on = std::env::var("NYASH_ENABLE_MAP_IDENT_KEY").ok().as_deref() == Some("1") || sugar_level.as_deref() == Some("full");
                while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
                    let key = match &self.current_token().token_type {
                        TokenType::STRING(s) => { let v = s.clone(); self.advance(); v }
                        TokenType::IDENTIFIER(id) if ident_key_on => { let v = id.clone(); self.advance(); v }
                        _ => {
                            let line = self.current_token().line;
                            return Err(ParseError::UnexpectedToken { found: self.current_token().token_type.clone(), expected: if ident_key_on { "string or identifier key in map literal".to_string() } else { "string key in map literal".to_string() }, line });
                        }
                    };
                    self.consume(TokenType::COLON)?;
                    let value_expr = self.parse_expression()?;
                    entries.push((key, value_expr));
                    if self.match_token(&TokenType::COMMA) { self.advance(); }
                }
                self.consume(TokenType::RBRACE)?;
                Ok(ASTNode::MapLiteral { entries, span: Span::unknown() })
            }
            TokenType::INCLUDE => { self.parse_include() }
            TokenType::STRING(s) => { let value = s.clone(); self.advance(); Ok(ASTNode::Literal { value: LiteralValue::String(value), span: Span::unknown() }) }
            TokenType::NUMBER(n) => { let value = *n; self.advance(); Ok(ASTNode::Literal { value: LiteralValue::Integer(value), span: Span::unknown() }) }
            TokenType::FLOAT(f) => { let value = *f; self.advance(); Ok(ASTNode::Literal { value: LiteralValue::Float(value), span: Span::unknown() }) }
            TokenType::TRUE => { self.advance(); Ok(ASTNode::Literal { value: LiteralValue::Bool(true), span: Span::unknown() }) }
            TokenType::FALSE => { self.advance(); Ok(ASTNode::Literal { value: LiteralValue::Bool(false), span: Span::unknown() }) }
            TokenType::NULL => { self.advance(); Ok(ASTNode::Literal { value: LiteralValue::Null, span: Span::unknown() }) }
            TokenType::THIS => {
                if std::env::var("NYASH_DEPRECATE_THIS").ok().as_deref() == Some("1") {
                    eprintln!("[deprecate:this] 'this' is deprecated; use 'me' instead (line {})", self.current_token().line);
                }
                self.advance();
                Ok(ASTNode::Me { span: Span::unknown() })
            }
            TokenType::ME => { self.advance(); Ok(ASTNode::Me { span: Span::unknown() }) }
            TokenType::NEW => {
                self.advance();
                if let TokenType::IDENTIFIER(class_name) = &self.current_token().token_type {
                    let class = class_name.clone();
                    self.advance();
                    let mut type_arguments: Vec<String> = Vec::new();
                    if self.match_token(&TokenType::LESS) {
                        self.advance();
                        loop {
                            if let TokenType::IDENTIFIER(tn) = &self.current_token().token_type { type_arguments.push(tn.clone()); self.advance(); }
                            else { let line = self.current_token().line; return Err(ParseError::UnexpectedToken { found: self.current_token().token_type.clone(), expected: "type argument".to_string(), line }); }
                            if self.match_token(&TokenType::COMMA) { self.advance(); continue; }
                            self.consume(TokenType::GREATER)?; break;
                        }
                    }
                    self.consume(TokenType::LPAREN)?;
                    let mut arguments = Vec::new();
                    while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
                        crate::must_advance!(self, _unused, "new expression argument parsing");
                        arguments.push(self.parse_expression()?);
                        if self.match_token(&TokenType::COMMA) { self.advance(); }
                    }
                    self.consume(TokenType::RPAREN)?;
                    Ok(ASTNode::New { class, arguments, type_arguments, span: Span::unknown() })
                } else {
                    let line = self.current_token().line;
                    Err(ParseError::UnexpectedToken { found: self.current_token().token_type.clone(), expected: "class name".to_string(), line })
                }
            }
            TokenType::FROM => { self.parse_from_call() }
            TokenType::IDENTIFIER(name) => {
                let parent = name.clone();
                self.advance();
                if self.match_token(&TokenType::DoubleColon) {
                    self.advance();
                    let method = match &self.current_token().token_type {
                        TokenType::IDENTIFIER(m) => { let s=m.clone(); self.advance(); s }
                        TokenType::INIT => { self.advance(); "init".to_string() }
                        TokenType::PACK => { self.advance(); "pack".to_string() }
                        TokenType::BIRTH => { self.advance(); "birth".to_string() }
                        _ => { let line = self.current_token().line; return Err(ParseError::UnexpectedToken { found: self.current_token().token_type.clone(), expected: "method name".to_string(), line }); }
                    };
                    self.consume(TokenType::LPAREN)?;
                    let mut arguments = Vec::new();
                    while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
                        crate::must_advance!(self, _unused, "Parent::method call argument parsing");
                        arguments.push(self.parse_expression()?);
                        if self.match_token(&TokenType::COMMA) { self.advance(); }
                    }
                    self.consume(TokenType::RPAREN)?;
                    Ok(ASTNode::FromCall { parent, method, arguments, span: Span::unknown() })
                } else {
                    Ok(ASTNode::Variable { name: parent, span: Span::unknown() })
                }
            }
            TokenType::LPAREN => { self.advance(); let expr = self.parse_expression()?; self.consume(TokenType::RPAREN)?; Ok(expr) }
            TokenType::FN => {
                self.advance();
                let mut params: Vec<String> = Vec::new();
                if self.match_token(&TokenType::LPAREN) { self.advance();
                    while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
                        if let TokenType::IDENTIFIER(p) = &self.current_token().token_type { params.push(p.clone()); self.advance(); if self.match_token(&TokenType::COMMA) { self.advance(); } }
                        else { let line = self.current_token().line; return Err(ParseError::UnexpectedToken { found: self.current_token().token_type.clone(), expected: "parameter name".to_string(), line }); }
                    }
                    self.consume(TokenType::RPAREN)?;
                }
                self.consume(TokenType::LBRACE)?;
                let mut body: Vec<ASTNode> = Vec::new();
                while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
                    self.skip_newlines();
                    if !self.match_token(&TokenType::RBRACE) { body.push(self.parse_statement()?); }
                }
                self.consume(TokenType::RBRACE)?;
                Ok(ASTNode::Lambda { params, body, span: Span::unknown() })
            }
            _ => { let line = self.current_token().line; Err(ParseError::InvalidExpression { line }) }
        }
    }
}
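The literal handling above composes two switches: the global sugar gate (NYASH_SYNTAX_SUGAR_LEVEL=basic|full via sugar_gate::is_enabled()) and per-feature overrides (NYASH_ENABLE_ARRAY_LITERAL / NYASH_ENABLE_MAP_LITERAL, plus NYASH_ENABLE_MAP_IDENT_KEY which also turns on at level "full"). The old expressions.rs body that this commit removes called map literals a Stage-2 feature with string keys only and the identifier-key form a gated Stage-3 sugar. A self-contained restatement of the gating decisions, detached from the parser (the treatment of the global gate as exactly "basic"/"full" is an assumption based on the error messages; the real code consults crate::parser::sugar_gate::is_enabled()):

fn flag(name: &str) -> bool { std::env::var(name).ok().as_deref() == Some("1") }

fn sugar_level_enabled() -> bool {
    matches!(std::env::var("NYASH_SYNTAX_SUGAR_LEVEL").ok().as_deref(), Some("basic") | Some("full"))
}

fn main() {
    let array_literal_on = sugar_level_enabled() || flag("NYASH_ENABLE_ARRAY_LITERAL");
    let map_literal_on = sugar_level_enabled() || flag("NYASH_ENABLE_MAP_LITERAL");
    let ident_key_on = flag("NYASH_ENABLE_MAP_IDENT_KEY")
        || std::env::var("NYASH_SYNTAX_SUGAR_LEVEL").ok().as_deref() == Some("full");
    println!("array={array_literal_on} map={map_literal_on} ident_key={ident_key_on}");
}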
src/parser/expr/range.rs (new file, +28)
@@ -0,0 +1,28 @@
use crate::parser::{NyashParser, ParseError};
use crate::parser::common::ParserUtils;
use crate::tokenizer::TokenType;
use crate::ast::{ASTNode, Span};

#[inline]
fn is_sugar_enabled() -> bool { crate::parser::sugar_gate::is_enabled() }

impl NyashParser {
    pub(crate) fn expr_parse_range(&mut self) -> Result<ASTNode, ParseError> {
        let mut expr = self.expr_parse_term()?;
        while self.match_token(&TokenType::RANGE) {
            if !is_sugar_enabled() {
                let line = self.current_token().line;
                return Err(ParseError::UnexpectedToken {
                    found: self.current_token().token_type.clone(),
                    expected: "enable NYASH_SYNTAX_SUGAR_LEVEL=basic|full for '..'".to_string(),
                    line,
                });
            }
            self.advance();
            let rhs = self.expr_parse_term()?;
            expr = ASTNode::FunctionCall { name: "Range".to_string(), arguments: vec![expr, rhs], span: Span::unknown() };
        }
        Ok(expr)
    }
}
src/parser/expr/shift.rs (new file, +27)
@@ -0,0 +1,27 @@
use crate::parser::{NyashParser, ParseError};
use crate::parser::common::ParserUtils;
use crate::tokenizer::TokenType;
use crate::ast::{ASTNode, BinaryOperator, Span};

impl NyashParser {
    pub(crate) fn expr_parse_shift(&mut self) -> Result<ASTNode, ParseError> {
        let mut expr = self.expr_parse_factor()?;
        loop {
            if self.match_token(&TokenType::ShiftLeft) {
                self.advance();
                let rhs = self.expr_parse_factor()?;
                expr = ASTNode::BinaryOp { operator: BinaryOperator::Shl, left: Box::new(expr), right: Box::new(rhs), span: Span::unknown() };
                continue;
            }
            if self.match_token(&TokenType::ShiftRight) {
                self.advance();
                let rhs = self.expr_parse_factor()?;
                expr = ASTNode::BinaryOp { operator: BinaryOperator::Shr, left: Box::new(expr), right: Box::new(rhs), span: Span::unknown() };
                continue;
            }
            break;
        }
        Ok(expr)
    }
}
src/parser/expr/term.rs (new file, +27)
@@ -0,0 +1,27 @@
use crate::parser::{NyashParser, ParseError};
use crate::parser::common::ParserUtils;
use crate::tokenizer::TokenType;
use crate::ast::{ASTNode, BinaryOperator, Span};

impl NyashParser {
    pub(crate) fn expr_parse_term(&mut self) -> Result<ASTNode, ParseError> {
        let mut expr = self.expr_parse_shift()?;
        while self.match_token(&TokenType::PLUS) || self.match_token(&TokenType::MINUS) {
            let operator = match &self.current_token().token_type {
                TokenType::PLUS => BinaryOperator::Add,
                TokenType::MINUS => BinaryOperator::Subtract,
                _ => unreachable!(),
            };
            self.advance();
            let right = self.expr_parse_shift()?;
            if std::env::var("NYASH_GRAMMAR_DIFF").ok().as_deref() == Some("1") {
                let name = match operator { BinaryOperator::Add=>"add", BinaryOperator::Subtract=>"sub", _=>"term" };
                let ok = crate::grammar::engine::get().syntax_is_allowed_binop(name);
                if !ok { eprintln!("[GRAMMAR-DIFF][Parser] binop '{}' not allowed by syntax rules", name); }
            }
            expr = ASTNode::BinaryOp { operator, left: Box::new(expr), right: Box::new(right), span: Span::unknown() };
        }
        Ok(expr)
    }
}
@ -97,196 +97,34 @@ impl NyashParser {
|
|||||||
fn parse_and(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_and() }
|
fn parse_and(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_and() }
|
||||||
|
|
||||||
/// ビットOR: |
|
/// ビットOR: |
|
||||||
pub(crate) fn parse_bit_or(&mut self) -> Result<ASTNode, ParseError> {
|
pub(crate) fn parse_bit_or(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_bit_or() }
|
||||||
let mut expr = self.parse_bit_xor()?;
|
|
||||||
while self.match_token(&TokenType::BitOr) {
|
|
||||||
let operator = BinaryOperator::BitOr;
|
|
||||||
self.advance();
|
|
||||||
let right = self.parse_bit_xor()?;
|
|
||||||
expr = ASTNode::BinaryOp { operator, left: Box::new(expr), right: Box::new(right), span: Span::unknown() };
|
|
||||||
}
|
|
||||||
Ok(expr)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// ビットXOR: ^
|
/// ビットXOR: ^
|
||||||
fn parse_bit_xor(&mut self) -> Result<ASTNode, ParseError> {
|
fn parse_bit_xor(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_bit_xor() }
|
||||||
let mut expr = self.parse_bit_and()?;
|
|
||||||
while self.match_token(&TokenType::BitXor) {
|
|
||||||
let operator = BinaryOperator::BitXor;
|
|
||||||
self.advance();
|
|
||||||
let right = self.parse_bit_and()?;
|
|
||||||
expr = ASTNode::BinaryOp { operator, left: Box::new(expr), right: Box::new(right), span: Span::unknown() };
|
|
||||||
}
|
|
||||||
Ok(expr)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// ビットAND: &
|
/// ビットAND: &
|
||||||
fn parse_bit_and(&mut self) -> Result<ASTNode, ParseError> {
|
fn parse_bit_and(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_bit_and() }
|
||||||
let mut expr = self.parse_equality()?;
|
|
||||||
while self.match_token(&TokenType::BitAnd) {
|
|
||||||
let operator = BinaryOperator::BitAnd;
|
|
||||||
self.advance();
|
|
||||||
let right = self.parse_equality()?;
|
|
||||||
expr = ASTNode::BinaryOp { operator, left: Box::new(expr), right: Box::new(right), span: Span::unknown() };
|
|
||||||
}
|
|
||||||
Ok(expr)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// 等値演算子をパース: == !=
|
/// 等値演算子をパース: == !=
|
||||||
pub(crate) fn parse_equality(&mut self) -> Result<ASTNode, ParseError> {
|
pub(crate) fn parse_equality(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_equality() }
|
||||||
let mut expr = self.parse_comparison()?;
|
|
||||||
|
|
||||||
while self.match_token(&TokenType::EQUALS) || self.match_token(&TokenType::NotEquals) {
|
|
||||||
let operator = match &self.current_token().token_type {
|
|
||||||
TokenType::EQUALS => BinaryOperator::Equal,
|
|
||||||
TokenType::NotEquals => BinaryOperator::NotEqual,
|
|
||||||
_ => unreachable!(),
|
|
||||||
};
|
|
||||||
self.advance();
|
|
||||||
let right = self.parse_comparison()?;
|
|
||||||
if std::env::var("NYASH_GRAMMAR_DIFF").ok().as_deref() == Some("1") {
|
|
||||||
let name = match operator { BinaryOperator::Equal=>"eq", BinaryOperator::NotEqual=>"ne", _=>"cmp" };
|
|
||||||
let ok = crate::grammar::engine::get().syntax_is_allowed_binop(name);
|
|
||||||
if !ok { eprintln!("[GRAMMAR-DIFF][Parser] binop '{}' not allowed by syntax rules", name); }
|
|
||||||
}
|
|
||||||
expr = ASTNode::BinaryOp {
|
|
||||||
operator,
|
|
||||||
left: Box::new(expr),
|
|
||||||
right: Box::new(right),
|
|
||||||
span: Span::unknown(),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(expr)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// 比較演算子をパース: < <= > >=
|
/// 比較演算子をパース: < <= > >=
|
||||||
fn parse_comparison(&mut self) -> Result<ASTNode, ParseError> {
|
fn parse_comparison(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_comparison() }
|
||||||
let mut expr = self.parse_range()?;
|
|
||||||
|
|
||||||
while self.match_token(&TokenType::LESS) ||
|
|
||||||
self.match_token(&TokenType::LessEquals) ||
|
|
||||||
self.match_token(&TokenType::GREATER) ||
|
|
||||||
self.match_token(&TokenType::GreaterEquals) {
|
|
||||||
let operator = match &self.current_token().token_type {
|
|
||||||
TokenType::LESS => BinaryOperator::Less,
|
|
||||||
TokenType::LessEquals => BinaryOperator::LessEqual,
|
|
||||||
TokenType::GREATER => BinaryOperator::Greater,
|
|
||||||
TokenType::GreaterEquals => BinaryOperator::GreaterEqual,
|
|
||||||
_ => unreachable!(),
|
|
||||||
};
|
|
||||||
self.advance();
|
|
||||||
let right = self.parse_range()?;
|
|
||||||
expr = ASTNode::BinaryOp {
|
|
||||||
operator,
|
|
||||||
left: Box::new(expr),
|
|
||||||
right: Box::new(right),
|
|
||||||
span: Span::unknown(),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(expr)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// 範囲演算子: a .. b => Range(a,b)
|
/// 範囲演算子: a .. b => Range(a,b)
|
||||||
fn parse_range(&mut self) -> Result<ASTNode, ParseError> {
|
fn parse_range(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_range() }
|
||||||
let mut expr = self.parse_term()?;
|
|
||||||
while self.match_token(&TokenType::RANGE) {
|
|
||||||
if !is_sugar_enabled() {
|
|
||||||
let line = self.current_token().line;
|
|
||||||
return Err(ParseError::UnexpectedToken {
|
|
||||||
found: self.current_token().token_type.clone(),
|
|
||||||
expected: "enable NYASH_SYNTAX_SUGAR_LEVEL=basic|full for '..'".to_string(),
|
|
||||||
line,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
self.advance(); // consume '..'
|
|
||||||
let rhs = self.parse_term()?;
|
|
||||||
expr = ASTNode::FunctionCall { name: "Range".to_string(), arguments: vec![expr, rhs], span: Span::unknown() };
|
|
||||||
}
|
|
||||||
Ok(expr)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// 項をパース: + -
|
/// 項をパース: + -
|
||||||
fn parse_term(&mut self) -> Result<ASTNode, ParseError> {
|
fn parse_term(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_term() }
|
||||||
let mut expr = self.parse_shift()?;
|
|
||||||
|
|
||||||
while self.match_token(&TokenType::PLUS) || self.match_token(&TokenType::MINUS) {
|
|
||||||
let operator = match &self.current_token().token_type {
|
|
||||||
TokenType::PLUS => BinaryOperator::Add,
|
|
||||||
TokenType::MINUS => BinaryOperator::Subtract,
|
|
||||||
_ => unreachable!(),
|
|
||||||
};
|
|
||||||
self.advance();
|
|
||||||
let right = self.parse_shift()?;
|
|
||||||
if std::env::var("NYASH_GRAMMAR_DIFF").ok().as_deref() == Some("1") {
|
|
||||||
let name = match operator { BinaryOperator::Add=>"add", BinaryOperator::Subtract=>"sub", _=>"term" };
|
|
||||||
let ok = crate::grammar::engine::get().syntax_is_allowed_binop(name);
|
|
||||||
if !ok { eprintln!("[GRAMMAR-DIFF][Parser] binop '{}' not allowed by syntax rules", name); }
|
|
||||||
}
|
|
||||||
expr = ASTNode::BinaryOp {
|
|
||||||
operator,
|
|
||||||
left: Box::new(expr),
|
|
||||||
right: Box::new(right),
|
|
||||||
span: Span::unknown(),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(expr)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// シフトをパース: << >>
|
/// シフトをパース: << >>
|
||||||
fn parse_shift(&mut self) -> Result<ASTNode, ParseError> {
|
fn parse_shift(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_shift() }
|
||||||
let mut expr = self.parse_factor()?;
|
|
||||||
loop {
|
|
||||||
if self.match_token(&TokenType::ShiftLeft) {
|
|
||||||
self.advance();
|
|
||||||
let rhs = self.parse_factor()?;
|
|
||||||
expr = ASTNode::BinaryOp { operator: BinaryOperator::Shl, left: Box::new(expr), right: Box::new(rhs), span: Span::unknown() };
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if self.match_token(&TokenType::ShiftRight) {
|
|
||||||
self.advance();
|
|
||||||
let rhs = self.parse_factor()?;
|
|
||||||
expr = ASTNode::BinaryOp { operator: BinaryOperator::Shr, left: Box::new(expr), right: Box::new(rhs), span: Span::unknown() };
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
Ok(expr)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// 因子をパース: * /
|
/// 因子をパース: * /
|
||||||
fn parse_factor(&mut self) -> Result<ASTNode, ParseError> {
|
fn parse_factor(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_factor() }
|
||||||
let mut expr = self.parse_unary()?;
|
|
||||||
|
|
||||||
while self.match_token(&TokenType::MULTIPLY) || self.match_token(&TokenType::DIVIDE) || self.match_token(&TokenType::MODULO) {
|
|
||||||
let operator = match &self.current_token().token_type {
|
|
||||||
TokenType::MULTIPLY => BinaryOperator::Multiply,
|
|
||||||
TokenType::DIVIDE => BinaryOperator::Divide,
|
|
||||||
TokenType::MODULO => BinaryOperator::Modulo,
|
|
||||||
_ => unreachable!(),
|
|
||||||
};
|
|
||||||
self.advance();
|
|
||||||
let right = self.parse_unary()?;
|
|
||||||
if std::env::var("NYASH_GRAMMAR_DIFF").ok().as_deref() == Some("1") {
|
|
||||||
let name = match operator { BinaryOperator::Multiply=>"mul", BinaryOperator::Divide=>"div", _=>"mod" };
|
|
||||||
let ok = crate::grammar::engine::get().syntax_is_allowed_binop(name);
|
|
||||||
if !ok { eprintln!("[GRAMMAR-DIFF][Parser] binop '{}' not allowed by syntax rules", name); }
|
|
||||||
}
|
|
||||||
expr = ASTNode::BinaryOp {
|
|
||||||
operator,
|
|
||||||
left: Box::new(expr),
|
|
||||||
right: Box::new(right),
|
|
||||||
span: Span::unknown(),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(expr)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// 単項演算子をパース
|
/// 単項演算子をパース
|
||||||
fn parse_unary(&mut self) -> Result<ASTNode, ParseError> {
|
pub(crate) fn parse_unary(&mut self) -> Result<ASTNode, ParseError> {
|
||||||
// peek式の先読み
|
// peek式の先読み
|
||||||
if self.match_token(&TokenType::PEEK) {
|
if self.match_token(&TokenType::PEEK) {
|
||||||
return self.parse_peek_expr();
|
return self.parse_peek_expr();
|
||||||
@ -421,402 +259,10 @@ impl NyashParser {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// 関数・メソッド呼び出しをパース
|
/// 関数・メソッド呼び出しをパース
|
||||||
fn parse_call(&mut self) -> Result<ASTNode, ParseError> {
|
fn parse_call(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_call() }
|
||||||
let mut expr = self.parse_primary()?;
|
|
||||||
|
|
||||||
loop {
|
|
||||||
if self.match_token(&TokenType::DOT) {
|
|
||||||
self.advance(); // consume '.'
|
|
||||||
|
|
||||||
if let TokenType::IDENTIFIER(method_name) = &self.current_token().token_type {
|
|
||||||
let method_name = method_name.clone();
|
|
||||||
self.advance();
|
|
||||||
|
|
||||||
if self.match_token(&TokenType::LPAREN) {
|
|
||||||
// メソッド呼び出し: obj.method(args)
|
|
||||||
self.advance(); // consume '('
|
|
||||||
let mut arguments = Vec::new();
|
|
||||||
let mut _arg_count = 0;
|
|
||||||
|
|
||||||
while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
|
|
||||||
must_advance!(self, _unused, "method call argument parsing");
|
|
||||||
|
|
||||||
arguments.push(self.parse_expression()?);
|
|
||||||
_arg_count += 1;
|
|
||||||
|
|
||||||
if self.match_token(&TokenType::COMMA) {
|
|
||||||
self.advance();
|
|
||||||
// カンマの後の trailing comma をチェック
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
self.consume(TokenType::RPAREN)?;
|
|
||||||
|
|
||||||
expr = ASTNode::MethodCall {
|
|
||||||
object: Box::new(expr),
|
|
||||||
method: method_name,
|
|
||||||
arguments,
|
|
||||||
span: Span::unknown(),
|
|
||||||
};
|
|
||||||
} else {
|
|
||||||
// フィールドアクセス: obj.field
|
|
||||||
expr = ASTNode::FieldAccess {
|
|
||||||
object: Box::new(expr),
|
|
||||||
field: method_name,
|
|
||||||
span: Span::unknown(),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
let line = self.current_token().line;
|
|
||||||
return Err(ParseError::UnexpectedToken {
|
|
||||||
found: self.current_token().token_type.clone(),
|
|
||||||
expected: "identifier".to_string(),
|
|
||||||
line,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
} else if self.match_token(&TokenType::QmarkDot) {
|
|
||||||
if !is_sugar_enabled() {
|
|
||||||
let line = self.current_token().line;
|
|
||||||
return Err(ParseError::UnexpectedToken {
|
|
||||||
found: self.current_token().token_type.clone(),
|
|
||||||
expected: "enable NYASH_SYNTAX_SUGAR_LEVEL=basic|full for '?.'".to_string(),
|
|
||||||
line,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
self.advance(); // consume '?.'
|
|
||||||
// ident then optional call
|
|
||||||
let name = match &self.current_token().token_type {
|
|
||||||
TokenType::IDENTIFIER(s) => { let v = s.clone(); self.advance(); v }
|
|
||||||
_ => {
|
|
||||||
let line = self.current_token().line;
|
|
||||||
return Err(ParseError::UnexpectedToken { found: self.current_token().token_type.clone(), expected: "identifier after '?.'".to_string(), line });
|
|
||||||
}
|
|
||||||
};
|
|
||||||
let access = if self.match_token(&TokenType::LPAREN) {
|
|
||||||
// method call
|
|
||||||
self.advance();
|
|
||||||
let mut arguments = Vec::new();
|
|
||||||
while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
|
|
||||||
must_advance!(self, _unused, "safe method call arg parsing");
|
|
||||||
arguments.push(self.parse_expression()?);
|
|
||||||
if self.match_token(&TokenType::COMMA) { self.advance(); }
|
|
||||||
}
|
|
||||||
self.consume(TokenType::RPAREN)?;
|
|
||||||
ASTNode::MethodCall { object: Box::new(expr.clone()), method: name, arguments, span: Span::unknown() }
|
|
||||||
} else {
|
|
||||||
// field access
|
|
||||||
ASTNode::FieldAccess { object: Box::new(expr.clone()), field: name, span: Span::unknown() }
|
|
||||||
};
|
|
||||||
|
|
||||||
// Wrap with peek: peek expr { null => null, else => access(expr) }
|
|
||||||
expr = ASTNode::PeekExpr {
|
|
||||||
scrutinee: Box::new(expr.clone()),
|
|
||||||
arms: vec![(crate::ast::LiteralValue::Null, ASTNode::Literal { value: crate::ast::LiteralValue::Null, span: Span::unknown() })],
|
|
||||||
else_expr: Box::new(access),
|
|
||||||
span: Span::unknown(),
|
|
||||||
};
|
|
||||||
|
|
||||||
} else if self.match_token(&TokenType::LPAREN) {
|
|
||||||
// 関数呼び出し: function(args) または 一般式呼び出し: (callee)(args)
|
|
||||||
self.advance(); // consume '('
|
|
||||||
let mut arguments = Vec::new();
|
|
||||||
while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
|
|
||||||
must_advance!(self, _unused, "function call argument parsing");
|
|
||||||
arguments.push(self.parse_expression()?);
|
|
||||||
if self.match_token(&TokenType::COMMA) { self.advance(); }
|
|
||||||
}
|
|
||||||
self.consume(TokenType::RPAREN)?;
|
|
||||||
|
|
||||||
if let ASTNode::Variable { name, .. } = expr.clone() {
|
|
||||||
expr = ASTNode::FunctionCall { name, arguments, span: Span::unknown() };
|
|
||||||
} else {
|
|
||||||
expr = ASTNode::Call { callee: Box::new(expr), arguments, span: Span::unknown() };
|
|
||||||
}
|
|
||||||
} else if self.match_token(&TokenType::QUESTION) {
|
|
||||||
// 後置 ?(Result伝播)。三項 '?:' と衝突するため、
|
|
||||||
// 次トークンが式開始(識別子/数値/括弧/文字列/true/false/null など)の場合は消費せず上位へ委譲。
|
|
||||||
// ここでは「終端系(NEWLINE/EOF/)/, /})」のみ後置?を許容する。
|
|
||||||
let nt = self.peek_token();
|
|
||||||
let is_ender = matches!(nt,
|
|
||||||
TokenType::NEWLINE | TokenType::EOF | TokenType::RPAREN | TokenType::COMMA | TokenType::RBRACE
|
|
||||||
);
|
|
||||||
if !is_ender { break; }
|
|
||||||
self.advance();
|
|
||||||
expr = ASTNode::QMarkPropagate { expression: Box::new(expr), span: Span::unknown() };
|
|
||||||
} else {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(expr)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// 基本式をパース: リテラル、変数、括弧、this、new、配列リテラル(糖衣)
|
/// 基本式をパース: リテラル、変数、括弧、this、new、配列リテラル(糖衣)
|
||||||
fn parse_primary(&mut self) -> Result<ASTNode, ParseError> {
|
fn parse_primary(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_primary() }
|
||||||
-    match &self.current_token().token_type {
-        TokenType::LBRACK => {
-            // Array literal: [e1, e2, ...] (sugar)
-            let sugar_on = crate::parser::sugar_gate::is_enabled()
-                || std::env::var("NYASH_ENABLE_ARRAY_LITERAL").ok().as_deref() == Some("1");
-            if !sugar_on {
-                let line = self.current_token().line;
-                return Err(ParseError::UnexpectedToken {
-                    found: self.current_token().token_type.clone(),
-                    expected: "enable NYASH_SYNTAX_SUGAR_LEVEL=basic|full or NYASH_ENABLE_ARRAY_LITERAL=1".to_string(),
-                    line,
-                });
-            }
-            self.advance(); // consume '['
-            let mut elems: Vec<ASTNode> = Vec::new();
-            while !self.match_token(&TokenType::RBRACK) && !self.is_at_end() {
-                must_advance!(self, _unused, "array literal element parsing");
-                let el = self.parse_expression()?;
-                elems.push(el);
-                if self.match_token(&TokenType::COMMA) { self.advance(); }
-            }
-            self.consume(TokenType::RBRACK)?;
-            Ok(ASTNode::ArrayLiteral { elements: elems, span: Span::unknown() })
-        }
-        TokenType::LBRACE => {
-            // Map literal (Stage‑2, string keys only)
-            let sugar_on = crate::parser::sugar_gate::is_enabled()
-                || std::env::var("NYASH_ENABLE_MAP_LITERAL").ok().as_deref() == Some("1");
-            if !sugar_on {
-                let line = self.current_token().line;
-                return Err(ParseError::UnexpectedToken {
-                    found: self.current_token().token_type.clone(),
-                    expected: "enable NYASH_SYNTAX_SUGAR_LEVEL=basic|full or NYASH_ENABLE_MAP_LITERAL=1".to_string(),
-                    line,
-                });
-            }
-            self.advance(); // consume '{'
-            let mut entries: Vec<(String, ASTNode)> = Vec::new();
-            let sugar_level = std::env::var("NYASH_SYNTAX_SUGAR_LEVEL").ok();
-            let ident_key_on = std::env::var("NYASH_ENABLE_MAP_IDENT_KEY").ok().as_deref() == Some("1")
-                || sugar_level.as_deref() == Some("full");
-            while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
-                // Key: string literal (Stage‑2) or identifier key sugar (Stage‑3; gated)
-                let key = match &self.current_token().token_type {
-                    TokenType::STRING(s) => { let v = s.clone(); self.advance(); v }
-                    TokenType::IDENTIFIER(id) if ident_key_on => { let v = id.clone(); self.advance(); v }
-                    _ => {
-                        let line = self.current_token().line;
-                        return Err(ParseError::UnexpectedToken {
-                            found: self.current_token().token_type.clone(),
-                            expected: if ident_key_on { "string or identifier key in map literal".to_string() } else { "string key in map literal".to_string() },
-                            line,
-                        });
-                    }
-                };
-                self.consume(TokenType::COLON)?;
-                let value_expr = self.parse_expression()?;
-                entries.push((key, value_expr));
-                if self.match_token(&TokenType::COMMA) { self.advance(); }
-            }
-            self.consume(TokenType::RBRACE)?;
-            Ok(ASTNode::MapLiteral { entries, span: Span::unknown() })
-        }
-        TokenType::INCLUDE => {
-            // Allow include as an expression: include "path"
-            self.parse_include()
-        }
-        TokenType::STRING(s) => {
-            let value = s.clone();
-            self.advance();
-            // Use plain literal to keep primitives simple in interpreter/VM paths
-            Ok(ASTNode::Literal { value: LiteralValue::String(value), span: Span::unknown() })
-        }
-
-        TokenType::NUMBER(n) => {
-            let value = *n;
-            self.advance();
-            Ok(ASTNode::Literal { value: LiteralValue::Integer(value), span: Span::unknown() })
-        }
-
-        TokenType::FLOAT(f) => {
-            let value = *f;
-            self.advance();
-            Ok(ASTNode::Literal { value: LiteralValue::Float(value), span: Span::unknown() })
-        }
-
-        TokenType::TRUE => {
-            self.advance();
-            Ok(ASTNode::Literal { value: LiteralValue::Bool(true), span: Span::unknown() })
-        }
-
-        TokenType::FALSE => {
-            self.advance();
-            Ok(ASTNode::Literal { value: LiteralValue::Bool(false), span: Span::unknown() })
-        }
-
-        TokenType::NULL => {
-            self.advance();
-            Ok(ASTNode::Literal {
-                value: LiteralValue::Null,
-                span: Span::unknown(),
-            })
-        }
-
-        TokenType::THIS => {
-            // Deprecation: normalize 'this' to 'me'
-            if std::env::var("NYASH_DEPRECATE_THIS").ok().as_deref() == Some("1") {
-                eprintln!("[deprecate:this] 'this' is deprecated; use 'me' instead (line {})", self.current_token().line);
-            }
-            self.advance();
-            Ok(ASTNode::Me { span: Span::unknown() })
-        }
-
-        TokenType::ME => {
-            self.advance();
-            Ok(ASTNode::Me { span: Span::unknown() })
-        }
-
-        TokenType::NEW => {
-            self.advance();
-
-            if let TokenType::IDENTIFIER(class_name) = &self.current_token().token_type {
-                let class_name = class_name.clone();
-                self.advance();
-
-                // 🔥 Parse generic type arguments (<IntegerBox, StringBox>)
-                let type_arguments = if self.match_token(&TokenType::LESS) {
-                    self.advance(); // consume '<'
-                    let mut args = Vec::new();
-
-                    loop {
-                        if let TokenType::IDENTIFIER(type_name) = &self.current_token().token_type {
-                            args.push(type_name.clone());
-                            self.advance();
-
-                            if self.match_token(&TokenType::COMMA) {
-                                self.advance(); // consume ','
-                            } else {
-                                break;
-                            }
-                        } else {
-                            let line = self.current_token().line;
-                            return Err(ParseError::UnexpectedToken {
-                                found: self.current_token().token_type.clone(),
-                                expected: "type argument".to_string(),
-                                line,
-                            });
-                        }
-                    }
-
-                    self.consume(TokenType::GREATER)?; // consume '>'
-                    args
-                } else {
-                    Vec::new()
-                };
-
-                self.consume(TokenType::LPAREN)?;
-                let mut arguments = Vec::new();
-
-                while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
-                    must_advance!(self, _unused, "new expression argument parsing");
-
-                    arguments.push(self.parse_expression()?);
-                    if self.match_token(&TokenType::COMMA) {
-                        self.advance();
-                    }
-                }
-
-                self.consume(TokenType::RPAREN)?;
-
-                Ok(ASTNode::New {
-                    class: class_name,
-                    arguments,
-                    type_arguments,
-                    span: Span::unknown(),
-                })
-            } else {
-                let line = self.current_token().line;
-                Err(ParseError::UnexpectedToken {
-                    found: self.current_token().token_type.clone(),
-                    expected: "class name".to_string(),
-                    line,
-                })
-            }
-        }
-
-        TokenType::FROM => {
-            // Parse the from syntax: from Parent.method(arguments)
-            self.parse_from_call()
-        }
-
-        TokenType::IDENTIFIER(name) => {
-            let parent = name.clone();
-            self.advance();
-            if self.match_token(&TokenType::DoubleColon) {
-                // Parent::method(args)
-                self.advance(); // consume '::'
-                let method = match &self.current_token().token_type {
-                    TokenType::IDENTIFIER(m) => { let s=m.clone(); self.advance(); s }
-                    TokenType::INIT => { self.advance(); "init".to_string() }
-                    TokenType::PACK => { self.advance(); "pack".to_string() }
-                    TokenType::BIRTH => { self.advance(); "birth".to_string() }
-                    _ => {
-                        let line = self.current_token().line;
-                        return Err(ParseError::UnexpectedToken { found: self.current_token().token_type.clone(), expected: "method name".to_string(), line });
-                    }
-                };
-                self.consume(TokenType::LPAREN)?;
-                let mut arguments = Vec::new();
-                while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
-                    must_advance!(self, _unused, "Parent::method call argument parsing");
-                    arguments.push(self.parse_expression()?);
-                    if self.match_token(&TokenType::COMMA) { self.advance(); }
-                }
-                self.consume(TokenType::RPAREN)?;
-                Ok(ASTNode::FromCall { parent, method, arguments, span: Span::unknown() })
-            } else {
-                Ok(ASTNode::Variable { name: parent, span: Span::unknown() })
-            }
-        }
-
-        TokenType::LPAREN => {
-            self.advance(); // consume '('
-            let expr = self.parse_expression()?;
-            self.consume(TokenType::RPAREN)?;
-            Ok(expr)
-        }
-
-        TokenType::FN => {
-            // Anonymous function: fn (params?) { body }
-            self.advance(); // consume 'fn'
-            let mut params: Vec<String> = Vec::new();
-            if self.match_token(&TokenType::LPAREN) {
-                self.advance();
-                while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
-                    if let TokenType::IDENTIFIER(p) = &self.current_token().token_type {
-                        params.push(p.clone());
-                        self.advance();
-                        if self.match_token(&TokenType::COMMA) { self.advance(); }
-                    } else {
-                        let line = self.current_token().line;
-                        return Err(ParseError::UnexpectedToken { found: self.current_token().token_type.clone(), expected: "parameter name".to_string(), line });
-                    }
-                }
-                self.consume(TokenType::RPAREN)?;
-            }
-            self.consume(TokenType::LBRACE)?;
-            let mut body: Vec<ASTNode> = Vec::new();
-            while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
-                self.skip_newlines();
-                if !self.match_token(&TokenType::RBRACE) {
-                    body.push(self.parse_statement()?);
-                }
-            }
-            self.consume(TokenType::RBRACE)?;
-            Ok(ASTNode::Lambda { params, body, span: Span::unknown() })
-        }
-
-        _ => {
-            let line = self.current_token().line;
-            Err(ParseError::InvalidExpression { line })
-        }
-    }
-}

/// Parse the from syntax: from Parent.method(arguments)
pub(super) fn parse_from_call(&mut self) -> Result<ASTNode, ParseError> {
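The wrapper left behind by this split delegates to the moved implementation (parse_primary now calls self.expr_parse_primary()). A minimal sketch of the intended layout, assuming the moved code lives in a parser/expr submodule and writing the parser type as NyashParser for illustration; the exact module layout and type name are assumptions:

// expressions.rs keeps only the compatibility wrapper.
impl NyashParser {
    fn parse_primary(&mut self) -> Result<ASTNode, ParseError> {
        self.expr_parse_primary()
    }
}

// parser/expr/ holds the implementation that used to live in expressions.rs.
impl NyashParser {
    pub(super) fn expr_parse_primary(&mut self) -> Result<ASTNode, ParseError> {
        match &self.current_token().token_type {
            // literal / variable / new / fn arms moved verbatim from expressions.rs ...
            _ => Err(ParseError::InvalidExpression { line: self.current_token().line }),
        }
    }
}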
@@ -20,7 +20,7 @@ impl NyashRunner {
        if std::env::var("NYASH_USE_NY_COMPILER").ok().as_deref() == Some("1") {
            if self.try_run_selfhost_pipeline(filename) {
                return;
-           } else if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") {
+           } else if crate::config::env::cli_verbose() {
                eprintln!("[ny-compiler] fallback to default path (MVP unavailable for this input)");
            }
        }
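Every crate::config::env::cli_verbose() call introduced in these hunks goes through the centralized getter in the config::env module. A minimal sketch of what such a getter could look like, assuming the flag is read once and cached; the real getter may simply re-read the variable on each call:

use std::sync::OnceLock;

// Hypothetical shape of the centralized verbose getter.
pub fn cli_verbose() -> bool {
    static CACHE: OnceLock<bool> = OnceLock::new();
    *CACHE.get_or_init(|| std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1"))
}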
@@ -33,7 +33,7 @@ impl NyashRunner {
        };
        match json_v0_bridge::parse_source_v0_to_module(&code) {
            Ok(module) => {
-               if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") {
+               if crate::config::env::cli_verbose() {
                    println!("🚀 Nyash MIR Interpreter - (parser=ny) Executing file: {} 🚀", filename);
                }
                self.execute_mir_module(&module);

@@ -59,7 +59,7 @@ impl NyashRunner {

        // MIR dump/verify
        if self.config.dump_mir || self.config.verify_mir {
-           if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") {
+           if crate::config::env::cli_verbose() {
                println!("🚀 Nyash MIR Compiler - Processing file: {} 🚀", filename);
            }
            self.execute_mir_mode(filename);
@@ -83,13 +83,13 @@ impl NyashRunner {
        // Backend selection
        match self.config.backend.as_str() {
            "mir" => {
-               if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") {
+               if crate::config::env::cli_verbose() {
                    println!("🚀 Nyash MIR Interpreter - Executing file: {} 🚀", filename);
                }
                self.execute_mir_interpreter_mode(filename);
            }
            "vm" => {
-               if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") {
+               if crate::config::env::cli_verbose() {
                    println!("🚀 Nyash VM Backend - Executing file: {} 🚀", filename);
                }
                self.execute_vm_mode(filename);

@@ -97,7 +97,7 @@ impl NyashRunner {
            "cranelift" => {
                #[cfg(feature = "cranelift-jit")]
                {
-                   if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") {
+                   if crate::config::env::cli_verbose() {
                        println!("⚙️ Nyash Cranelift JIT - Executing file: {}", filename);
                    }
                    self.execute_cranelift_mode(filename);

@@ -109,13 +109,13 @@ impl NyashRunner {
                }
            }
            "llvm" => {
-               if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") {
+               if crate::config::env::cli_verbose() {
                    println!("⚡ Nyash LLVM Backend - Executing file: {} ⚡", filename);
                }
                self.execute_llvm_mode(filename);
            }
            _ => {
-               if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") {
+               if crate::config::env::cli_verbose() {
                    println!("🦀 Nyash Rust Implementation - Executing file: {} 🦀", filename);
                    if let Some(fuel) = self.config.debug_fuel {
                        println!("🔥 Debug fuel limit: {} iterations", fuel);
@@ -140,7 +140,7 @@ impl NyashRunner {
            Err(e) => { eprintln!("[ny-compiler] read error: {}", e); return false; }
        };
        // Optional Phase-15: strip `using` lines and register modules (same policy as execute_nyash_file)
-       let enable_using = std::env::var("NYASH_ENABLE_USING").ok().as_deref() == Some("1");
+       let enable_using = crate::config::env::enable_using();
        let mut code_ref: std::borrow::Cow<'_, str> = std::borrow::Cow::Borrowed(&code);
        if enable_using {
            let mut out = String::with_capacity(code.len());

@@ -148,9 +148,7 @@ impl NyashRunner {
            for line in code.lines() {
                let t = line.trim_start();
                if t.starts_with("using ") {
-                   if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") {
-                       eprintln!("[using] stripped(line→selfhost): {}", line);
-                   }
+                   cli_v!("[using] stripped(line→selfhost): {}", line);
                    let rest0 = t.strip_prefix("using ").unwrap().trim();
                    let rest0 = rest0.strip_suffix(';').unwrap_or(rest0).trim();
                    let (target, alias) = if let Some(pos) = rest0.find(" as ") {
@@ -188,7 +186,7 @@ impl NyashRunner {
        }

        // Write to tmp/ny_parser_input.ny (as expected by Ny parser v0), unless forced to reuse existing tmp
-       let use_tmp_only = std::env::var("NYASH_NY_COMPILER_USE_TMP_ONLY").ok().as_deref() == Some("1");
+       let use_tmp_only = crate::config::env::ny_compiler_use_tmp_only();
        let tmp_dir = std::path::Path::new("tmp");
        if let Err(e) = std::fs::create_dir_all(tmp_dir) {
            eprintln!("[ny-compiler] mkdir tmp failed: {}", e);

@@ -207,9 +205,9 @@ impl NyashRunner {
            }
        }
        // EXE-first: if requested, try external parser EXE (nyash_compiler)
-       if std::env::var("NYASH_USE_NY_COMPILER_EXE").ok().as_deref() == Some("1") {
+       if crate::config::env::use_ny_compiler_exe() {
            // Resolve parser EXE path
-           let exe_path = if let Ok(p) = std::env::var("NYASH_NY_COMPILER_EXE_PATH") {
+           let exe_path = if let Some(p) = crate::config::env::ny_compiler_exe_path() {
                std::path::PathBuf::from(p)
            } else {
                let mut p = std::path::PathBuf::from("dist/nyash_compiler");
@@ -227,10 +225,10 @@ impl NyashRunner {
            // Prefer passing the original filename directly (parser EXE accepts positional path)
            cmd.arg(filename);
            // Gates
-           if std::env::var("NYASH_NY_COMPILER_MIN_JSON").ok().as_deref() == Some("1") { cmd.arg("--min-json"); }
+           if crate::config::env::ny_compiler_min_json() { cmd.arg("--min-json"); }
-           if std::env::var("NYASH_SELFHOST_READ_TMP").ok().as_deref() == Some("1") { cmd.arg("--read-tmp"); }
+           if crate::config::env::selfhost_read_tmp() { cmd.arg("--read-tmp"); }
-           if let Ok(raw) = std::env::var("NYASH_NY_COMPILER_CHILD_ARGS") { for tok in raw.split_whitespace() { cmd.arg(tok); } }
+           if let Some(raw) = crate::config::env::ny_compiler_child_args() { for tok in raw.split_whitespace() { cmd.arg(tok); } }
-           let timeout_ms: u64 = std::env::var("NYASH_NY_COMPILER_TIMEOUT_MS").ok().and_then(|s| s.parse().ok()).unwrap_or(2000);
+           let timeout_ms: u64 = crate::config::env::ny_compiler_timeout_ms();
            let mut cmd = cmd.stdout(Stdio::piped()).stderr(Stdio::piped());
            let mut child = match cmd.spawn() { Ok(c) => c, Err(e) => { eprintln!("[ny-compiler] exe spawn failed: {}", e); return false; } };
            let mut ch_stdout = child.stdout.take();

@@ -260,7 +258,7 @@ impl NyashRunner {
            let mut json_line = String::new();
            for line in stdout.lines() { let t = line.trim(); if t.starts_with('{') && t.contains("\"version\"") && t.contains("\"kind\"") { json_line = t.to_string(); break; } }
            if json_line.is_empty() {
-               if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") {
+               if crate::config::env::cli_verbose() {
                    let head: String = stdout.chars().take(200).collect();
                    let errh: String = String::from_utf8_lossy(&err_buf).chars().take(200).collect();
                    eprintln!("[ny-compiler] exe produced no JSON; stdout(head)='{}' stderr(head)='{}'", head.replace('\n', "\\n"), errh.replace('\n', "\\n"));
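The gate getters used above mirror the env reads they replace. A sketch of plausible shapes, with signatures inferred from the call sites in this diff (bool for the --min-json gate, Option<String> for child args, u64 with the same 2000 ms default for the timeout); the bodies are assumptions:

// Hypothetical getters in config::env; names follow the call sites above.
pub fn ny_compiler_min_json() -> bool {
    std::env::var("NYASH_NY_COMPILER_MIN_JSON").ok().as_deref() == Some("1")
}

pub fn ny_compiler_child_args() -> Option<String> {
    std::env::var("NYASH_NY_COMPILER_CHILD_ARGS").ok()
}

pub fn ny_compiler_timeout_ms() -> u64 {
    std::env::var("NYASH_NY_COMPILER_TIMEOUT_MS")
        .ok()
        .and_then(|s| s.parse().ok())
        .unwrap_or(2000) // same default as the inlined code it replaces
}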
@@ -272,12 +270,12 @@ impl NyashRunner {
                Ok(module) => {
                    println!("🚀 Ny compiler EXE path (ny→json_v0) ON");
                    json_v0_bridge::maybe_dump_mir(&module);
-                   let emit_only = std::env::var("NYASH_NY_COMPILER_EMIT_ONLY").unwrap_or_else(|_| "1".to_string()) == "1";
+                   let emit_only = crate::config::env::ny_compiler_emit_only();
                    if emit_only {
                        return false;
                    } else {
                        // Prefer PyVM when requested (reference semantics), regardless of BoxCall presence
-                       let prefer_pyvm = std::env::var("NYASH_VM_USE_PY").ok().as_deref() == Some("1");
+                       let prefer_pyvm = crate::config::env::vm_use_py();
                        if prefer_pyvm {
                            if let Ok(py3) = which::which("python3") {
                                let runner = std::path::Path::new("tools/pyvm_runner.py");

@@ -289,7 +287,7 @@ impl NyashRunner {
                                    eprintln!("❌ PyVM MIR JSON emit error: {}", e);
                                    return true; // prevent double-run fallback
                                }
-                               if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") {
+                               if crate::config::env::cli_verbose() {
                                    eprintln!("[ny-compiler] using PyVM (exe) → {}", mir_json_path.display());
                                }
                                // Determine entry function hint (prefer Main.main if present)
@@ -307,11 +305,7 @@ impl NyashRunner {
                                    .map_err(|e| format!("spawn pyvm: {}", e))
                                    .unwrap();
                                let code = status.code().unwrap_or(1);
-                               if !status.success() {
-                                   if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") {
-                                       eprintln!("❌ PyVM (exe) failed (status={})", code);
-                                   }
-                               }
+                               if !status.success() { if crate::config::env::cli_verbose() { eprintln!("❌ PyVM (exe) failed (status={})", code); } }
                                // Harmonize CLI output with interpreter path for smokes
                                println!("Result: {}", code);
                                std::process::exit(code);

@@ -332,7 +326,7 @@ impl NyashRunner {
                Err(e) => { eprintln!("[ny-compiler] JSON parse failed (exe): {}", e); return false; }
            }
        } else {
-           if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") { eprintln!("[ny-compiler] exe not found at {}", exe_path.display()); }
+           if crate::config::env::cli_verbose() { eprintln!("[ny-compiler] exe not found at {}", exe_path.display()); }
        }
    }

@@ -419,7 +413,7 @@ impl NyashRunner {
        } else if let Ok(s) = String::from_utf8(err_buf.clone()) {
            // If the child exited non-zero and printed stderr, surface it and fallback
            // We cannot easily access ExitStatus here after try_wait loop; rely on JSON detection path.
-           if s.trim().len() > 0 && std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") {
+           if s.trim().len() > 0 && crate::config::env::cli_verbose() {
                eprintln!("[ny-compiler] parser stderr:\n{}", s);
            }
        }

@@ -430,10 +424,10 @@ impl NyashRunner {
        }
        if json_line.is_empty() {
            // Fallback: try Python MVP parser to produce JSON v0 from the same tmp source (unless skipped).
-           if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") {
+           if crate::config::env::cli_verbose() {
                let head: String = stdout.chars().take(200).collect();
-               eprintln!("[ny-compiler] JSON not found in child stdout (head): {}", head.replace('\n', "\\n"));
+               cli_v!("[ny-compiler] JSON not found in child stdout (head): {}", head.replace('\\n', "\\n"));
-               eprintln!("[ny-compiler] falling back to tools/ny_parser_mvp.py for this input");
+               cli_v!("[ny-compiler] falling back to tools/ny_parser_mvp.py for this input");
            }
            if std::env::var("NYASH_NY_COMPILER_SKIP_PY").ok().as_deref() != Some("1") {
                let py = which::which("python3").ok();
@@ -470,7 +464,7 @@ impl NyashRunner {
        match json_v0_bridge::parse_json_v0_to_module(&json_line) {
            Ok(module) => {
                let emit_only_default = "1".to_string();
-               let emit_only = std::env::var("NYASH_NY_COMPILER_EMIT_ONLY").unwrap_or(emit_only_default) == "1";
+               let emit_only = if emit_only_default == "1" { true } else { crate::config::env::ny_compiler_emit_only() };
                println!("🚀 Ny compiler MVP (ny→json_v0) path ON");
                json_v0_bridge::maybe_dump_mir(&module);
                if emit_only {

@@ -478,7 +472,7 @@ impl NyashRunner {
                    false
                } else {
                    // Prefer PyVM when requested (reference semantics)
-                   let prefer_pyvm = std::env::var("NYASH_VM_USE_PY").ok().as_deref() == Some("1");
+                   let prefer_pyvm = crate::config::env::vm_use_py();
                    if prefer_pyvm {
                        if let Ok(py3) = which::which("python3") {
                            let runner = std::path::Path::new("tools/pyvm_runner.py");

@@ -490,7 +484,7 @@ impl NyashRunner {
                                eprintln!("❌ PyVM MIR JSON emit error: {}", e);
                                return true; // prevent double-run fallback
                            }
-                           if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") {
+                           if crate::config::env::cli_verbose() {
                                eprintln!("[ny-compiler] using PyVM (mvp) → {}", mir_json_path.display());
                            }
                            // Determine entry function hint (prefer Main.main if present)
@@ -508,11 +502,7 @@ impl NyashRunner {
                                .map_err(|e| format!("spawn pyvm: {}", e))
                                .unwrap();
                            let code = status.code().unwrap_or(1);
-                           if !status.success() {
-                               if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") {
-                                   eprintln!("❌ PyVM (mvp) failed (status={})", code);
-                               }
-                           }
+                           if !status.success() { if crate::config::env::cli_verbose() { eprintln!("❌ PyVM (mvp) failed (status={})", code); } }
                            // Harmonize CLI output with interpreter path for smokes
                            println!("Result: {}", code);
                            std::process::exit(code);
@@ -550,14 +540,14 @@ impl NyashRunner {
            Ok(content) => content,
            Err(e) => { eprintln!("❌ Error reading file {}: {}", filename, e); process::exit(1); }
        };
-       if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") && !quiet_pipe {
+       if crate::config::env::cli_verbose() && !quiet_pipe {
            println!("📝 File contents:\n{}", code);
            println!("\n🚀 Parsing and executing...\n");
        }

        // Optional Phase-15: strip `using` lines (gate) for minimal acceptance
        let mut code_ref: &str = &code;
-       let enable_using = std::env::var("NYASH_ENABLE_USING").ok().as_deref() == Some("1");
+       let enable_using = crate::config::env::enable_using();
        let cleaned_code_owned;
        if enable_using {
            let mut out = String::with_capacity(code.len());

@@ -566,7 +556,7 @@ impl NyashRunner {
                let t = line.trim_start();
                if t.starts_with("using ") {
                    // Skip `using ns` or `using ns as alias` lines (MVP)
-                   if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") {
+                   if crate::config::env::cli_verbose() {
                        eprintln!("[using] stripped line: {}", line);
                    }
                    // Parse namespace or path and optional alias
@@ -587,7 +577,7 @@ impl NyashRunner {
                        if std::env::var("NYASH_USING_STRICT").ok().as_deref() == Some("1") {
                            eprintln!("❌ using: path not found: {}", path);
                            std::process::exit(1);
-                       } else if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") {
+                       } else if crate::config::env::cli_verbose() {
                            eprintln!("[using] path not found (continuing): {}", path);
                        }
                    }

@@ -612,7 +602,7 @@ impl NyashRunner {
            // Register modules with resolver (aliases/modules/paths)
            let using_ctx = self.init_using_context();
            let strict = std::env::var("NYASH_USING_STRICT").ok().as_deref() == Some("1");
-           let verbose = std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1");
+           let verbose = crate::config::env::cli_verbose();
            let ctx_dir = std::path::Path::new(filename).parent();
            for (ns_or_alias, alias_or_path) in used_names {
                if let Some(path) = alias_or_path {

@@ -656,7 +646,7 @@ impl NyashRunner {
                        if std::env::var("NYASH_USING_STRICT").ok().as_deref() == Some("1") {
                            eprintln!("❌ import: path not found: {} (from {})", p.display(), filename);
                            process::exit(1);
-                       } else if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") || std::env::var("NYASH_IMPORT_TRACE").ok().as_deref() == Some("1") {
+                       } else if crate::config::env::cli_verbose() || std::env::var("NYASH_IMPORT_TRACE").ok().as_deref() == Some("1") {
                            eprintln!("[import] path not found (continuing): {}", p.display());
                        }
                    }
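The `using` stripping that these hunks keep touching splits `using ns` and `using ns as alias` lines before module registration. A small sketch of just that split, following the strip_prefix / strip_suffix / find(" as ") steps visible in the diff; split_using_line is a hypothetical helper and path resolution is omitted:

// Returns (target, optional alias) for a `using` line, or None if it is not one.
fn split_using_line(line: &str) -> Option<(String, Option<String>)> {
    let t = line.trim_start();
    let rest = t.strip_prefix("using ")?.trim();
    let rest = rest.strip_suffix(';').unwrap_or(rest).trim();
    Some(match rest.find(" as ") {
        Some(pos) => (rest[..pos].trim().to_string(), Some(rest[pos + 4..].trim().to_string())),
        None => (rest.to_string(), None),
    })
}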
@@ -673,8 +663,8 @@ impl NyashRunner {
            }
        }

-       if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") && !quiet_pipe {
+       if crate::config::env::cli_verbose() && !quiet_pipe {
-           println!("✅ Parse successful!");
+           if crate::config::env::cli_verbose() { println!("✅ Parse successful!"); }
        }

        // Execute the AST

@@ -682,8 +672,8 @@ impl NyashRunner {
        eprintln!("🔍 DEBUG: Starting execution...");
        match interpreter.execute(ast) {
            Ok(result) => {
-               if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") && !quiet_pipe {
+               if crate::config::env::cli_verbose() && !quiet_pipe {
-                   println!("✅ Execution completed successfully!");
+                   if crate::config::env::cli_verbose() { println!("✅ Execution completed successfully!"); }
                }
                // Normalize display via semantics: prefer numeric, then string, then fallback
                let disp = {
@@ -36,7 +36,7 @@ impl NyashRunner {
        // Optional dump via env verbose
        super::json_v0_bridge::maybe_dump_mir(&module);
        // Optional: delegate to PyVM when NYASH_PIPE_USE_PYVM=1
-       if std::env::var("NYASH_PIPE_USE_PYVM").ok().as_deref() == Some("1") {
+       if crate::config::env::pipe_use_pyvm() {
            let py = which::which("python3").ok();
            if let Some(py3) = py {
                let runner = std::path::Path::new("tools/pyvm_runner.py");

@@ -93,4 +93,3 @@ impl NyashRunner {
            }
        }
    }
-
@@ -25,7 +25,7 @@ impl NyashRunner {
            Err(e) => { eprintln!("[ny-compiler] read error: {}", e); return false; }
        };
        // Optional Phase-15: strip `using` lines and register modules (same policy as execute_nyash_file)
-       let enable_using = std::env::var("NYASH_ENABLE_USING").ok().as_deref() == Some("1");
+       let enable_using = crate::config::env::enable_using();
        let mut code_ref: std::borrow::Cow<'_, str> = std::borrow::Cow::Borrowed(&code);
        if enable_using {
            let mut out = String::with_capacity(code.len());

@@ -33,7 +33,7 @@ impl NyashRunner {
            for line in code.lines() {
                let t = line.trim_start();
                if t.starts_with("using ") {
-                   if std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1") {
+                   if crate::config::env::cli_verbose() {
                        eprintln!("[using] stripped(line→selfhost): {}", line);
                    }
                    let rest0 = t.strip_prefix("using ").unwrap().trim();

@@ -73,7 +73,7 @@ impl NyashRunner {
        }

        // Write to tmp/ny_parser_input.ny (as expected by Ny parser v0), unless forced to reuse existing tmp
-       let use_tmp_only = std::env::var("NYASH_NY_COMPILER_USE_TMP_ONLY").ok().as_deref() == Some("1");
+       let use_tmp_only = crate::config::env::ny_compiler_use_tmp_only();
        let tmp_dir = std::path::Path::new("tmp");
        if let Err(e) = std::fs::create_dir_all(tmp_dir) {
            eprintln!("[ny-compiler] mkdir tmp failed: {}", e);
@@ -100,19 +100,19 @@ impl NyashRunner {
        let mut cmd = std::process::Command::new(&exe);
        cmd.arg("--backend").arg("vm").arg(parser_prog);
        // Forward minimal args to child parser program
-       if std::env::var("NYASH_NY_COMPILER_MIN_JSON").ok().as_deref() == Some("1") {
+       if crate::config::env::ny_compiler_min_json() {
            cmd.arg("--").arg("--min-json");
        }
        // Always feed input via tmp file written by the parent pipeline
        cmd.arg("--").arg("--read-tmp");
-       if std::env::var("NYASH_NY_COMPILER_STAGE3").ok().as_deref() == Some("1") {
+       if crate::config::env::ny_compiler_stage3() {
            cmd.arg("--").arg("--stage3");
        }
        // Suppress parent noise and keep only JSON from child
        cmd.env_remove("NYASH_USE_NY_COMPILER");
        cmd.env_remove("NYASH_CLI_VERBOSE");
        cmd.env("NYASH_JSON_ONLY", "1");
-       let timeout_ms: u64 = std::env::var("NYASH_NY_COMPILER_TIMEOUT_MS").ok().and_then(|s| s.parse().ok()).unwrap_or(2000);
+       let timeout_ms: u64 = crate::config::env::ny_compiler_timeout_ms();
        let mut cmd = cmd.stdout(std::process::Stdio::piped()).stderr(std::process::Stdio::piped());
        if let Ok(mut child) = cmd.spawn() {
            let mut ch_stdout = child.stdout.take();

@@ -146,10 +146,10 @@ impl NyashRunner {
            match super::json_v0_bridge::parse_json_v0_to_module(&json_line) {
                Ok(module) => {
                    super::json_v0_bridge::maybe_dump_mir(&module);
-                   let emit_only = std::env::var("NYASH_NY_COMPILER_EMIT_ONLY").unwrap_or_else(|_| "1".to_string()) == "1";
+                   let emit_only = crate::config::env::ny_compiler_emit_only();
                    if emit_only { return false; }
                    // Prefer PyVM path when requested
-                   if std::env::var("NYASH_VM_USE_PY").ok().as_deref() == Some("1") {
+                   if crate::config::env::vm_use_py() {
                        if let Ok(py3) = which::which("python3") {
                            let runner = std::path::Path::new("tools/pyvm_runner.py");
                            if runner.exists() {
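The child parser EXE is spawned with piped stdio and bounded by timeout_ms; an earlier comment in this diff refers to the try_wait loop that enforces it. A rough sketch of such a loop, assuming polling with a short sleep; the real code also drains stdout and stderr while waiting, which is omitted here:

use std::time::{Duration, Instant};

// Wait for the child up to `timeout_ms`, killing it on timeout or wait error.
fn wait_with_timeout(child: &mut std::process::Child, timeout_ms: u64) -> Option<std::process::ExitStatus> {
    let deadline = Instant::now() + Duration::from_millis(timeout_ms);
    loop {
        match child.try_wait() {
            Ok(Some(status)) => return Some(status),           // child finished
            Ok(None) if Instant::now() < deadline => {
                std::thread::sleep(Duration::from_millis(10)); // still running, poll again
            }
            _ => { let _ = child.kill(); return None; }        // timed out or wait error
        }
    }
}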
@@ -1,18 +1,12 @@
-/*!
- * Runner trace utilities — centralized verbose logging
- */
-
-/// Returns true when runner-level verbose tracing is enabled.
-/// Controlled by `NYASH_CLI_VERBOSE=1` or `NYASH_RESOLVE_TRACE=1`.
-pub fn enabled() -> bool {
-    std::env::var("NYASH_CLI_VERBOSE").ok().as_deref() == Some("1")
-        || std::env::var("NYASH_RESOLVE_TRACE").ok().as_deref() == Some("1")
-}
-
-/// Emit a single-line trace message when enabled.
-pub fn log<S: AsRef<str>>(msg: S) {
-    if enabled() {
-        eprintln!("{}", msg.as_ref());
-    }
-}
+//! Runner tracing helpers (verbose-guarded)
+
+/// Return whether CLI verbose logging is enabled
+pub fn cli_verbose() -> bool { crate::config::env::cli_verbose() }
+
+#[macro_export]
+macro_rules! cli_v {
+    ($($arg:tt)*) => {{
+        if crate::config::env::cli_verbose() { eprintln!($($arg)*); }
+    }};
+}
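With cli_v! exported at the crate root, call sites can drop the hand-written verbose guard. A usage example based on the call sites shown earlier in this diff; the wrapping function is only for illustration:

fn log_stripped_using(line: &str) {
    // Before: every call site guarded the log line by hand.
    if crate::config::env::cli_verbose() {
        eprintln!("[using] stripped(line→selfhost): {}", line);
    }
    // After: the guard lives inside the macro.
    crate::cli_v!("[using] stripped(line→selfhost): {}", line);
}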
|
|||||||
Reference in New Issue
Block a user