chore(fmt): add legacy stubs and strip trailing whitespace to unblock cargo fmt
@@ -1,20 +1,20 @@
/*!
 * Parser Common Utilities
 *
 *
 * Helper functions and type definitions shared between parser modules
 * Extracted from parser/mod.rs as part of modularization
 */

use crate::tokenizer::{Token, TokenType};
use crate::ast::Span;
use super::ParseError;
use crate::ast::Span;
use crate::tokenizer::{Token, TokenType};

/// Parser utility methods
pub trait ParserUtils {
    fn tokens(&self) -> &Vec<Token>;
    fn current(&self) -> usize;
    fn current_mut(&mut self) -> &mut usize;

    /// Get the current token
    fn current_token(&self) -> &Token {
        self.tokens().get(self.current()).unwrap_or(&Token {
@@ -23,7 +23,7 @@ pub trait ParserUtils {
            column: 0,
        })
    }

    /// Peek at the next token (without advancing the position)
    fn peek_token(&self) -> &TokenType {
        if self.current() + 1 < self.tokens().len() {
@@ -32,7 +32,7 @@ pub trait ParserUtils {
            &TokenType::EOF
        }
    }

    /// Peek N tokens ahead
    #[allow(dead_code)]
    fn peek_nth_token(&self, n: usize) -> &TokenType {
@@ -42,25 +42,26 @@ pub trait ParserUtils {
            &TokenType::EOF
        }
    }

    /// Advance the position by one
    fn advance(&mut self) {
        if !self.is_at_end() {
            *self.current_mut() += 1;
        }
    }

    /// Skip NEWLINE tokens
    fn skip_newlines(&mut self) {
        while matches!(self.current_token().token_type, TokenType::NEWLINE) && !self.is_at_end() {
            self.advance();
        }
    }

    /// Consume the expected token type (error if it is not as expected)
    fn consume(&mut self, expected: TokenType) -> Result<Token, ParseError> {
        if std::mem::discriminant(&self.current_token().token_type) ==
           std::mem::discriminant(&expected) {
        if std::mem::discriminant(&self.current_token().token_type)
            == std::mem::discriminant(&expected)
        {
            let token = self.current_token().clone();
            self.advance();
            Ok(token)
@@ -73,47 +74,50 @@ pub trait ParserUtils {
            })
        }
    }

    /// Check whether the current token is of the specified type
    fn match_token(&self, token_type: &TokenType) -> bool {
        std::mem::discriminant(&self.current_token().token_type) ==
            std::mem::discriminant(token_type)
        std::mem::discriminant(&self.current_token().token_type)
            == std::mem::discriminant(token_type)
    }

    /// Check whether the current token matches any of several token types
    #[allow(dead_code)]
    fn match_any_token(&self, token_types: &[TokenType]) -> bool {
        let current_discriminant = std::mem::discriminant(&self.current_token().token_type);
        token_types.iter().any(|tt| {
            std::mem::discriminant(tt) == current_discriminant
        })
        token_types
            .iter()
            .any(|tt| std::mem::discriminant(tt) == current_discriminant)
    }

    /// Check whether the end of input has been reached
    fn is_at_end(&self) -> bool {
        self.current() >= self.tokens().len() ||
            matches!(self.current_token().token_type, TokenType::EOF)
        self.current() >= self.tokens().len()
            || matches!(self.current_token().token_type, TokenType::EOF)
    }

    /// Check whether the current token is the end of a line (NEWLINE or EOF)
    #[allow(dead_code)]
    fn is_line_end(&self) -> bool {
        matches!(self.current_token().token_type, TokenType::NEWLINE | TokenType::EOF)
        matches!(
            self.current_token().token_type,
            TokenType::NEWLINE | TokenType::EOF
        )
    }

    /// Get the current position info for error reporting
    #[allow(dead_code)]
    fn current_position(&self) -> (usize, usize) {
        let token = self.current_token();
        (token.line, token.column)
    }

    /// Create a Span from the current token
    #[allow(dead_code)]
    fn current_span(&self) -> Span {
        let token = self.current_token();
        Span {
            start: 0, // Token doesn't have byte offset, so using 0
            start: 0, // Token doesn't have byte offset, so using 0
            end: 0,
            line: token.line,
            column: token.column,
@@ -125,4 +129,4 @@ pub trait ParserUtils {
    #[allow(dead_code)]
    pub fn unknown_span() -> Span {
        Span::unknown()
    }
}

@@ -1,22 +1,22 @@
|
||||
/*!
|
||||
* Box Definition Parser Module
|
||||
*
|
||||
*
|
||||
* Handles parsing of box declarations (box, interface box, static box)
* A key module realizing Nyash's core concept, "Everything is Box"
|
||||
*/
|
||||
|
||||
use crate::tokenizer::TokenType;
|
||||
use crate::ast::{ASTNode, Span};
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::must_advance;
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::tokenizer::TokenType;
|
||||
use std::collections::HashMap;
|
||||
|
||||
impl NyashParser {
|
||||
/// Parse a box declaration: box Name { fields... methods... }
|
||||
pub fn parse_box_declaration(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.consume(TokenType::BOX)?;
|
||||
|
||||
|
||||
let name = if let TokenType::IDENTIFIER(name) = &self.current_token().token_type {
|
||||
let name = name.clone();
|
||||
self.advance();
|
||||
@ -29,19 +29,19 @@ impl NyashParser {
|
||||
line,
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
// 🔥 Parse generic type parameters (<T, U>)
|
||||
let type_parameters = if self.match_token(&TokenType::LESS) {
|
||||
self.advance(); // consume '<'
|
||||
let mut params = Vec::new();
|
||||
|
||||
|
||||
while !self.match_token(&TokenType::GREATER) && !self.is_at_end() {
|
||||
must_advance!(self, _unused, "generic type parameter parsing");
|
||||
|
||||
|
||||
if let TokenType::IDENTIFIER(param) = &self.current_token().token_type {
|
||||
params.push(param.clone());
|
||||
self.advance();
|
||||
|
||||
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance();
|
||||
self.skip_newlines();
|
||||
@ -54,18 +54,18 @@ impl NyashParser {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::GREATER)?; // consume '>'
|
||||
params
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
|
||||
// 🚀 Multi-delegation support: "from Parent1, Parent2, ..."
|
||||
let extends = if self.match_token(&TokenType::FROM) {
|
||||
self.advance(); // consume 'from'
|
||||
let mut parents = Vec::new();
|
||||
|
||||
|
||||
// Parse first parent (required)
|
||||
if let TokenType::IDENTIFIER(parent) = &self.current_token().token_type {
|
||||
parents.push(parent.clone());
|
||||
@ -77,12 +77,12 @@ impl NyashParser {
|
||||
line: self.current_token().line,
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
// Parse additional parents (optional)
|
||||
while self.match_token(&TokenType::COMMA) {
|
||||
self.advance(); // consume ','
|
||||
self.skip_newlines();
|
||||
|
||||
|
||||
if let TokenType::IDENTIFIER(parent) = &self.current_token().token_type {
|
||||
parents.push(parent.clone());
|
||||
self.advance();
|
||||
@ -94,21 +94,22 @@ impl NyashParser {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
parents
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
|
||||
// Check for the implements keyword
|
||||
// TODO: TokenType::IMPLEMENTS is not defined in current version
|
||||
let implements = if false { // self.match_token(&TokenType::IMPLEMENTS) {
|
||||
let implements = if false {
|
||||
// self.match_token(&TokenType::IMPLEMENTS) {
|
||||
self.advance(); // consume 'implements'
|
||||
let mut interfaces = Vec::new();
|
||||
|
||||
|
||||
loop {
|
||||
must_advance!(self, _unused, "interface implementation parsing");
|
||||
|
||||
|
||||
if let TokenType::IDENTIFIER(interface) = &self.current_token().token_type {
|
||||
interfaces.push(interface.clone());
|
||||
self.advance();
|
||||
@ -119,51 +120,51 @@ impl NyashParser {
|
||||
line: self.current_token().line,
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
interfaces
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
|
||||
self.consume(TokenType::LBRACE)?;
|
||||
self.skip_newlines(); // skip newlines after the brace
|
||||
|
||||
|
||||
let mut fields = Vec::new();
|
||||
let mut methods = HashMap::new();
|
||||
let mut public_fields: Vec<String> = Vec::new();
|
||||
let mut private_fields: Vec<String> = Vec::new();
|
||||
let mut constructors = HashMap::new();
|
||||
let mut init_fields = Vec::new();
|
||||
let mut weak_fields = Vec::new(); // 🔗 Track weak fields
|
||||
|
||||
let mut weak_fields = Vec::new(); // 🔗 Track weak fields
|
||||
|
||||
while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
|
||||
self.skip_newlines(); // skip newlines at the start of the loop
|
||||
|
||||
|
||||
// Exit the loop if RBRACE has been reached
|
||||
if self.match_token(&TokenType::RBRACE) {
|
||||
break;
|
||||
}
|
||||
|
||||
|
||||
// Handle the init block (only when it is not an init method)
|
||||
if self.match_token(&TokenType::INIT) && self.peek_token() != &TokenType::LPAREN {
|
||||
self.advance(); // consume 'init'
|
||||
self.consume(TokenType::LBRACE)?;
|
||||
|
||||
|
||||
// Read the field definitions inside the init block
|
||||
while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
|
||||
self.skip_newlines();
|
||||
|
||||
|
||||
if self.match_token(&TokenType::RBRACE) {
|
||||
break;
|
||||
}
|
||||
|
||||
|
||||
// Check for weak modifier
|
||||
let is_weak = if self.match_token(&TokenType::WEAK) {
|
||||
self.advance(); // consume 'weak'
|
||||
@ -171,14 +172,14 @@ impl NyashParser {
|
||||
} else {
|
||||
false
|
||||
};
|
||||
|
||||
|
||||
if let TokenType::IDENTIFIER(field_name) = &self.current_token().token_type {
|
||||
init_fields.push(field_name.clone());
|
||||
if is_weak {
|
||||
weak_fields.push(field_name.clone()); // 🔗 Add to weak fields list
|
||||
}
|
||||
self.advance();
|
||||
|
||||
|
||||
// Skip a comma if present
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance();
|
||||
@ -186,72 +187,78 @@ impl NyashParser {
|
||||
} else {
|
||||
// Error if there is an invalid token
|
||||
return Err(ParseError::UnexpectedToken {
|
||||
expected: if is_weak { "field name after 'weak'" } else { "field name" }.to_string(),
|
||||
expected: if is_weak {
|
||||
"field name after 'weak'"
|
||||
} else {
|
||||
"field name"
|
||||
}
|
||||
.to_string(),
|
||||
found: self.current_token().token_type.clone(),
|
||||
line: self.current_token().line,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::RBRACE)?;
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
// Check for the override keyword
|
||||
let mut is_override = false;
|
||||
if self.match_token(&TokenType::OVERRIDE) {
|
||||
is_override = true;
|
||||
self.advance();
|
||||
}
|
||||
|
||||
|
||||
// Special-case the init token as a method name
|
||||
if self.match_token(&TokenType::INIT) && self.peek_token() == &TokenType::LPAREN {
|
||||
let field_or_method = "init".to_string();
|
||||
self.advance(); // consume 'init'
|
||||
|
||||
|
||||
// Handle as a constructor
|
||||
if self.match_token(&TokenType::LPAREN) {
|
||||
// init is always a constructor
|
||||
if is_override {
|
||||
return Err(ParseError::UnexpectedToken {
|
||||
expected: "method definition, not constructor after override keyword".to_string(),
|
||||
expected: "method definition, not constructor after override keyword"
|
||||
.to_string(),
|
||||
found: TokenType::INIT,
|
||||
line: self.current_token().line,
|
||||
});
|
||||
}
|
||||
// Constructor handling
|
||||
self.advance(); // consume '('
|
||||
|
||||
|
||||
let mut params = Vec::new();
|
||||
while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
|
||||
must_advance!(self, _unused, "constructor parameter parsing");
|
||||
|
||||
|
||||
if let TokenType::IDENTIFIER(param) = &self.current_token().token_type {
|
||||
params.push(param.clone());
|
||||
self.advance();
|
||||
}
|
||||
|
||||
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::RPAREN)?;
|
||||
self.consume(TokenType::LBRACE)?;
|
||||
|
||||
|
||||
let mut body = Vec::new();
|
||||
while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
|
||||
must_advance!(self, _unused, "constructor body parsing");
|
||||
|
||||
|
||||
self.skip_newlines();
|
||||
if self.match_token(&TokenType::RBRACE) {
|
||||
break;
|
||||
}
|
||||
body.push(self.parse_statement()?);
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::RBRACE)?;
|
||||
|
||||
|
||||
let constructor = ASTNode::FunctionDeclaration {
|
||||
name: field_or_method.clone(),
|
||||
params: params.clone(),
|
||||
@ -260,60 +267,61 @@ impl NyashParser {
|
||||
is_override: false, // constructors are never overrides
|
||||
span: Span::unknown(),
|
||||
};
|
||||
|
||||
|
||||
// 🔥 Build the key in the form init/<arg count> (to match the interpreter)
|
||||
let constructor_key = format!("{}/{}", field_or_method, params.len());
|
||||
constructors.insert(constructor_key, constructor);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Handle the pack keyword (for built-in Box inheritance)
|
||||
if self.match_token(&TokenType::PACK) && self.peek_token() == &TokenType::LPAREN {
|
||||
let field_or_method = "pack".to_string();
|
||||
self.advance(); // consume 'pack'
|
||||
|
||||
|
||||
// pack is always a constructor
|
||||
if is_override {
|
||||
return Err(ParseError::UnexpectedToken {
|
||||
expected: "method definition, not constructor after override keyword".to_string(),
|
||||
expected: "method definition, not constructor after override keyword"
|
||||
.to_string(),
|
||||
found: TokenType::PACK,
|
||||
line: self.current_token().line,
|
||||
});
|
||||
}
|
||||
// Handle the pack constructor
|
||||
self.advance(); // consume '('
|
||||
|
||||
|
||||
let mut params = Vec::new();
|
||||
while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
|
||||
must_advance!(self, _unused, "pack parameter parsing");
|
||||
|
||||
|
||||
if let TokenType::IDENTIFIER(param) = &self.current_token().token_type {
|
||||
params.push(param.clone());
|
||||
self.advance();
|
||||
}
|
||||
|
||||
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::RPAREN)?;
|
||||
self.consume(TokenType::LBRACE)?;
|
||||
|
||||
|
||||
let mut body = Vec::new();
|
||||
while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
|
||||
must_advance!(self, _unused, "pack body parsing");
|
||||
|
||||
|
||||
self.skip_newlines();
|
||||
if self.match_token(&TokenType::RBRACE) {
|
||||
break;
|
||||
}
|
||||
body.push(self.parse_statement()?);
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::RBRACE)?;
|
||||
|
||||
|
||||
let constructor = ASTNode::FunctionDeclaration {
|
||||
name: field_or_method.clone(),
|
||||
params: params.clone(),
|
||||
@ -322,59 +330,60 @@ impl NyashParser {
|
||||
is_override: false, // pack is never an override
|
||||
span: Span::unknown(),
|
||||
};
|
||||
|
||||
|
||||
// 🔥 Build the key in the form pack/<arg count> (to match the interpreter)
|
||||
let constructor_key = format!("{}/{}", field_or_method, params.len());
|
||||
constructors.insert(constructor_key, constructor);
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
// Handle the birth keyword (the constructor that gives life)
|
||||
if self.match_token(&TokenType::BIRTH) && self.peek_token() == &TokenType::LPAREN {
|
||||
let field_or_method = "birth".to_string();
|
||||
self.advance(); // consume 'birth'
|
||||
|
||||
|
||||
// birth is always a constructor
|
||||
if is_override {
|
||||
return Err(ParseError::UnexpectedToken {
|
||||
expected: "method definition, not constructor after override keyword".to_string(),
|
||||
expected: "method definition, not constructor after override keyword"
|
||||
.to_string(),
|
||||
found: TokenType::BIRTH,
|
||||
line: self.current_token().line,
|
||||
});
|
||||
}
|
||||
// Handle the birth constructor
|
||||
self.advance(); // consume '('
|
||||
|
||||
|
||||
let mut params = Vec::new();
|
||||
while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
|
||||
must_advance!(self, _unused, "birth parameter parsing");
|
||||
|
||||
|
||||
if let TokenType::IDENTIFIER(param) = &self.current_token().token_type {
|
||||
params.push(param.clone());
|
||||
self.advance();
|
||||
}
|
||||
|
||||
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::RPAREN)?;
|
||||
self.consume(TokenType::LBRACE)?;
|
||||
|
||||
|
||||
let mut body = Vec::new();
|
||||
while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
|
||||
must_advance!(self, _unused, "birth body parsing");
|
||||
|
||||
|
||||
self.skip_newlines();
|
||||
if self.match_token(&TokenType::RBRACE) {
|
||||
break;
|
||||
}
|
||||
body.push(self.parse_statement()?);
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::RBRACE)?;
|
||||
|
||||
|
||||
let constructor = ASTNode::FunctionDeclaration {
|
||||
name: field_or_method.clone(),
|
||||
params: params.clone(),
|
||||
@ -383,13 +392,13 @@ impl NyashParser {
|
||||
is_override: false, // birth is never an override
|
||||
span: Span::unknown(),
|
||||
};
|
||||
|
||||
|
||||
// 🔥 Build the key in the form birth/<arg count> (to match the interpreter)
|
||||
let constructor_key = format!("{}/{}", field_or_method, params.len());
|
||||
constructors.insert(constructor_key, constructor);
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
// 🚨 Unified birth() system: Box-name constructors are disabled
// Constructors with the same name as the Box are forbidden (only birth() is allowed)
|
||||
if let TokenType::IDENTIFIER(id) = &self.current_token().token_type {
|
||||
@ -401,13 +410,13 @@ impl NyashParser {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Read a regular field name or method name
|
||||
if let TokenType::IDENTIFIER(field_or_method) = &self.current_token().token_type {
|
||||
let field_or_method = field_or_method.clone();
|
||||
self.advance();
|
||||
|
||||
// Visibility:
// Visibility:
// - public { ... } / private { ... } blocks
// - public name: Type on a single line (P0: the type is only parsed; semantics come later)
|
||||
if field_or_method == "public" || field_or_method == "private" {
|
||||
@ -419,12 +428,18 @@ impl NyashParser {
|
||||
if let TokenType::IDENTIFIER(fname) = &self.current_token().token_type {
|
||||
let fname = fname.clone();
|
||||
// Add to the block
|
||||
if field_or_method == "public" { public_fields.push(fname.clone()); } else { private_fields.push(fname.clone()); }
|
||||
if field_or_method == "public" {
|
||||
public_fields.push(fname.clone());
|
||||
} else {
|
||||
private_fields.push(fname.clone());
|
||||
}
|
||||
// For compatibility, also add to the overall fields list
|
||||
fields.push(fname);
|
||||
self.advance();
|
||||
// Skip commas/newlines
|
||||
if self.match_token(&TokenType::COMMA) { self.advance(); }
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance();
|
||||
}
|
||||
self.skip_newlines();
|
||||
continue;
|
||||
}
|
||||
@ -440,62 +455,79 @@ impl NyashParser {
|
||||
continue;
|
||||
} else if matches!(self.current_token().token_type, TokenType::IDENTIFIER(_)) {
|
||||
// Single-line form: public name[: Type]
|
||||
let fname = if let TokenType::IDENTIFIER(n) = &self.current_token().token_type { n.clone() } else { unreachable!() };
|
||||
let fname =
|
||||
if let TokenType::IDENTIFIER(n) = &self.current_token().token_type {
|
||||
n.clone()
|
||||
} else {
|
||||
unreachable!()
|
||||
};
|
||||
self.advance();
|
||||
if self.match_token(&TokenType::COLON) {
|
||||
self.advance(); // consume ':'
|
||||
// Accept and discard the type name (identifier) (P0)
// Accept and discard the type name (identifier) (P0)
|
||||
if let TokenType::IDENTIFIER(_ty) = &self.current_token().token_type {
|
||||
self.advance();
|
||||
} else {
|
||||
return Err(ParseError::UnexpectedToken { found: self.current_token().token_type.clone(), expected: "type name".to_string(), line: self.current_token().line });
|
||||
return Err(ParseError::UnexpectedToken {
|
||||
found: self.current_token().token_type.clone(),
|
||||
expected: "type name".to_string(),
|
||||
line: self.current_token().line,
|
||||
});
|
||||
}
|
||||
}
|
||||
if field_or_method == "public" { public_fields.push(fname.clone()); } else { private_fields.push(fname.clone()); }
|
||||
if field_or_method == "public" {
|
||||
public_fields.push(fname.clone());
|
||||
} else {
|
||||
private_fields.push(fname.clone());
|
||||
}
|
||||
fields.push(fname);
|
||||
self.skip_newlines();
|
||||
continue;
|
||||
} else {
|
||||
// After public/private, neither '{' nor an identifier
|
||||
return Err(ParseError::UnexpectedToken { found: self.current_token().token_type.clone(), expected: "'{' or field name".to_string(), line: self.current_token().line });
|
||||
return Err(ParseError::UnexpectedToken {
|
||||
found: self.current_token().token_type.clone(),
|
||||
expected: "'{' or field name".to_string(),
|
||||
line: self.current_token().line,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Determine whether this is a method or a field
|
||||
if self.match_token(&TokenType::LPAREN) {
|
||||
// Method definition
|
||||
self.advance(); // consume '('
|
||||
|
||||
|
||||
let mut params = Vec::new();
|
||||
while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
|
||||
must_advance!(self, _unused, "method parameter parsing");
|
||||
|
||||
|
||||
if let TokenType::IDENTIFIER(param) = &self.current_token().token_type {
|
||||
params.push(param.clone());
|
||||
self.advance();
|
||||
}
|
||||
|
||||
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::RPAREN)?;
|
||||
self.consume(TokenType::LBRACE)?;
|
||||
|
||||
|
||||
let mut body = Vec::new();
|
||||
while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
|
||||
must_advance!(self, _unused, "method body parsing");
|
||||
|
||||
|
||||
self.skip_newlines();
|
||||
if self.match_token(&TokenType::RBRACE) {
|
||||
break;
|
||||
}
|
||||
body.push(self.parse_statement()?);
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::RBRACE)?;
|
||||
|
||||
|
||||
let method = ASTNode::FunctionDeclaration {
|
||||
name: field_or_method.clone(),
|
||||
params,
|
||||
@ -504,14 +536,14 @@ impl NyashParser {
|
||||
is_override,
|
||||
span: Span::unknown(),
|
||||
};
|
||||
|
||||
|
||||
methods.insert(field_or_method, method);
|
||||
} else {
|
||||
// Field definition (P0: accept and discard the type annotation name: Type)
|
||||
let fname = field_or_method;
|
||||
if self.match_token(&TokenType::COLON) {
|
||||
self.advance(); // consume ':'
|
||||
// Allow a type name (identifier) (P0 discards it rather than keeping it)
// Allow a type name (identifier) (P0 discards it rather than keeping it)
|
||||
if let TokenType::IDENTIFIER(_ty) = &self.current_token().token_type {
|
||||
self.advance();
|
||||
}
|
||||
@ -526,14 +558,14 @@ impl NyashParser {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::RBRACE)?;
|
||||
|
||||
|
||||
// 🔥 Override validation
|
||||
for parent in &extends {
|
||||
self.validate_override_methods(&name, parent, &methods)?;
|
||||
}
|
||||
|
||||
|
||||
Ok(ASTNode::BoxDeclaration {
|
||||
name,
|
||||
fields,
|
||||
@ -542,7 +574,7 @@ impl NyashParser {
|
||||
methods,
|
||||
constructors,
|
||||
init_fields,
|
||||
weak_fields, // 🔗 Add weak fields to AST
|
||||
weak_fields, // 🔗 Add weak fields to AST
|
||||
is_interface: false,
|
||||
extends,
|
||||
implements,
|
||||
@ -552,12 +584,12 @@ impl NyashParser {
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
/// Parse an interface box declaration: interface box Name { methods... }
|
||||
pub fn parse_interface_box_declaration(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.consume(TokenType::INTERFACE)?;
|
||||
self.consume(TokenType::BOX)?;
|
||||
|
||||
|
||||
let name = if let TokenType::IDENTIFIER(name) = &self.current_token().token_type {
|
||||
let name = name.clone();
|
||||
self.advance();
|
||||
@ -570,48 +602,48 @@ impl NyashParser {
|
||||
line,
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
self.consume(TokenType::LBRACE)?;
|
||||
self.skip_newlines(); // skip newlines after the brace
|
||||
|
||||
|
||||
let mut methods = HashMap::new();
|
||||
|
||||
|
||||
while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
|
||||
self.skip_newlines(); // skip newlines at the start of the loop
|
||||
if let TokenType::IDENTIFIER(method_name) = &self.current_token().token_type {
|
||||
let method_name = method_name.clone();
|
||||
self.advance();
|
||||
|
||||
|
||||
// Interface methods are signatures only
|
||||
if self.match_token(&TokenType::LPAREN) {
|
||||
self.advance(); // consume '('
|
||||
|
||||
|
||||
let mut params = Vec::new();
|
||||
while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
|
||||
if let TokenType::IDENTIFIER(param) = &self.current_token().token_type {
|
||||
params.push(param.clone());
|
||||
self.advance();
|
||||
}
|
||||
|
||||
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::RPAREN)?;
|
||||
|
||||
|
||||
// Interface methods have no implementation (empty body)
|
||||
let method_decl = ASTNode::FunctionDeclaration {
|
||||
name: method_name.clone(),
|
||||
params,
|
||||
body: vec![], // empty implementation
is_static: false, // interface methods are normally not static
body: vec![], // empty implementation
is_static: false, // interface methods are normally not static
is_override: false, // non-override by default
|
||||
span: Span::unknown(),
|
||||
};
|
||||
|
||||
|
||||
methods.insert(method_name, method_decl);
|
||||
|
||||
|
||||
// Skip newlines after the method declaration
|
||||
self.skip_newlines();
|
||||
} else {
|
||||
@ -631,9 +663,9 @@ impl NyashParser {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::RBRACE)?;
|
||||
|
||||
|
||||
Ok(ASTNode::BoxDeclaration {
|
||||
name,
|
||||
fields: vec![], // interfaces have no fields
|
||||
@ -641,14 +673,14 @@ impl NyashParser {
|
||||
private_fields: vec![],
|
||||
methods,
|
||||
constructors: HashMap::new(), // interfaces have no constructors
init_fields: vec![], // interfaces have no init block
weak_fields: vec![], // 🔗 interfaces have no weak fields
is_interface: true, // interface flag
extends: vec![], // 🚀 Multi-delegation: Changed from None to vec![]
init_fields: vec![], // interfaces have no init block
weak_fields: vec![], // 🔗 interfaces have no weak fields
is_interface: true, // interface flag
extends: vec![], // 🚀 Multi-delegation: Changed from None to vec![]
implements: vec![],
type_parameters: Vec::new(), // 🔥 generics are not supported for interfaces
is_static: false, // interfaces are not static
static_init: None, // interfaces have no static init
is_static: false, // interfaces are not static
static_init: None, // interfaces have no static init
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
/*!
|
||||
* Dependency Analysis Helpers
|
||||
*
|
||||
*
|
||||
* Static box dependency analysis and circular dependency detection
|
||||
*/
|
||||
|
||||
@ -10,18 +10,25 @@ use std::collections::{HashMap, HashSet};
|
||||
|
||||
impl NyashParser {
|
||||
/// Extract dependencies from the statements in a static initialization block
|
||||
pub(super) fn extract_dependencies_from_statements(&self, statements: &[ASTNode]) -> HashSet<String> {
|
||||
pub(super) fn extract_dependencies_from_statements(
|
||||
&self,
|
||||
statements: &[ASTNode],
|
||||
) -> HashSet<String> {
|
||||
let mut dependencies = HashSet::new();
|
||||
|
||||
|
||||
for stmt in statements {
|
||||
self.extract_dependencies_from_ast(stmt, &mut dependencies);
|
||||
}
|
||||
|
||||
|
||||
dependencies
|
||||
}
|
||||
|
||||
|
||||
/// Recursively detect static Box references within the AST
|
||||
pub(super) fn extract_dependencies_from_ast(&self, node: &ASTNode, dependencies: &mut HashSet<String>) {
|
||||
pub(super) fn extract_dependencies_from_ast(
|
||||
&self,
|
||||
node: &ASTNode,
|
||||
dependencies: &mut HashSet<String>,
|
||||
) {
|
||||
match node {
|
||||
ASTNode::FieldAccess { object, .. } => {
|
||||
// Detect references such as Math.PI
|
||||
@ -46,7 +53,12 @@ impl NyashParser {
|
||||
ASTNode::UnaryOp { operand, .. } => {
|
||||
self.extract_dependencies_from_ast(operand, dependencies);
|
||||
}
|
||||
ASTNode::If { condition, then_body, else_body, .. } => {
|
||||
ASTNode::If {
|
||||
condition,
|
||||
then_body,
|
||||
else_body,
|
||||
..
|
||||
} => {
|
||||
self.extract_dependencies_from_ast(condition, dependencies);
|
||||
for stmt in then_body {
|
||||
self.extract_dependencies_from_ast(stmt, dependencies);
|
||||
@ -57,7 +69,9 @@ impl NyashParser {
|
||||
}
|
||||
}
|
||||
}
|
||||
ASTNode::Loop { condition, body, .. } => {
|
||||
ASTNode::Loop {
|
||||
condition, body, ..
|
||||
} => {
|
||||
self.extract_dependencies_from_ast(condition, dependencies);
|
||||
for stmt in body {
|
||||
self.extract_dependencies_from_ast(stmt, dependencies);
|
||||
@ -73,26 +87,26 @@ impl NyashParser {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// Circular dependency detection
|
||||
pub fn check_circular_dependencies(&self) -> Result<(), ParseError> {
|
||||
// Run cycle detection for every static box
|
||||
let all_boxes: Vec<_> = self.static_box_dependencies.keys().cloned().collect();
|
||||
|
||||
|
||||
for box_name in &all_boxes {
|
||||
let mut visited = HashSet::new();
|
||||
let mut stack = Vec::new();
|
||||
|
||||
|
||||
if self.has_cycle_dfs(box_name, &mut visited, &mut stack)? {
|
||||
// Render the cycle as a string
|
||||
let cycle_str = stack.join(" -> ");
|
||||
return Err(ParseError::CircularDependency { cycle: cycle_str });
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
||||
/// Detect circular dependencies with DFS
|
||||
fn has_cycle_dfs(
|
||||
&self,
|
||||
@ -105,15 +119,15 @@ impl NyashParser {
|
||||
stack.push(current.to_string()); // complete the cycle
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
|
||||
// Skip if already visited and no cycle was found
|
||||
if visited.contains(current) {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
|
||||
visited.insert(current.to_string());
|
||||
stack.push(current.to_string());
|
||||
|
||||
|
||||
// Check the dependencies
|
||||
if let Some(dependencies) = self.static_box_dependencies.get(current) {
|
||||
for dep in dependencies {
|
||||
@ -122,23 +136,31 @@ impl NyashParser {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
stack.pop();
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
|
||||
/// Validate override methods
|
||||
pub(super) fn validate_override_methods(&self, child_name: &str, parent_name: &str, methods: &HashMap<String, ASTNode>) -> Result<(), ParseError> {
|
||||
pub(super) fn validate_override_methods(
|
||||
&self,
|
||||
child_name: &str,
|
||||
parent_name: &str,
|
||||
methods: &HashMap<String, ASTNode>,
|
||||
) -> Result<(), ParseError> {
|
||||
// Only simple validation for now
// TODO: compare against the parent class's method signatures
|
||||
for (method_name, method_ast) in methods {
|
||||
if let ASTNode::FunctionDeclaration { is_override, .. } = method_ast {
|
||||
if *is_override {
|
||||
// In the future, check here whether the method exists on the parent class
|
||||
eprintln!("🔍 Validating override method '{}' in '{}' from '{}'", method_name, child_name, parent_name);
|
||||
eprintln!(
|
||||
"🔍 Validating override method '{}' in '{}' from '{}'",
|
||||
method_name, child_name, parent_name
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
/*!
|
||||
* Parser Declarations Module
|
||||
*
|
||||
*
|
||||
* A group of modules responsible for parsing declarations
* Handles declarations such as Box definitions, function definitions, and use statements
|
||||
*/
|
||||
|
||||
pub mod box_definition;
|
||||
pub mod static_box;
|
||||
pub mod dependency_helpers;
|
||||
pub mod static_box;
|
||||
|
||||
// Re-export commonly used items
|
||||
|
||||
@@ -1,20 +1,20 @@
|
||||
/*!
|
||||
* Static Box Definition Parser
|
||||
*
|
||||
*
|
||||
* static box declarations and related helper functions
|
||||
*/
|
||||
|
||||
use crate::tokenizer::TokenType;
|
||||
use crate::ast::{ASTNode, Span};
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::tokenizer::TokenType;
|
||||
use std::collections::HashMap;
|
||||
|
||||
impl NyashParser {
|
||||
/// Parse a static box declaration: static box Name { ... }
|
||||
pub fn parse_static_box(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.consume(TokenType::BOX)?;
|
||||
|
||||
|
||||
let name = if let TokenType::IDENTIFIER(name) = &self.current_token().token_type {
|
||||
let name = name.clone();
|
||||
self.advance();
|
||||
@ -27,17 +27,17 @@ impl NyashParser {
|
||||
line,
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
// 🔥 Parse generic type parameters (<T, U>)
|
||||
let type_parameters = if self.match_token(&TokenType::LESS) {
|
||||
self.advance(); // consume '<'
|
||||
let mut params = Vec::new();
|
||||
|
||||
|
||||
loop {
|
||||
if let TokenType::IDENTIFIER(param_name) = &self.current_token().token_type {
|
||||
params.push(param_name.clone());
|
||||
self.advance();
|
||||
|
||||
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance(); // consume ','
|
||||
} else {
|
||||
@ -52,24 +52,24 @@ impl NyashParser {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::GREATER)?; // consume '>'
|
||||
params
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
|
||||
// Parse the from clause (multi-delegation) - delegation also works for static boxes 🚀
|
||||
let extends = if self.match_token(&TokenType::FROM) {
|
||||
self.advance(); // consume 'from'
|
||||
|
||||
|
||||
let mut parent_list = Vec::new();
|
||||
|
||||
|
||||
loop {
|
||||
if let TokenType::IDENTIFIER(parent_name) = &self.current_token().token_type {
|
||||
parent_list.push(parent_name.clone());
|
||||
self.advance();
|
||||
|
||||
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance(); // consume ','
|
||||
} else {
|
||||
@ -84,23 +84,23 @@ impl NyashParser {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
parent_list
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
|
||||
// Parse the interface clause (interface implementation) - static boxes can also implement interfaces
|
||||
let implements = if self.match_token(&TokenType::INTERFACE) {
|
||||
self.advance(); // consume 'interface'
|
||||
|
||||
|
||||
let mut interface_list = Vec::new();
|
||||
|
||||
|
||||
loop {
|
||||
if let TokenType::IDENTIFIER(interface_name) = &self.current_token().token_type {
|
||||
interface_list.push(interface_name.clone());
|
||||
self.advance();
|
||||
|
||||
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance(); // consume ','
|
||||
} else {
|
||||
@ -115,35 +115,35 @@ impl NyashParser {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
interface_list
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
|
||||
|
||||
self.consume(TokenType::LBRACE)?;
|
||||
self.skip_newlines(); // skip newlines after the brace
|
||||
|
||||
|
||||
let mut fields = Vec::new();
|
||||
let mut methods = HashMap::new();
|
||||
let constructors = HashMap::new();
|
||||
let mut init_fields = Vec::new();
|
||||
let mut weak_fields = Vec::new(); // 🔗 Track weak fields for static box
|
||||
let mut weak_fields = Vec::new(); // 🔗 Track weak fields for static box
|
||||
let mut static_init = None;
|
||||
|
||||
|
||||
while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
|
||||
self.skip_newlines(); // skip newlines at the start of the loop
|
||||
|
||||
|
||||
// Exit the loop if RBRACE has been reached
|
||||
if self.match_token(&TokenType::RBRACE) {
|
||||
break;
|
||||
}
|
||||
|
||||
|
||||
// 🔥 Handle the static { } block
|
||||
if self.match_token(&TokenType::STATIC) {
|
||||
self.advance(); // consume 'static'
|
||||
self.consume(TokenType::LBRACE)?;
|
||||
|
||||
|
||||
let mut static_body = Vec::new();
|
||||
while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
|
||||
self.skip_newlines();
|
||||
@ -151,25 +151,25 @@ impl NyashParser {
|
||||
static_body.push(self.parse_statement()?);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::RBRACE)?;
|
||||
static_init = Some(static_body);
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
// Handle the init block
|
||||
if self.match_token(&TokenType::INIT) {
|
||||
self.advance(); // consume 'init'
|
||||
self.consume(TokenType::LBRACE)?;
|
||||
|
||||
|
||||
// Read the field definitions inside the init block
|
||||
while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
|
||||
self.skip_newlines();
|
||||
|
||||
|
||||
if self.match_token(&TokenType::RBRACE) {
|
||||
break;
|
||||
}
|
||||
|
||||
|
||||
// Check for weak modifier
|
||||
let is_weak = if self.match_token(&TokenType::WEAK) {
|
||||
self.advance(); // consume 'weak'
|
||||
@ -177,14 +177,14 @@ impl NyashParser {
|
||||
} else {
|
||||
false
|
||||
};
|
||||
|
||||
|
||||
if let TokenType::IDENTIFIER(field_name) = &self.current_token().token_type {
|
||||
init_fields.push(field_name.clone());
|
||||
if is_weak {
|
||||
weak_fields.push(field_name.clone()); // 🔗 Add to weak fields list
|
||||
}
|
||||
self.advance();
|
||||
|
||||
|
||||
// Skip a comma if present
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance();
|
||||
@ -192,41 +192,46 @@ impl NyashParser {
|
||||
} else {
|
||||
// Error if there is an invalid token
|
||||
return Err(ParseError::UnexpectedToken {
|
||||
expected: if is_weak { "field name after 'weak'" } else { "field name" }.to_string(),
|
||||
expected: if is_weak {
|
||||
"field name after 'weak'"
|
||||
} else {
|
||||
"field name"
|
||||
}
|
||||
.to_string(),
|
||||
found: self.current_token().token_type.clone(),
|
||||
line: self.current_token().line,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::RBRACE)?;
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
if let TokenType::IDENTIFIER(field_or_method) = &self.current_token().token_type {
|
||||
let field_or_method = field_or_method.clone();
|
||||
self.advance();
|
||||
|
||||
|
||||
// Is this a method definition?
|
||||
if self.match_token(&TokenType::LPAREN) {
|
||||
// Method definition
|
||||
self.advance(); // consume '('
|
||||
|
||||
|
||||
let mut params = Vec::new();
|
||||
while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
|
||||
if let TokenType::IDENTIFIER(param) = &self.current_token().token_type {
|
||||
params.push(param.clone());
|
||||
self.advance();
|
||||
}
|
||||
|
||||
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::RPAREN)?;
|
||||
self.consume(TokenType::LBRACE)?;
|
||||
|
||||
|
||||
let mut body = Vec::new();
|
||||
while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
|
||||
self.skip_newlines();
|
||||
@ -234,18 +239,18 @@ impl NyashParser {
|
||||
body.push(self.parse_statement()?);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::RBRACE)?;
|
||||
|
||||
|
||||
let method = ASTNode::FunctionDeclaration {
|
||||
name: field_or_method.clone(),
|
||||
params,
|
||||
body,
|
||||
is_static: false, // methods inside a static box are regular methods
is_static: false, // methods inside a static box are regular methods
is_override: false, // non-override by default
|
||||
span: Span::unknown(),
|
||||
};
|
||||
|
||||
|
||||
methods.insert(field_or_method, method);
|
||||
} else {
|
||||
// Field definition
|
||||
@ -259,17 +264,19 @@ impl NyashParser {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::RBRACE)?;
|
||||
|
||||
|
||||
// 🔥 Extract dependencies from the static initialization block
|
||||
if let Some(ref init_stmts) = static_init {
|
||||
let dependencies = self.extract_dependencies_from_statements(init_stmts);
|
||||
self.static_box_dependencies.insert(name.clone(), dependencies);
|
||||
self.static_box_dependencies
|
||||
.insert(name.clone(), dependencies);
|
||||
} else {
|
||||
self.static_box_dependencies.insert(name.clone(), std::collections::HashSet::new());
|
||||
self.static_box_dependencies
|
||||
.insert(name.clone(), std::collections::HashSet::new());
|
||||
}
|
||||
|
||||
|
||||
Ok(ASTNode::BoxDeclaration {
|
||||
name,
|
||||
fields,
|
||||
@ -278,13 +285,13 @@ impl NyashParser {
|
||||
methods,
|
||||
constructors,
|
||||
init_fields,
|
||||
weak_fields, // 🔗 Add weak fields to static box construction
|
||||
weak_fields, // 🔗 Add weak fields to static box construction
|
||||
is_interface: false,
|
||||
extends,
|
||||
implements,
|
||||
type_parameters,
|
||||
is_static: true, // 🔥 set the static box flag
static_init, // 🔥 static initialization block
is_static: true, // 🔥 set the static box flag
static_init, // 🔥 static initialization block
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,21 +1,22 @@
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::parser::sugar_gate;
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::syntax::sugar_config::{SugarConfig, SugarLevel};
|
||||
|
||||
/// Parse code and apply sugar based on a provided level (None/Basic/Full)
|
||||
pub fn parse_with_sugar_level(code: &str, level: SugarLevel) -> Result<crate::ast::ASTNode, ParseError> {
|
||||
pub fn parse_with_sugar_level(
|
||||
code: &str,
|
||||
level: SugarLevel,
|
||||
) -> Result<crate::ast::ASTNode, ParseError> {
|
||||
match level {
|
||||
SugarLevel::None => {
|
||||
let ast = NyashParser::parse_from_string(code)?;
|
||||
Ok(ast)
|
||||
}
|
||||
SugarLevel::Basic | SugarLevel::Full => {
|
||||
sugar_gate::with_enabled(|| {
|
||||
let ast = NyashParser::parse_from_string(code)?;
|
||||
let cfg = SugarConfig { level };
|
||||
let ast = crate::parser::sugar::apply_sugar(ast, &cfg);
|
||||
Ok(ast)
|
||||
})
|
||||
}
|
||||
SugarLevel::Basic | SugarLevel::Full => sugar_gate::with_enabled(|| {
|
||||
let ast = NyashParser::parse_from_string(code)?;
|
||||
let cfg = SugarConfig { level };
|
||||
let ast = crate::parser::sugar::apply_sugar(ast, &cfg);
|
||||
Ok(ast)
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::tokenizer::TokenType;
|
||||
use crate::ast::{ASTNode, BinaryOperator, Span};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::tokenizer::TokenType;
|
||||
|
||||
impl NyashParser {
|
||||
pub(crate) fn expr_parse_bit_or(&mut self) -> Result<ASTNode, ParseError> {
|
||||
@ -10,7 +10,12 @@ impl NyashParser {
|
||||
let operator = BinaryOperator::BitOr;
|
||||
self.advance();
|
||||
let right = self.expr_parse_bit_xor()?;
|
||||
expr = ASTNode::BinaryOp { operator, left: Box::new(expr), right: Box::new(right), span: Span::unknown() };
|
||||
expr = ASTNode::BinaryOp {
|
||||
operator,
|
||||
left: Box::new(expr),
|
||||
right: Box::new(right),
|
||||
span: Span::unknown(),
|
||||
};
|
||||
}
|
||||
Ok(expr)
|
||||
}
|
||||
@ -21,7 +26,12 @@ impl NyashParser {
|
||||
let operator = BinaryOperator::BitXor;
|
||||
self.advance();
|
||||
let right = self.expr_parse_bit_and()?;
|
||||
expr = ASTNode::BinaryOp { operator, left: Box::new(expr), right: Box::new(right), span: Span::unknown() };
|
||||
expr = ASTNode::BinaryOp {
|
||||
operator,
|
||||
left: Box::new(expr),
|
||||
right: Box::new(right),
|
||||
span: Span::unknown(),
|
||||
};
|
||||
}
|
||||
Ok(expr)
|
||||
}
|
||||
@ -32,9 +42,13 @@ impl NyashParser {
|
||||
let operator = BinaryOperator::BitAnd;
|
||||
self.advance();
|
||||
let right = self.expr_parse_equality()?;
|
||||
expr = ASTNode::BinaryOp { operator, left: Box::new(expr), right: Box::new(right), span: Span::unknown() };
|
||||
expr = ASTNode::BinaryOp {
|
||||
operator,
|
||||
left: Box::new(expr),
|
||||
right: Box::new(right),
|
||||
span: Span::unknown(),
|
||||
};
|
||||
}
|
||||
Ok(expr)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::tokenizer::TokenType;
|
||||
use crate::ast::{ASTNode, Span};
|
||||
use crate::must_advance;
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::tokenizer::TokenType;
|
||||
|
||||
#[inline]
|
||||
fn is_sugar_enabled() -> bool { crate::parser::sugar_gate::is_enabled() }
|
||||
fn is_sugar_enabled() -> bool {
|
||||
crate::parser::sugar_gate::is_enabled()
|
||||
}
|
||||
|
||||
impl NyashParser {
|
||||
pub(crate) fn expr_parse_call(&mut self) -> Result<ASTNode, ParseError> {
|
||||
@ -70,12 +72,20 @@ impl NyashParser {
|
||||
});
|
||||
}
|
||||
self.advance(); // consume '?.'
|
||||
// ident then optional call
|
||||
// ident then optional call
|
||||
let name = match &self.current_token().token_type {
|
||||
TokenType::IDENTIFIER(s) => { let v = s.clone(); self.advance(); v }
|
||||
TokenType::IDENTIFIER(s) => {
|
||||
let v = s.clone();
|
||||
self.advance();
|
||||
v
|
||||
}
|
||||
_ => {
|
||||
let line = self.current_token().line;
|
||||
return Err(ParseError::UnexpectedToken { found: self.current_token().token_type.clone(), expected: "identifier after '?.'".to_string(), line });
|
||||
return Err(ParseError::UnexpectedToken {
|
||||
found: self.current_token().token_type.clone(),
|
||||
expected: "identifier after '?.'".to_string(),
|
||||
line,
|
||||
});
|
||||
}
|
||||
};
|
||||
let access = if self.match_token(&TokenType::LPAREN) {
|
||||
@ -85,23 +95,39 @@ impl NyashParser {
|
||||
while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
|
||||
must_advance!(self, _unused, "safe method call arg parsing");
|
||||
arguments.push(self.parse_expression()?);
|
||||
if self.match_token(&TokenType::COMMA) { self.advance(); }
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance();
|
||||
}
|
||||
}
|
||||
self.consume(TokenType::RPAREN)?;
|
||||
ASTNode::MethodCall { object: Box::new(expr.clone()), method: name, arguments, span: Span::unknown() }
|
||||
ASTNode::MethodCall {
|
||||
object: Box::new(expr.clone()),
|
||||
method: name,
|
||||
arguments,
|
||||
span: Span::unknown(),
|
||||
}
|
||||
} else {
|
||||
// field access
|
||||
ASTNode::FieldAccess { object: Box::new(expr.clone()), field: name, span: Span::unknown() }
|
||||
ASTNode::FieldAccess {
|
||||
object: Box::new(expr.clone()),
|
||||
field: name,
|
||||
span: Span::unknown(),
|
||||
}
|
||||
};
|
||||
|
||||
// Wrap with peek: peek expr { null => null, else => access(expr) }
|
||||
expr = ASTNode::PeekExpr {
|
||||
scrutinee: Box::new(expr.clone()),
|
||||
arms: vec![(crate::ast::LiteralValue::Null, ASTNode::Literal { value: crate::ast::LiteralValue::Null, span: Span::unknown() })],
|
||||
arms: vec![(
|
||||
crate::ast::LiteralValue::Null,
|
||||
ASTNode::Literal {
|
||||
value: crate::ast::LiteralValue::Null,
|
||||
span: Span::unknown(),
|
||||
},
|
||||
)],
|
||||
else_expr: Box::new(access),
|
||||
span: Span::unknown(),
|
||||
};
|
||||
|
||||
} else if self.match_token(&TokenType::LPAREN) {
|
||||
// Function call: function(args), or general expression call: (callee)(args)
|
||||
self.advance(); // consume '('
|
||||
@ -109,23 +135,43 @@ impl NyashParser {
|
||||
while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
|
||||
must_advance!(self, _unused, "function call argument parsing");
|
||||
arguments.push(self.parse_expression()?);
|
||||
if self.match_token(&TokenType::COMMA) { self.advance(); }
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance();
|
||||
}
|
||||
}
|
||||
self.consume(TokenType::RPAREN)?;
|
||||
|
||||
if let ASTNode::Variable { name, .. } = expr.clone() {
|
||||
expr = ASTNode::FunctionCall { name, arguments, span: Span::unknown() };
|
||||
expr = ASTNode::FunctionCall {
|
||||
name,
|
||||
arguments,
|
||||
span: Span::unknown(),
|
||||
};
|
||||
} else {
|
||||
expr = ASTNode::Call { callee: Box::new(expr), arguments, span: Span::unknown() };
|
||||
expr = ASTNode::Call {
|
||||
callee: Box::new(expr),
|
||||
arguments,
|
||||
span: Span::unknown(),
|
||||
};
|
||||
}
|
||||
} else if self.match_token(&TokenType::QUESTION) {
|
||||
let nt = self.peek_token();
|
||||
let is_ender = matches!(nt,
|
||||
TokenType::NEWLINE | TokenType::EOF | TokenType::RPAREN | TokenType::COMMA | TokenType::RBRACE
|
||||
let is_ender = matches!(
|
||||
nt,
|
||||
TokenType::NEWLINE
|
||||
| TokenType::EOF
|
||||
| TokenType::RPAREN
|
||||
| TokenType::COMMA
|
||||
| TokenType::RBRACE
|
||||
);
|
||||
if !is_ender { break; }
|
||||
if !is_ender {
|
||||
break;
|
||||
}
|
||||
self.advance();
|
||||
expr = ASTNode::QMarkPropagate { expression: Box::new(expr), span: Span::unknown() };
|
||||
expr = ASTNode::QMarkPropagate {
|
||||
expression: Box::new(expr),
|
||||
span: Span::unknown(),
|
||||
};
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::tokenizer::TokenType;
|
||||
use crate::ast::{ASTNode, Span};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::tokenizer::TokenType;
|
||||
|
||||
#[inline]
|
||||
fn is_sugar_enabled() -> bool { crate::parser::sugar_gate::is_enabled() }
|
||||
fn is_sugar_enabled() -> bool {
|
||||
crate::parser::sugar_gate::is_enabled()
|
||||
}
|
||||
|
||||
impl NyashParser {
|
||||
pub(crate) fn expr_parse_coalesce(&mut self) -> Result<ASTNode, ParseError> {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::tokenizer::TokenType;
|
||||
use crate::ast::{ASTNode, BinaryOperator, Span};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::tokenizer::TokenType;
|
||||
|
||||
impl NyashParser {
|
||||
pub(crate) fn expr_parse_equality(&mut self) -> Result<ASTNode, ParseError> {
|
||||
@ -15,11 +15,25 @@ impl NyashParser {
|
||||
self.advance();
|
||||
let right = self.expr_parse_comparison()?;
|
||||
if std::env::var("NYASH_GRAMMAR_DIFF").ok().as_deref() == Some("1") {
|
||||
let name = match operator { BinaryOperator::Equal=>"eq", BinaryOperator::NotEqual=>"ne", _=>"cmp" };
|
||||
let name = match operator {
|
||||
BinaryOperator::Equal => "eq",
|
||||
BinaryOperator::NotEqual => "ne",
|
||||
_ => "cmp",
|
||||
};
|
||||
let ok = crate::grammar::engine::get().syntax_is_allowed_binop(name);
|
||||
if !ok { eprintln!("[GRAMMAR-DIFF][Parser] binop '{}' not allowed by syntax rules", name); }
|
||||
if !ok {
|
||||
eprintln!(
|
||||
"[GRAMMAR-DIFF][Parser] binop '{}' not allowed by syntax rules",
|
||||
name
|
||||
);
|
||||
}
|
||||
}
|
||||
expr = ASTNode::BinaryOp { operator, left: Box::new(expr), right: Box::new(right), span: Span::unknown() };
|
||||
expr = ASTNode::BinaryOp {
|
||||
operator,
|
||||
left: Box::new(expr),
|
||||
right: Box::new(right),
|
||||
span: Span::unknown(),
|
||||
};
|
||||
}
|
||||
Ok(expr)
|
||||
}
|
||||
@ -40,9 +54,13 @@ impl NyashParser {
|
||||
};
|
||||
self.advance();
|
||||
let right = self.expr_parse_range()?;
|
||||
expr = ASTNode::BinaryOp { operator, left: Box::new(expr), right: Box::new(right), span: Span::unknown() };
|
||||
expr = ASTNode::BinaryOp {
|
||||
operator,
|
||||
left: Box::new(expr),
|
||||
right: Box::new(right),
|
||||
span: Span::unknown(),
|
||||
};
|
||||
}
|
||||
Ok(expr)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::tokenizer::TokenType;
|
||||
use crate::ast::{ASTNode, BinaryOperator, Span};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::tokenizer::TokenType;
|
||||
|
||||
impl NyashParser {
|
||||
pub(crate) fn expr_parse_factor(&mut self) -> Result<ASTNode, ParseError> {
|
||||
@ -19,13 +19,26 @@ impl NyashParser {
|
||||
self.advance();
|
||||
let right = self.parse_unary()?;
|
||||
if std::env::var("NYASH_GRAMMAR_DIFF").ok().as_deref() == Some("1") {
|
||||
let name = match operator { BinaryOperator::Multiply=>"mul", BinaryOperator::Divide=>"div", _=>"mod" };
|
||||
let name = match operator {
|
||||
BinaryOperator::Multiply => "mul",
|
||||
BinaryOperator::Divide => "div",
|
||||
_ => "mod",
|
||||
};
|
||||
let ok = crate::grammar::engine::get().syntax_is_allowed_binop(name);
|
||||
if !ok { eprintln!("[GRAMMAR-DIFF][Parser] binop '{}' not allowed by syntax rules", name); }
|
||||
if !ok {
|
||||
eprintln!(
|
||||
"[GRAMMAR-DIFF][Parser] binop '{}' not allowed by syntax rules",
|
||||
name
|
||||
);
|
||||
}
|
||||
}
|
||||
expr = ASTNode::BinaryOp { operator, left: Box::new(expr), right: Box::new(right), span: Span::unknown() };
|
||||
expr = ASTNode::BinaryOp {
|
||||
operator,
|
||||
left: Box::new(expr),
|
||||
right: Box::new(right),
|
||||
span: Span::unknown(),
|
||||
};
|
||||
}
|
||||
Ok(expr)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::tokenizer::TokenType;
|
||||
use crate::ast::{ASTNode, BinaryOperator, Span};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::tokenizer::TokenType;
|
||||
|
||||
impl NyashParser {
|
||||
pub(crate) fn expr_parse_or(&mut self) -> Result<ASTNode, ParseError> {
|
||||
@ -12,9 +12,16 @@ impl NyashParser {
|
||||
let right = self.expr_parse_and()?;
|
||||
if std::env::var("NYASH_GRAMMAR_DIFF").ok().as_deref() == Some("1") {
|
||||
let ok = crate::grammar::engine::get().syntax_is_allowed_binop("or");
|
||||
if !ok { eprintln!("[GRAMMAR-DIFF][Parser] binop 'or' not allowed by syntax rules"); }
|
||||
if !ok {
|
||||
eprintln!("[GRAMMAR-DIFF][Parser] binop 'or' not allowed by syntax rules");
|
||||
}
|
||||
}
|
||||
expr = ASTNode::BinaryOp { operator, left: Box::new(expr), right: Box::new(right), span: Span::unknown() };
|
||||
expr = ASTNode::BinaryOp {
|
||||
operator,
|
||||
left: Box::new(expr),
|
||||
right: Box::new(right),
|
||||
span: Span::unknown(),
|
||||
};
|
||||
}
|
||||
Ok(expr)
|
||||
}
|
||||
@ -27,9 +34,16 @@ impl NyashParser {
|
||||
let right = self.expr_parse_equality()?;
|
||||
if std::env::var("NYASH_GRAMMAR_DIFF").ok().as_deref() == Some("1") {
|
||||
let ok = crate::grammar::engine::get().syntax_is_allowed_binop("and");
|
||||
if !ok { eprintln!("[GRAMMAR-DIFF][Parser] binop 'and' not allowed by syntax rules"); }
|
||||
if !ok {
|
||||
eprintln!("[GRAMMAR-DIFF][Parser] binop 'and' not allowed by syntax rules");
|
||||
}
|
||||
}
|
||||
expr = ASTNode::BinaryOp { operator, left: Box::new(expr), right: Box::new(right), span: Span::unknown() };
|
||||
expr = ASTNode::BinaryOp {
|
||||
operator,
|
||||
left: Box::new(expr),
|
||||
right: Box::new(right),
|
||||
span: Span::unknown(),
|
||||
};
|
||||
}
|
||||
Ok(expr)
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
pub(crate) mod ternary;
|
||||
pub(crate) mod coalesce;
|
||||
pub(crate) mod logic;
|
||||
pub(crate) mod bit;
|
||||
pub(crate) mod compare;
|
||||
pub(crate) mod range;
|
||||
pub(crate) mod term;
|
||||
pub(crate) mod shift;
|
||||
pub(crate) mod factor;
|
||||
pub(crate) mod call;
|
||||
pub(crate) mod coalesce;
|
||||
pub(crate) mod compare;
|
||||
pub(crate) mod factor;
|
||||
pub(crate) mod logic;
|
||||
pub(crate) mod primary;
|
||||
pub(crate) mod range;
|
||||
pub(crate) mod shift;
|
||||
pub(crate) mod term;
|
||||
pub(crate) mod ternary;
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::ast::{ASTNode, LiteralValue, Span};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::tokenizer::TokenType;
|
||||
use crate::ast::{ASTNode, Span, LiteralValue};
|
||||
|
||||
impl NyashParser {
|
||||
pub(crate) fn expr_parse_primary(&mut self) -> Result<ASTNode, ParseError> {
|
||||
@ -23,10 +23,15 @@ impl NyashParser {
|
||||
crate::must_advance!(self, _unused, "array literal element parsing");
|
||||
let el = self.parse_expression()?;
|
||||
elems.push(el);
|
||||
if self.match_token(&TokenType::COMMA) { self.advance(); }
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance();
|
||||
}
|
||||
}
|
||||
self.consume(TokenType::RBRACK)?;
|
||||
Ok(ASTNode::ArrayLiteral { elements: elems, span: Span::unknown() })
|
||||
Ok(ASTNode::ArrayLiteral {
|
||||
elements: elems,
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
TokenType::LBRACE => {
let sugar_on = crate::parser::sugar_gate::is_enabled()
@ -38,39 +43,111 @@ impl NyashParser {
self.advance();
let mut entries: Vec<(String, ASTNode)> = Vec::new();
let sugar_level = std::env::var("NYASH_SYNTAX_SUGAR_LEVEL").ok();
let ident_key_on = std::env::var("NYASH_ENABLE_MAP_IDENT_KEY").ok().as_deref() == Some("1") || sugar_level.as_deref() == Some("full");
let ident_key_on = std::env::var("NYASH_ENABLE_MAP_IDENT_KEY").ok().as_deref()
== Some("1")
|| sugar_level.as_deref() == Some("full");
while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
let key = match &self.current_token().token_type {
TokenType::STRING(s) => { let v = s.clone(); self.advance(); v }
TokenType::IDENTIFIER(id) if ident_key_on => { let v = id.clone(); self.advance(); v }
TokenType::STRING(s) => {
let v = s.clone();
self.advance();
v
}
TokenType::IDENTIFIER(id) if ident_key_on => {
let v = id.clone();
self.advance();
v
}
_ => {
let line = self.current_token().line;
return Err(ParseError::UnexpectedToken { found: self.current_token().token_type.clone(), expected: if ident_key_on { "string or identifier key in map literal".to_string() } else { "string key in map literal".to_string() }, line });
return Err(ParseError::UnexpectedToken {
found: self.current_token().token_type.clone(),
expected: if ident_key_on {
"string or identifier key in map literal".to_string()
} else {
"string key in map literal".to_string()
},
line,
});
}
};
self.consume(TokenType::COLON)?;
let value_expr = self.parse_expression()?;
entries.push((key, value_expr));
if self.match_token(&TokenType::COMMA) { self.advance(); }
if self.match_token(&TokenType::COMMA) {
self.advance();
}
}
self.consume(TokenType::RBRACE)?;
Ok(ASTNode::MapLiteral { entries, span: Span::unknown() })
Ok(ASTNode::MapLiteral {
entries,
span: Span::unknown(),
})
}
TokenType::INCLUDE => self.parse_include(),
TokenType::STRING(s) => {
let value = s.clone();
self.advance();
Ok(ASTNode::Literal {
value: LiteralValue::String(value),
span: Span::unknown(),
})
}
TokenType::NUMBER(n) => {
let value = *n;
self.advance();
Ok(ASTNode::Literal {
value: LiteralValue::Integer(value),
span: Span::unknown(),
})
}
TokenType::FLOAT(f) => {
let value = *f;
self.advance();
Ok(ASTNode::Literal {
value: LiteralValue::Float(value),
span: Span::unknown(),
})
}
TokenType::TRUE => {
self.advance();
Ok(ASTNode::Literal {
value: LiteralValue::Bool(true),
span: Span::unknown(),
})
}
TokenType::FALSE => {
self.advance();
Ok(ASTNode::Literal {
value: LiteralValue::Bool(false),
span: Span::unknown(),
})
}
TokenType::NULL => {
self.advance();
Ok(ASTNode::Literal {
value: LiteralValue::Null,
span: Span::unknown(),
})
}
TokenType::INCLUDE => { self.parse_include() }
TokenType::STRING(s) => { let value = s.clone(); self.advance(); Ok(ASTNode::Literal { value: LiteralValue::String(value), span: Span::unknown() }) }
TokenType::NUMBER(n) => { let value = *n; self.advance(); Ok(ASTNode::Literal { value: LiteralValue::Integer(value), span: Span::unknown() }) }
TokenType::FLOAT(f) => { let value = *f; self.advance(); Ok(ASTNode::Literal { value: LiteralValue::Float(value), span: Span::unknown() }) }
TokenType::TRUE => { self.advance(); Ok(ASTNode::Literal { value: LiteralValue::Bool(true), span: Span::unknown() }) }
TokenType::FALSE => { self.advance(); Ok(ASTNode::Literal { value: LiteralValue::Bool(false), span: Span::unknown() }) }
TokenType::NULL => { self.advance(); Ok(ASTNode::Literal { value: LiteralValue::Null, span: Span::unknown() }) }
TokenType::THIS => {
|
||||
if std::env::var("NYASH_DEPRECATE_THIS").ok().as_deref() == Some("1") {
|
||||
eprintln!("[deprecate:this] 'this' is deprecated; use 'me' instead (line {})", self.current_token().line);
|
||||
eprintln!(
|
||||
"[deprecate:this] 'this' is deprecated; use 'me' instead (line {})",
|
||||
self.current_token().line
|
||||
);
|
||||
}
|
||||
self.advance();
|
||||
Ok(ASTNode::Me { span: Span::unknown() })
|
||||
Ok(ASTNode::Me {
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
TokenType::ME => {
|
||||
self.advance();
|
||||
Ok(ASTNode::Me {
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
TokenType::ME => { self.advance(); Ok(ASTNode::Me { span: Span::unknown() }) }
|
||||
TokenType::NEW => {
|
||||
self.advance();
|
||||
if let TokenType::IDENTIFIER(class_name) = &self.current_token().token_type {
|
||||
@ -80,10 +157,23 @@ impl NyashParser {
|
||||
if self.match_token(&TokenType::LESS) {
|
||||
self.advance();
|
||||
loop {
|
||||
if let TokenType::IDENTIFIER(tn) = &self.current_token().token_type { type_arguments.push(tn.clone()); self.advance(); }
|
||||
else { let line = self.current_token().line; return Err(ParseError::UnexpectedToken { found: self.current_token().token_type.clone(), expected: "type argument".to_string(), line }); }
|
||||
if self.match_token(&TokenType::COMMA) { self.advance(); continue; }
|
||||
self.consume(TokenType::GREATER)?; break;
|
||||
if let TokenType::IDENTIFIER(tn) = &self.current_token().token_type {
|
||||
type_arguments.push(tn.clone());
|
||||
self.advance();
|
||||
} else {
|
||||
let line = self.current_token().line;
|
||||
return Err(ParseError::UnexpectedToken {
|
||||
found: self.current_token().token_type.clone(),
|
||||
expected: "type argument".to_string(),
|
||||
line,
|
||||
});
|
||||
}
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance();
|
||||
continue;
|
||||
}
|
||||
self.consume(TokenType::GREATER)?;
|
||||
break;
|
||||
}
|
||||
}
|
||||
self.consume(TokenType::LPAREN)?;
|
||||
@ -91,49 +181,108 @@ impl NyashParser {
|
||||
while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
|
||||
crate::must_advance!(self, _unused, "new expression argument parsing");
|
||||
arguments.push(self.parse_expression()?);
|
||||
if self.match_token(&TokenType::COMMA) { self.advance(); }
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance();
|
||||
}
|
||||
}
|
||||
self.consume(TokenType::RPAREN)?;
|
||||
Ok(ASTNode::New { class, arguments, type_arguments, span: Span::unknown() })
|
||||
Ok(ASTNode::New {
|
||||
class,
|
||||
arguments,
|
||||
type_arguments,
|
||||
span: Span::unknown(),
|
||||
})
|
||||
} else {
|
||||
let line = self.current_token().line;
|
||||
Err(ParseError::UnexpectedToken { found: self.current_token().token_type.clone(), expected: "class name".to_string(), line })
|
||||
Err(ParseError::UnexpectedToken {
|
||||
found: self.current_token().token_type.clone(),
|
||||
expected: "class name".to_string(),
|
||||
line,
|
||||
})
|
||||
}
|
||||
}
|
||||
TokenType::FROM => { self.parse_from_call() }
|
||||
TokenType::FROM => self.parse_from_call(),
|
||||
TokenType::IDENTIFIER(name) => {
|
||||
let parent = name.clone();
|
||||
self.advance();
|
||||
if self.match_token(&TokenType::DoubleColon) {
|
||||
self.advance();
|
||||
let method = match &self.current_token().token_type {
|
||||
TokenType::IDENTIFIER(m) => { let s=m.clone(); self.advance(); s }
|
||||
TokenType::INIT => { self.advance(); "init".to_string() }
|
||||
TokenType::PACK => { self.advance(); "pack".to_string() }
|
||||
TokenType::BIRTH => { self.advance(); "birth".to_string() }
|
||||
_ => { let line = self.current_token().line; return Err(ParseError::UnexpectedToken { found: self.current_token().token_type.clone(), expected: "method name".to_string(), line }); }
|
||||
TokenType::IDENTIFIER(m) => {
|
||||
let s = m.clone();
|
||||
self.advance();
|
||||
s
|
||||
}
|
||||
TokenType::INIT => {
|
||||
self.advance();
|
||||
"init".to_string()
|
||||
}
|
||||
TokenType::PACK => {
|
||||
self.advance();
|
||||
"pack".to_string()
|
||||
}
|
||||
TokenType::BIRTH => {
|
||||
self.advance();
|
||||
"birth".to_string()
|
||||
}
|
||||
_ => {
|
||||
let line = self.current_token().line;
|
||||
return Err(ParseError::UnexpectedToken {
|
||||
found: self.current_token().token_type.clone(),
|
||||
expected: "method name".to_string(),
|
||||
line,
|
||||
});
|
||||
}
|
||||
};
|
||||
self.consume(TokenType::LPAREN)?;
|
||||
let mut arguments = Vec::new();
|
||||
while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
|
||||
crate::must_advance!(self, _unused, "Parent::method call argument parsing");
|
||||
arguments.push(self.parse_expression()?);
|
||||
if self.match_token(&TokenType::COMMA) { self.advance(); }
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance();
|
||||
}
|
||||
}
|
||||
self.consume(TokenType::RPAREN)?;
|
||||
Ok(ASTNode::FromCall { parent, method, arguments, span: Span::unknown() })
|
||||
Ok(ASTNode::FromCall {
|
||||
parent,
|
||||
method,
|
||||
arguments,
|
||||
span: Span::unknown(),
|
||||
})
|
||||
} else {
|
||||
Ok(ASTNode::Variable { name: parent, span: Span::unknown() })
|
||||
Ok(ASTNode::Variable {
|
||||
name: parent,
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
}
|
||||
TokenType::LPAREN => { self.advance(); let expr = self.parse_expression()?; self.consume(TokenType::RPAREN)?; Ok(expr) }
|
||||
TokenType::LPAREN => {
|
||||
self.advance();
|
||||
let expr = self.parse_expression()?;
|
||||
self.consume(TokenType::RPAREN)?;
|
||||
Ok(expr)
|
||||
}
|
||||
TokenType::FN => {
|
||||
self.advance();
|
||||
let mut params: Vec<String> = Vec::new();
|
||||
if self.match_token(&TokenType::LPAREN) { self.advance();
|
||||
if self.match_token(&TokenType::LPAREN) {
|
||||
self.advance();
|
||||
while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
|
||||
if let TokenType::IDENTIFIER(p) = &self.current_token().token_type { params.push(p.clone()); self.advance(); if self.match_token(&TokenType::COMMA) { self.advance(); } }
|
||||
else { let line = self.current_token().line; return Err(ParseError::UnexpectedToken { found: self.current_token().token_type.clone(), expected: "parameter name".to_string(), line }); }
|
||||
if let TokenType::IDENTIFIER(p) = &self.current_token().token_type {
|
||||
params.push(p.clone());
|
||||
self.advance();
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance();
|
||||
}
|
||||
} else {
|
||||
let line = self.current_token().line;
|
||||
return Err(ParseError::UnexpectedToken {
|
||||
found: self.current_token().token_type.clone(),
|
||||
expected: "parameter name".to_string(),
|
||||
line,
|
||||
});
|
||||
}
|
||||
}
|
||||
self.consume(TokenType::RPAREN)?;
|
||||
}
|
||||
@ -141,12 +290,21 @@ impl NyashParser {
|
||||
let mut body: Vec<ASTNode> = Vec::new();
|
||||
while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
|
||||
self.skip_newlines();
|
||||
if !self.match_token(&TokenType::RBRACE) { body.push(self.parse_statement()?); }
|
||||
if !self.match_token(&TokenType::RBRACE) {
|
||||
body.push(self.parse_statement()?);
|
||||
}
|
||||
}
|
||||
self.consume(TokenType::RBRACE)?;
|
||||
Ok(ASTNode::Lambda { params, body, span: Span::unknown() })
|
||||
Ok(ASTNode::Lambda {
|
||||
params,
|
||||
body,
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
_ => {
|
||||
let line = self.current_token().line;
|
||||
Err(ParseError::InvalidExpression { line })
|
||||
}
|
||||
_ => { let line = self.current_token().line; Err(ParseError::InvalidExpression { line }) }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,10 +1,12 @@
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::tokenizer::TokenType;
|
||||
use crate::ast::{ASTNode, Span};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::tokenizer::TokenType;
|
||||
|
||||
#[inline]
|
||||
fn is_sugar_enabled() -> bool { crate::parser::sugar_gate::is_enabled() }
|
||||
fn is_sugar_enabled() -> bool {
|
||||
crate::parser::sugar_gate::is_enabled()
|
||||
}
|
||||
|
||||
impl NyashParser {
|
||||
pub(crate) fn expr_parse_range(&mut self) -> Result<ASTNode, ParseError> {
|
||||
@ -20,9 +22,12 @@ impl NyashParser {
|
||||
}
|
||||
self.advance();
|
||||
let rhs = self.expr_parse_term()?;
|
||||
expr = ASTNode::FunctionCall { name: "Range".to_string(), arguments: vec![expr, rhs], span: Span::unknown() };
|
||||
expr = ASTNode::FunctionCall {
|
||||
name: "Range".to_string(),
|
||||
arguments: vec![expr, rhs],
|
||||
span: Span::unknown(),
|
||||
};
|
||||
}
|
||||
Ok(expr)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::tokenizer::TokenType;
|
||||
use crate::ast::{ASTNode, BinaryOperator, Span};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::tokenizer::TokenType;
|
||||
|
||||
impl NyashParser {
|
||||
pub(crate) fn expr_parse_shift(&mut self) -> Result<ASTNode, ParseError> {
|
||||
@ -10,13 +10,23 @@ impl NyashParser {
|
||||
if self.match_token(&TokenType::ShiftLeft) {
|
||||
self.advance();
|
||||
let rhs = self.expr_parse_factor()?;
|
||||
expr = ASTNode::BinaryOp { operator: BinaryOperator::Shl, left: Box::new(expr), right: Box::new(rhs), span: Span::unknown() };
|
||||
expr = ASTNode::BinaryOp {
|
||||
operator: BinaryOperator::Shl,
|
||||
left: Box::new(expr),
|
||||
right: Box::new(rhs),
|
||||
span: Span::unknown(),
|
||||
};
|
||||
continue;
|
||||
}
|
||||
if self.match_token(&TokenType::ShiftRight) {
|
||||
self.advance();
|
||||
let rhs = self.expr_parse_factor()?;
|
||||
expr = ASTNode::BinaryOp { operator: BinaryOperator::Shr, left: Box::new(expr), right: Box::new(rhs), span: Span::unknown() };
|
||||
expr = ASTNode::BinaryOp {
|
||||
operator: BinaryOperator::Shr,
|
||||
left: Box::new(expr),
|
||||
right: Box::new(rhs),
|
||||
span: Span::unknown(),
|
||||
};
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
@ -24,4 +34,3 @@ impl NyashParser {
|
||||
Ok(expr)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::tokenizer::TokenType;
|
||||
use crate::ast::{ASTNode, BinaryOperator, Span};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::tokenizer::TokenType;
|
||||
|
||||
impl NyashParser {
|
||||
pub(crate) fn expr_parse_term(&mut self) -> Result<ASTNode, ParseError> {
|
||||
@ -15,13 +15,26 @@ impl NyashParser {
|
||||
self.advance();
|
||||
let right = self.expr_parse_shift()?;
|
||||
if std::env::var("NYASH_GRAMMAR_DIFF").ok().as_deref() == Some("1") {
|
||||
let name = match operator { BinaryOperator::Add=>"add", BinaryOperator::Subtract=>"sub", _=>"term" };
|
||||
let name = match operator {
|
||||
BinaryOperator::Add => "add",
|
||||
BinaryOperator::Subtract => "sub",
|
||||
_ => "term",
|
||||
};
|
||||
let ok = crate::grammar::engine::get().syntax_is_allowed_binop(name);
|
||||
if !ok { eprintln!("[GRAMMAR-DIFF][Parser] binop '{}' not allowed by syntax rules", name); }
|
||||
if !ok {
|
||||
eprintln!(
|
||||
"[GRAMMAR-DIFF][Parser] binop '{}' not allowed by syntax rules",
|
||||
name
|
||||
);
|
||||
}
|
||||
}
|
||||
expr = ASTNode::BinaryOp { operator, left: Box::new(expr), right: Box::new(right), span: Span::unknown() };
|
||||
expr = ASTNode::BinaryOp {
|
||||
operator,
|
||||
left: Box::new(expr),
|
||||
right: Box::new(right),
|
||||
span: Span::unknown(),
|
||||
};
|
||||
}
|
||||
Ok(expr)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -1,10 +1,12 @@
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::tokenizer::TokenType;
|
||||
use crate::ast::{ASTNode, Span};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::tokenizer::TokenType;
|
||||
|
||||
#[inline]
|
||||
fn is_sugar_enabled() -> bool { crate::parser::sugar_gate::is_enabled() }
|
||||
fn is_sugar_enabled() -> bool {
|
||||
crate::parser::sugar_gate::is_enabled()
|
||||
}
|
||||
|
||||
impl NyashParser {
|
||||
pub(crate) fn expr_parse_ternary(&mut self) -> Result<ASTNode, ParseError> {
|
||||
|
||||
@ -1,20 +1,22 @@
|
||||
/*!
|
||||
* Nyash Parser - Expression Parsing Module
|
||||
*
|
||||
*
|
||||
* 式(Expression)の解析を担当するモジュール
|
||||
* 演算子の優先順位に従った再帰下降パーサー実装
|
||||
*/
|
||||
|
||||
use crate::tokenizer::TokenType;
|
||||
use crate::ast::{ASTNode, BinaryOperator, LiteralValue, UnaryOperator, Span};
|
||||
use super::{NyashParser, ParseError};
|
||||
use super::common::ParserUtils;
|
||||
use super::{NyashParser, ParseError};
|
||||
use crate::ast::{ASTNode, BinaryOperator, LiteralValue, Span, UnaryOperator};
|
||||
use crate::tokenizer::TokenType;
|
||||
|
||||
// Debug macros are now imported from the parent module via #[macro_export]
|
||||
use crate::must_advance;
|
||||
|
||||
#[inline]
|
||||
fn is_sugar_enabled() -> bool { crate::parser::sugar_gate::is_enabled() }
|
||||
fn is_sugar_enabled() -> bool {
|
||||
crate::parser::sugar_gate::is_enabled()
|
||||
}
|
||||
|
||||
impl NyashParser {
|
||||
/// 式をパース (演算子優先順位あり)
|
||||
@ -44,29 +46,60 @@ impl NyashParser {
|
||||
|
||||
// 変換: rhs の形に応じて lhs を先頭引数として追加
|
||||
expr = match rhs {
|
||||
ASTNode::FunctionCall { name, mut arguments, span } => {
|
||||
ASTNode::FunctionCall {
|
||||
name,
|
||||
mut arguments,
|
||||
span,
|
||||
} => {
|
||||
let mut new_args = Vec::with_capacity(arguments.len() + 1);
|
||||
new_args.push(expr);
|
||||
new_args.append(&mut arguments);
|
||||
ASTNode::FunctionCall { name, arguments: new_args, span }
|
||||
ASTNode::FunctionCall {
|
||||
name,
|
||||
arguments: new_args,
|
||||
span,
|
||||
}
|
||||
}
|
||||
ASTNode::MethodCall { object, method, mut arguments, span } => {
|
||||
ASTNode::MethodCall {
|
||||
object,
|
||||
method,
|
||||
mut arguments,
|
||||
span,
|
||||
} => {
|
||||
let mut new_args = Vec::with_capacity(arguments.len() + 1);
|
||||
new_args.push(expr);
|
||||
new_args.append(&mut arguments);
|
||||
ASTNode::MethodCall { object, method, arguments: new_args, span }
|
||||
ASTNode::MethodCall {
|
||||
object,
|
||||
method,
|
||||
arguments: new_args,
|
||||
span,
|
||||
}
|
||||
}
|
||||
ASTNode::Variable { name, .. } => {
|
||||
ASTNode::FunctionCall { name, arguments: vec![expr], span: Span::unknown() }
|
||||
}
|
||||
ASTNode::FieldAccess { object, field, .. } => {
|
||||
ASTNode::MethodCall { object, method: field, arguments: vec![expr], span: Span::unknown() }
|
||||
}
|
||||
ASTNode::Call { callee, mut arguments, span } => {
|
||||
ASTNode::Variable { name, .. } => ASTNode::FunctionCall {
|
||||
name,
|
||||
arguments: vec![expr],
|
||||
span: Span::unknown(),
|
||||
},
|
||||
ASTNode::FieldAccess { object, field, .. } => ASTNode::MethodCall {
|
||||
object,
|
||||
method: field,
|
||||
arguments: vec![expr],
|
||||
span: Span::unknown(),
|
||||
},
|
||||
ASTNode::Call {
|
||||
callee,
|
||||
mut arguments,
|
||||
span,
|
||||
} => {
|
||||
let mut new_args = Vec::with_capacity(arguments.len() + 1);
|
||||
new_args.push(expr);
|
||||
new_args.append(&mut arguments);
|
||||
ASTNode::Call { callee, arguments: new_args, span }
|
||||
ASTNode::Call {
|
||||
callee,
|
||||
arguments: new_args,
|
||||
span,
|
||||
}
|
||||
}
|
||||
other => {
|
||||
// 許容外: 関数/メソッド/変数/フィールド以外には適用不可
|
||||
@ -85,44 +118,70 @@ impl NyashParser {
|
||||
/// 三項演算子: cond ? then : else
|
||||
/// Grammar (Phase 12.7): TernaryExpr = NullsafeExpr ( "?" Expr ":" Expr )?
|
||||
/// 実装: coalesce の上に差し込み、`cond ? a : b` を If式に変換する。
|
||||
fn parse_ternary(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_ternary() }
|
||||
fn parse_ternary(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.expr_parse_ternary()
|
||||
}
|
||||
|
||||
/// デフォルト値(??): x ?? y => peek x { null => y, else => x }
|
||||
fn parse_coalesce(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_coalesce() }
|
||||
|
||||
fn parse_coalesce(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.expr_parse_coalesce()
|
||||
}
|
||||
|
||||
/// OR演算子をパース: ||
|
||||
fn parse_or(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_or() }
|
||||
|
||||
fn parse_or(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.expr_parse_or()
|
||||
}
|
||||
|
||||
/// AND演算子をパース: &&
|
||||
fn parse_and(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_and() }
|
||||
fn parse_and(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.expr_parse_and()
|
||||
}
|
||||
|
||||
/// ビットOR: |
|
||||
pub(crate) fn parse_bit_or(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_bit_or() }
|
||||
pub(crate) fn parse_bit_or(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.expr_parse_bit_or()
|
||||
}
|
||||
|
||||
/// ビットXOR: ^
|
||||
fn parse_bit_xor(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_bit_xor() }
|
||||
fn parse_bit_xor(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.expr_parse_bit_xor()
|
||||
}
|
||||
|
||||
/// ビットAND: &
|
||||
fn parse_bit_and(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_bit_and() }
|
||||
|
||||
fn parse_bit_and(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.expr_parse_bit_and()
|
||||
}
|
||||
|
||||
/// 等値演算子をパース: == !=
|
||||
pub(crate) fn parse_equality(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_equality() }
|
||||
|
||||
pub(crate) fn parse_equality(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.expr_parse_equality()
|
||||
}
|
||||
|
||||
/// 比較演算子をパース: < <= > >=
|
||||
fn parse_comparison(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_comparison() }
|
||||
fn parse_comparison(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.expr_parse_comparison()
|
||||
}
|
||||
|
||||
/// 範囲演算子: a .. b => Range(a,b)
|
||||
fn parse_range(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_range() }
|
||||
|
||||
fn parse_range(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.expr_parse_range()
|
||||
}
|
||||
|
||||
/// 項をパース: + -
|
||||
fn parse_term(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_term() }
|
||||
|
||||
fn parse_term(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.expr_parse_term()
|
||||
}
|
||||
|
||||
/// シフトをパース: << >>
|
||||
fn parse_shift(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_shift() }
|
||||
|
||||
fn parse_shift(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.expr_parse_shift()
|
||||
}
|
||||
|
||||
/// 因子をパース: * /
|
||||
fn parse_factor(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_factor() }
|
||||
|
||||
fn parse_factor(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.expr_parse_factor()
|
||||
}
|
||||
|
||||
/// 単項演算子をパース
|
||||
pub(crate) fn parse_unary(&mut self) -> Result<ASTNode, ParseError> {
|
||||
// peek式の先読み
|
||||
@ -138,7 +197,7 @@ impl NyashParser {
|
||||
span: Span::unknown(),
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
if self.match_token(&TokenType::NOT) {
|
||||
self.advance(); // consume 'not'
|
||||
let operand = self.parse_unary()?; // 再帰的に単項演算をパース
|
||||
@ -148,7 +207,7 @@ impl NyashParser {
|
||||
span: Span::unknown(),
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
if self.match_token(&TokenType::AWAIT) {
|
||||
self.advance(); // consume 'await'
|
||||
let expression = self.parse_unary()?; // 再帰的にパース
|
||||
@ -157,7 +216,7 @@ impl NyashParser {
|
||||
span: Span::unknown(),
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
self.parse_call()
|
||||
}
|
||||
|
||||
@ -177,7 +236,9 @@ impl NyashParser {
|
||||
self.advance();
|
||||
self.skip_newlines();
|
||||
}
|
||||
if self.match_token(&TokenType::RBRACE) { break; }
|
||||
if self.match_token(&TokenType::RBRACE) {
|
||||
break;
|
||||
}
|
||||
|
||||
// else or literal
|
||||
let is_else = matches!(self.current_token().token_type, TokenType::ELSE);
|
||||
@ -196,7 +257,10 @@ impl NyashParser {
|
||||
}
|
||||
}
|
||||
self.consume(TokenType::RBRACE)?;
|
||||
ASTNode::Program { statements: stmts, span: Span::unknown() }
|
||||
ASTNode::Program {
|
||||
statements: stmts,
|
||||
span: Span::unknown(),
|
||||
}
|
||||
} else {
|
||||
self.parse_expression()?
|
||||
};
|
||||
@ -216,7 +280,10 @@ impl NyashParser {
|
||||
}
|
||||
}
|
||||
self.consume(TokenType::RBRACE)?;
|
||||
ASTNode::Program { statements: stmts, span: Span::unknown() }
|
||||
ASTNode::Program {
|
||||
statements: stmts,
|
||||
span: Span::unknown(),
|
||||
}
|
||||
} else {
|
||||
self.parse_expression()?
|
||||
};
|
||||
@ -224,8 +291,12 @@ impl NyashParser {
|
||||
}
|
||||
|
||||
// 区切り(カンマや改行を許可)
|
||||
if self.match_token(&TokenType::COMMA) { self.advance(); }
|
||||
if self.match_token(&TokenType::NEWLINE) { self.advance(); }
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance();
|
||||
}
|
||||
if self.match_token(&TokenType::NEWLINE) {
|
||||
self.advance();
|
||||
}
|
||||
}
|
||||
|
||||
self.consume(TokenType::RBRACE)?;
|
||||
@ -245,29 +316,58 @@ impl NyashParser {
|
||||
|
||||
fn parse_literal_only(&mut self) -> Result<crate::ast::LiteralValue, ParseError> {
|
||||
match &self.current_token().token_type {
|
||||
TokenType::STRING(s) => { let v = crate::ast::LiteralValue::String(s.clone()); self.advance(); Ok(v) }
|
||||
TokenType::NUMBER(n) => { let v = crate::ast::LiteralValue::Integer(*n); self.advance(); Ok(v) }
|
||||
TokenType::FLOAT(f) => { let v = crate::ast::LiteralValue::Float(*f); self.advance(); Ok(v) }
|
||||
TokenType::TRUE => { self.advance(); Ok(crate::ast::LiteralValue::Bool(true)) }
|
||||
TokenType::FALSE => { self.advance(); Ok(crate::ast::LiteralValue::Bool(false)) }
|
||||
TokenType::NULL => { self.advance(); Ok(crate::ast::LiteralValue::Null) }
|
||||
TokenType::STRING(s) => {
|
||||
let v = crate::ast::LiteralValue::String(s.clone());
|
||||
self.advance();
|
||||
Ok(v)
|
||||
}
|
||||
TokenType::NUMBER(n) => {
|
||||
let v = crate::ast::LiteralValue::Integer(*n);
|
||||
self.advance();
|
||||
Ok(v)
|
||||
}
|
||||
TokenType::FLOAT(f) => {
|
||||
let v = crate::ast::LiteralValue::Float(*f);
|
||||
self.advance();
|
||||
Ok(v)
|
||||
}
|
||||
TokenType::TRUE => {
|
||||
self.advance();
|
||||
Ok(crate::ast::LiteralValue::Bool(true))
|
||||
}
|
||||
TokenType::FALSE => {
|
||||
self.advance();
|
||||
Ok(crate::ast::LiteralValue::Bool(false))
|
||||
}
|
||||
TokenType::NULL => {
|
||||
self.advance();
|
||||
Ok(crate::ast::LiteralValue::Null)
|
||||
}
|
||||
_ => {
|
||||
let line = self.current_token().line;
|
||||
Err(ParseError::UnexpectedToken { found: self.current_token().token_type.clone(), expected: "literal".to_string(), line })
|
||||
Err(ParseError::UnexpectedToken {
|
||||
found: self.current_token().token_type.clone(),
|
||||
expected: "literal".to_string(),
|
||||
line,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// 関数・メソッド呼び出しをパース
|
||||
fn parse_call(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_call() }
|
||||
|
||||
fn parse_call(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.expr_parse_call()
|
||||
}
|
||||
|
||||
/// 基本式をパース: リテラル、変数、括弧、this、new、配列リテラル(糖衣)
|
||||
fn parse_primary(&mut self) -> Result<ASTNode, ParseError> { self.expr_parse_primary() }
|
||||
|
||||
fn parse_primary(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.expr_parse_primary()
|
||||
}
|
||||
|
||||
/// from構文をパース: from Parent.method(arguments)
|
||||
pub(super) fn parse_from_call(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.advance(); // consume 'from'
|
||||
|
||||
|
||||
// Parent名を取得
|
||||
let parent = if let TokenType::IDENTIFIER(name) = &self.current_token().token_type {
|
||||
let name = name.clone();
|
||||
@ -281,12 +381,12 @@ impl NyashParser {
|
||||
line,
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
// DOT とmethod名は任意(pack透明化対応)
|
||||
let method = if self.match_token(&TokenType::DOT) {
|
||||
// DOTがある場合: from Parent.method() 形式
|
||||
self.advance(); // consume DOT
|
||||
|
||||
|
||||
// method名を取得 (IDENTIFIERまたはINITを受け入れ)
|
||||
match &self.current_token().token_type {
|
||||
TokenType::IDENTIFIER(name) => {
|
||||
@ -320,28 +420,31 @@ impl NyashParser {
|
||||
// Phase 8.9: 明示的birth()構文を強制
|
||||
let line = self.current_token().line;
|
||||
return Err(ParseError::TransparencySystemRemoved {
|
||||
suggestion: format!("Use 'from {}.birth()' instead of 'from {}()'", parent, parent),
|
||||
suggestion: format!(
|
||||
"Use 'from {}.birth()' instead of 'from {}()'",
|
||||
parent, parent
|
||||
),
|
||||
line,
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
// 引数リストをパース
|
||||
self.consume(TokenType::LPAREN)?;
|
||||
let mut arguments = Vec::new();
|
||||
|
||||
|
||||
while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
|
||||
must_advance!(self, _unused, "from call argument parsing");
|
||||
|
||||
|
||||
arguments.push(self.parse_expression()?);
|
||||
|
||||
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance();
|
||||
// カンマの後の trailing comma をチェック
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::RPAREN)?;
|
||||
|
||||
|
||||
Ok(ASTNode::FromCall {
|
||||
parent,
|
||||
method,
|
||||
|
||||
@ -2,17 +2,17 @@
|
||||
* Function declaration parsing
|
||||
*/
|
||||
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::tokenizer::TokenType;
|
||||
use crate::ast::{ASTNode, Span};
|
||||
use crate::must_advance;
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::tokenizer::TokenType;
|
||||
|
||||
impl NyashParser {
|
||||
/// function宣言をパース: function name(params) { body }
|
||||
pub fn parse_function_declaration(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.consume(TokenType::FUNCTION)?;
|
||||
|
||||
|
||||
// 関数名を取得
|
||||
let name = if let TokenType::IDENTIFIER(name) = &self.current_token().token_type {
|
||||
let name = name.clone();
|
||||
@ -26,18 +26,18 @@ impl NyashParser {
|
||||
line,
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
// パラメータリストをパース
|
||||
self.consume(TokenType::LPAREN)?;
|
||||
let mut params = Vec::new();
|
||||
|
||||
|
||||
while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
|
||||
must_advance!(self, _unused, "function declaration parameter parsing");
|
||||
|
||||
|
||||
if let TokenType::IDENTIFIER(param) = &self.current_token().token_type {
|
||||
params.push(param.clone());
|
||||
self.advance();
|
||||
|
||||
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance();
|
||||
}
|
||||
@ -50,13 +50,13 @@ impl NyashParser {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::RPAREN)?;
|
||||
|
||||
|
||||
// 関数本体をパース
|
||||
self.consume(TokenType::LBRACE)?;
|
||||
self.skip_newlines();
|
||||
|
||||
|
||||
let mut body = Vec::new();
|
||||
while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
|
||||
self.skip_newlines();
|
||||
@ -64,16 +64,16 @@ impl NyashParser {
|
||||
body.push(self.parse_statement()?);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::RBRACE)?;
|
||||
|
||||
|
||||
Ok(ASTNode::FunctionDeclaration {
|
||||
name,
|
||||
params,
|
||||
body,
|
||||
is_static: false, // 通常の関数は静的でない
|
||||
is_static: false, // 通常の関数は静的でない
|
||||
is_override: false, // デフォルトは非オーバーライド
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -2,16 +2,16 @@
|
||||
* Global variable parsing
|
||||
*/
|
||||
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::tokenizer::TokenType;
|
||||
use crate::ast::{ASTNode, Span};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::tokenizer::TokenType;
|
||||
|
||||
impl NyashParser {
|
||||
/// グローバル変数をパース: global name = value
|
||||
pub fn parse_global_var(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.consume(TokenType::GLOBAL)?;
|
||||
|
||||
|
||||
let name = if let TokenType::IDENTIFIER(name) = &self.current_token().token_type {
|
||||
let name = name.clone();
|
||||
self.advance();
|
||||
@ -24,10 +24,14 @@ impl NyashParser {
|
||||
line,
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
self.consume(TokenType::ASSIGN)?;
|
||||
let value = Box::new(self.parse_expression()?);
|
||||
|
||||
Ok(ASTNode::GlobalVar { name, value, span: Span::unknown() })
|
||||
|
||||
Ok(ASTNode::GlobalVar {
|
||||
name,
|
||||
value,
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,14 +1,14 @@
/*!
* Parser Items Module
*
*
* Top-level item declarations:
* - Global variables
* - Function declarations
* - Static declarations (functions and boxes)
*/

pub mod global_vars;
pub mod functions;
pub mod global_vars;
pub mod static_items;

// Re-export for convenience

@ -3,17 +3,17 @@
|
||||
* Handles both static functions and static boxes
|
||||
*/
|
||||
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::tokenizer::TokenType;
|
||||
use crate::ast::{ASTNode, Span};
|
||||
use crate::must_advance;
|
||||
use crate::parser::common::ParserUtils;
|
||||
use crate::parser::{NyashParser, ParseError};
|
||||
use crate::tokenizer::TokenType;
|
||||
|
||||
impl NyashParser {
|
||||
/// 静的宣言をパース - 🔥 static function / static box 記法
|
||||
pub fn parse_static_declaration(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.consume(TokenType::STATIC)?;
|
||||
|
||||
|
||||
// 次のトークンで分岐: function か box か
|
||||
match &self.current_token().token_type {
|
||||
TokenType::FUNCTION => self.parse_static_function(),
|
||||
@ -28,20 +28,20 @@ impl NyashParser {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// 静的関数宣言をパース - static function Name() { ... }
|
||||
fn parse_static_function(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.consume(TokenType::FUNCTION)?;
|
||||
|
||||
|
||||
// 関数名を取得(Box名.関数名の形式をサポート)
|
||||
let name = if let TokenType::IDENTIFIER(first_part) = &self.current_token().token_type {
|
||||
let mut full_name = first_part.clone();
|
||||
self.advance();
|
||||
|
||||
|
||||
// ドット記法をチェック(例:Math.min)
|
||||
if self.match_token(&TokenType::DOT) {
|
||||
self.advance(); // DOTを消費
|
||||
|
||||
|
||||
if let TokenType::IDENTIFIER(method_name) = &self.current_token().token_type {
|
||||
full_name = format!("{}.{}", full_name, method_name);
|
||||
self.advance();
|
||||
@ -54,7 +54,7 @@ impl NyashParser {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
full_name
|
||||
} else {
|
||||
let line = self.current_token().line;
|
||||
@ -64,18 +64,18 @@ impl NyashParser {
|
||||
line,
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
// パラメータリストをパース
|
||||
self.consume(TokenType::LPAREN)?;
|
||||
let mut params = Vec::new();
|
||||
|
||||
|
||||
while !self.match_token(&TokenType::RPAREN) && !self.is_at_end() {
|
||||
must_advance!(self, _unused, "static function parameter parsing");
|
||||
|
||||
|
||||
if let TokenType::IDENTIFIER(param) = &self.current_token().token_type {
|
||||
params.push(param.clone());
|
||||
self.advance();
|
||||
|
||||
|
||||
if self.match_token(&TokenType::COMMA) {
|
||||
self.advance();
|
||||
}
|
||||
@ -88,13 +88,13 @@ impl NyashParser {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::RPAREN)?;
|
||||
|
||||
|
||||
// 関数本体をパース
|
||||
self.consume(TokenType::LBRACE)?;
|
||||
self.skip_newlines();
|
||||
|
||||
|
||||
let mut body = Vec::new();
|
||||
while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
|
||||
self.skip_newlines();
|
||||
@ -102,16 +102,16 @@ impl NyashParser {
|
||||
body.push(self.parse_statement()?);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
self.consume(TokenType::RBRACE)?;
|
||||
|
||||
|
||||
Ok(ASTNode::FunctionDeclaration {
|
||||
name,
|
||||
params,
|
||||
body,
|
||||
is_static: true, // 🔥 静的関数フラグを設定
|
||||
is_static: true, // 🔥 静的関数フラグを設定
|
||||
is_override: false, // デフォルトは非オーバーライド
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,16 +1,16 @@
|
||||
/*!
|
||||
* Nyash Parser - Rust Implementation
|
||||
*
|
||||
*
|
||||
* Python版nyashc_v4.pyのNyashParserをRustで完全再実装
|
||||
* Token列をAST (Abstract Syntax Tree) に変換
|
||||
*
|
||||
*
|
||||
* モジュール構造:
|
||||
* - common.rs: 共通ユーティリティとトレイト (ParserUtils)
|
||||
* - expressions.rs: 式パーサー (parse_expression, parse_or, parse_and等)
|
||||
* - statements.rs: 文パーサー (parse_statement, parse_if, parse_loop等)
|
||||
* - declarations/: Box宣言パーサー (box_definition, static_box, dependency_helpers)
|
||||
* - items/: トップレベル宣言 (global_vars, functions, static_items)
|
||||
*
|
||||
*
|
||||
* 2025-08-16: 大規模リファクタリング完了
|
||||
* - 1530行 → 227行 (85%削減)
|
||||
* - 機能ごとにモジュール分離で保守性向上
|
||||
@ -18,24 +18,26 @@

// サブモジュール宣言
mod common;
mod expressions;
mod expr;
mod statements;
mod declarations;
mod items;
pub mod sugar; // Phase 12.7-B: desugar pass (basic)
pub mod entry_sugar; // helper to parse with sugar level
mod expr;
mod expressions;
mod items;
mod statements;
pub mod sugar; // Phase 12.7-B: desugar pass (basic)
pub mod sugar_gate; // thread-local gate for sugar parsing (tests/docs)
// mod errors;
// mod errors;

use common::ParserUtils;

use crate::tokenizer::{Token, TokenType, TokenizeError};
use crate::ast::{ASTNode, Span};
use crate::tokenizer::{Token, TokenType, TokenizeError};
use thiserror::Error;

#[inline]
fn is_sugar_enabled() -> bool { crate::parser::sugar_gate::is_enabled() }
fn is_sugar_enabled() -> bool {
crate::parser::sugar_gate::is_enabled()
}

// ===== 🔥 Debug Macros =====

@ -49,9 +51,17 @@ macro_rules! must_advance {
|
||||
if let Some(ref mut limit) = $parser.debug_fuel {
|
||||
if *limit == 0 {
|
||||
eprintln!("🚨 PARSER INFINITE LOOP DETECTED at {}", $location);
|
||||
eprintln!("🔍 Current token: {:?} at line {}", $parser.current_token().token_type, $parser.current_token().line);
|
||||
eprintln!("🔍 Parser position: {}/{}", $parser.current, $parser.tokens.len());
|
||||
return Err($crate::parser::ParseError::InfiniteLoop {
|
||||
eprintln!(
|
||||
"🔍 Current token: {:?} at line {}",
|
||||
$parser.current_token().token_type,
|
||||
$parser.current_token().line
|
||||
);
|
||||
eprintln!(
|
||||
"🔍 Parser position: {}/{}",
|
||||
$parser.current,
|
||||
$parser.tokens.len()
|
||||
);
|
||||
return Err($crate::parser::ParseError::InfiniteLoop {
|
||||
location: $location.to_string(),
|
||||
token: $parser.current_token().token_type.clone(),
|
||||
line: $parser.current_token().line,
|
||||
@ -77,32 +87,42 @@ macro_rules! debug_fuel {
#[derive(Error, Debug)]
pub enum ParseError {
#[error("Unexpected token {found:?}, expected {expected} at line {line}")]
UnexpectedToken { found: TokenType, expected: String, line: usize },

UnexpectedToken {
found: TokenType,
expected: String,
line: usize,
},

#[error("Unexpected end of file")]
UnexpectedEOF,

#[error("Invalid expression at line {line}")]
InvalidExpression { line: usize },

#[error("Invalid statement at line {line}")]
InvalidStatement { line: usize },

#[error("Circular dependency detected between static boxes: {cycle}")]
CircularDependency { cycle: String },

#[error("🚨 Infinite loop detected in parser at {location} - token: {token:?} at line {line}")]
InfiniteLoop { location: String, token: TokenType, line: usize },

InfiniteLoop {
location: String,
token: TokenType,
line: usize,
},

#[error("🔥 Transparency system removed: {suggestion} at line {line}")]
TransparencySystemRemoved { suggestion: String, line: usize },

#[error("Unsupported namespace '{name}' at line {line}. Only 'nyashstd' is supported in Phase 0.")]

#[error(
"Unsupported namespace '{name}' at line {line}. Only 'nyashstd' is supported in Phase 0."
)]
UnsupportedNamespace { name: String, line: usize },

#[error("Expected identifier at line {line}")]
ExpectedIdentifier { line: usize },

#[error("Tokenize error: {0}")]
TokenizeError(#[from] TokenizeError),
}
@ -112,7 +132,8 @@ pub struct NyashParser {
|
||||
pub(super) tokens: Vec<Token>,
|
||||
pub(super) current: usize,
|
||||
/// 🔥 Static box依存関係追跡(循環依存検出用)
|
||||
pub(super) static_box_dependencies: std::collections::HashMap<String, std::collections::HashSet<String>>,
|
||||
pub(super) static_box_dependencies:
|
||||
std::collections::HashMap<String, std::collections::HashSet<String>>,
|
||||
/// 🔥 デバッグ燃料:無限ループ検出用制限値 (None = 無制限)
|
||||
pub(super) debug_fuel: Option<usize>,
|
||||
}
|
||||
@ -122,11 +143,11 @@ impl ParserUtils for NyashParser {
|
||||
fn tokens(&self) -> &Vec<Token> {
|
||||
&self.tokens
|
||||
}
|
||||
|
||||
|
||||
fn current(&self) -> usize {
|
||||
self.current
|
||||
}
|
||||
|
||||
|
||||
fn current_mut(&mut self) -> &mut usize {
|
||||
&mut self.current
|
||||
}
|
||||
@ -142,97 +163,99 @@ impl NyashParser {
|
||||
debug_fuel: Some(100_000), // デフォルト値
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// 文字列からパース (トークナイズ + パース)
|
||||
pub fn parse_from_string(input: impl Into<String>) -> Result<ASTNode, ParseError> {
|
||||
Self::parse_from_string_with_fuel(input, Some(100_000))
|
||||
}
|
||||
|
||||
|
||||
/// 文字列からパース (デバッグ燃料指定版)
|
||||
/// fuel: Some(n) = n回まで、None = 無制限
|
||||
pub fn parse_from_string_with_fuel(input: impl Into<String>, fuel: Option<usize>) -> Result<ASTNode, ParseError> {
|
||||
pub fn parse_from_string_with_fuel(
|
||||
input: impl Into<String>,
|
||||
fuel: Option<usize>,
|
||||
) -> Result<ASTNode, ParseError> {
|
||||
let mut tokenizer = crate::tokenizer::NyashTokenizer::new(input);
|
||||
let tokens = tokenizer.tokenize()?;
|
||||
|
||||
|
||||
let mut parser = Self::new(tokens);
|
||||
parser.debug_fuel = fuel;
|
||||
let result = parser.parse();
|
||||
result
|
||||
}
|
||||
|
||||
|
||||
/// パース実行 - Program ASTを返す
|
||||
pub fn parse(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.parse_program()
|
||||
}
|
||||
|
||||
|
||||
// ===== パース関数群 =====
|
||||
|
||||
|
||||
/// プログラム全体をパース
|
||||
fn parse_program(&mut self) -> Result<ASTNode, ParseError> {
|
||||
let mut statements = Vec::new();
|
||||
let mut _statement_count = 0;
|
||||
|
||||
|
||||
while !self.is_at_end() {
|
||||
|
||||
// EOF tokenはスキップ
|
||||
if matches!(self.current_token().token_type, TokenType::EOF) {
|
||||
break;
|
||||
}
|
||||
|
||||
|
||||
// NEWLINE tokenはスキップ(文の区切りとして使用)
|
||||
if matches!(self.current_token().token_type, TokenType::NEWLINE) {
|
||||
self.advance();
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
let statement = self.parse_statement()?;
|
||||
statements.push(statement);
|
||||
_statement_count += 1;
|
||||
}
|
||||
|
||||
|
||||
|
||||
// 🔥 すべてのstatic box解析後に循環依存検出
|
||||
self.check_circular_dependencies()?;
|
||||
|
||||
Ok(ASTNode::Program { statements, span: Span::unknown() })
|
||||
|
||||
Ok(ASTNode::Program {
|
||||
statements,
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
// Statement parsing methods are now in statements.rs module
|
||||
|
||||
|
||||
/// 代入文または関数呼び出しをパース
|
||||
fn parse_assignment_or_function_call(&mut self) -> Result<ASTNode, ParseError> {
|
||||
|
||||
// まず左辺を式としてパース
|
||||
let expr = self.parse_expression()?;
|
||||
|
||||
|
||||
// 次のトークンが = または 複合代入演算子 なら代入文
|
||||
if self.match_token(&TokenType::ASSIGN) {
|
||||
self.advance(); // consume '='
|
||||
let value = Box::new(self.parse_expression()?);
|
||||
|
||||
|
||||
// 左辺が代入可能な形式かチェック
|
||||
match &expr {
|
||||
ASTNode::Variable { .. } |
|
||||
ASTNode::FieldAccess { .. } => {
|
||||
Ok(ASTNode::Assignment {
|
||||
target: Box::new(expr),
|
||||
value,
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
ASTNode::Variable { .. } | ASTNode::FieldAccess { .. } => Ok(ASTNode::Assignment {
|
||||
target: Box::new(expr),
|
||||
value,
|
||||
span: Span::unknown(),
|
||||
}),
|
||||
_ => {
|
||||
let line = self.current_token().line;
|
||||
Err(ParseError::InvalidStatement { line })
|
||||
}
|
||||
}
|
||||
} else if self.match_token(&TokenType::PlusAssign) ||
|
||||
self.match_token(&TokenType::MinusAssign) ||
|
||||
self.match_token(&TokenType::MulAssign) ||
|
||||
self.match_token(&TokenType::DivAssign) {
|
||||
} else if self.match_token(&TokenType::PlusAssign)
|
||||
|| self.match_token(&TokenType::MinusAssign)
|
||||
|| self.match_token(&TokenType::MulAssign)
|
||||
|| self.match_token(&TokenType::DivAssign)
|
||||
{
|
||||
if !is_sugar_enabled() {
|
||||
let line = self.current_token().line;
|
||||
return Err(ParseError::UnexpectedToken {
|
||||
found: self.current_token().token_type.clone(),
|
||||
expected: "enable NYASH_SYNTAX_SUGAR_LEVEL=basic|full for '+=' and friends".to_string(),
|
||||
expected: "enable NYASH_SYNTAX_SUGAR_LEVEL=basic|full for '+=' and friends"
|
||||
.to_string(),
|
||||
line,
|
||||
});
|
||||
}
|
||||
@ -250,8 +273,17 @@ impl NyashParser {
|
||||
match &expr {
|
||||
ASTNode::Variable { .. } | ASTNode::FieldAccess { .. } => {
|
||||
let left_clone = expr.clone();
|
||||
let value = ASTNode::BinaryOp { operator: op, left: Box::new(left_clone), right: Box::new(rhs), span: Span::unknown() };
|
||||
Ok(ASTNode::Assignment { target: Box::new(expr), value: Box::new(value), span: Span::unknown() })
|
||||
let value = ASTNode::BinaryOp {
|
||||
operator: op,
|
||||
left: Box::new(left_clone),
|
||||
right: Box::new(rhs),
|
||||
span: Span::unknown(),
|
||||
};
|
||||
Ok(ASTNode::Assignment {
|
||||
target: Box::new(expr),
|
||||
value: Box::new(value),
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
_ => {
|
||||
let line = self.current_token().line;
|
||||
@ -263,10 +295,10 @@ impl NyashParser {
|
||||
Ok(expr)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Expression parsing methods are now in expressions.rs module
|
||||
// Utility methods are now in common.rs module via ParserUtils trait
|
||||
// Item parsing methods are now in items.rs module
|
||||
|
||||
|
||||
// ===== 🔥 Static Box循環依存検出 =====
|
||||
}
|
||||
|
||||
@ -1,14 +1,14 @@
/*!
* Nyash Parser - Statement Parsing Module
*
*
* 文(Statement)の解析を担当するモジュール
* if, loop, break, return, print等の制御構文を処理
*/

use crate::tokenizer::TokenType;
use crate::ast::{ASTNode, CatchClause, Span};
use super::{NyashParser, ParseError};
use super::common::ParserUtils;
use super::{NyashParser, ParseError};
use crate::ast::{ASTNode, CatchClause, Span};
use crate::tokenizer::TokenType;

impl NyashParser {
/// 文をパース
@ -16,67 +16,31 @@ impl NyashParser {
|
||||
// For grammar diff: capture starting token to classify statement keyword
|
||||
let start_tok = self.current_token().token_type.clone();
|
||||
let result = match &start_tok {
|
||||
TokenType::BOX => {
|
||||
self.parse_box_declaration()
|
||||
},
|
||||
TokenType::IMPORT => {
|
||||
self.parse_import()
|
||||
},
|
||||
TokenType::INTERFACE => {
|
||||
self.parse_interface_box_declaration()
|
||||
},
|
||||
TokenType::GLOBAL => {
|
||||
self.parse_global_var()
|
||||
},
|
||||
TokenType::FUNCTION => {
|
||||
self.parse_function_declaration()
|
||||
},
|
||||
TokenType::BOX => self.parse_box_declaration(),
|
||||
TokenType::IMPORT => self.parse_import(),
|
||||
TokenType::INTERFACE => self.parse_interface_box_declaration(),
|
||||
TokenType::GLOBAL => self.parse_global_var(),
|
||||
TokenType::FUNCTION => self.parse_function_declaration(),
|
||||
TokenType::STATIC => {
|
||||
self.parse_static_declaration() // 🔥 静的宣言 (function/box)
|
||||
},
|
||||
TokenType::IF => {
|
||||
self.parse_if()
|
||||
},
|
||||
TokenType::LOOP => {
|
||||
self.parse_loop()
|
||||
},
|
||||
TokenType::BREAK => {
|
||||
self.parse_break()
|
||||
},
|
||||
TokenType::CONTINUE => {
|
||||
self.parse_continue()
|
||||
},
|
||||
TokenType::RETURN => {
|
||||
self.parse_return()
|
||||
},
|
||||
TokenType::PRINT => {
|
||||
self.parse_print()
|
||||
},
|
||||
TokenType::NOWAIT => {
|
||||
self.parse_nowait()
|
||||
},
|
||||
TokenType::INCLUDE => {
|
||||
self.parse_include()
|
||||
},
|
||||
TokenType::LOCAL => {
|
||||
self.parse_local()
|
||||
},
|
||||
TokenType::OUTBOX => {
|
||||
self.parse_outbox()
|
||||
},
|
||||
TokenType::TRY => {
|
||||
self.parse_try_catch()
|
||||
},
|
||||
TokenType::THROW => {
|
||||
self.parse_throw()
|
||||
},
|
||||
TokenType::USING => {
|
||||
self.parse_using()
|
||||
},
|
||||
self.parse_static_declaration() // 🔥 静的宣言 (function/box)
|
||||
}
|
||||
TokenType::IF => self.parse_if(),
|
||||
TokenType::LOOP => self.parse_loop(),
|
||||
TokenType::BREAK => self.parse_break(),
|
||||
TokenType::CONTINUE => self.parse_continue(),
|
||||
TokenType::RETURN => self.parse_return(),
|
||||
TokenType::PRINT => self.parse_print(),
|
||||
TokenType::NOWAIT => self.parse_nowait(),
|
||||
TokenType::INCLUDE => self.parse_include(),
|
||||
TokenType::LOCAL => self.parse_local(),
|
||||
TokenType::OUTBOX => self.parse_outbox(),
|
||||
TokenType::TRY => self.parse_try_catch(),
|
||||
TokenType::THROW => self.parse_throw(),
|
||||
TokenType::USING => self.parse_using(),
|
||||
TokenType::FROM => {
|
||||
// 🔥 from構文: from Parent.method(args) または from Parent.constructor(args)
|
||||
self.parse_from_call_statement()
|
||||
},
|
||||
}
|
||||
TokenType::IDENTIFIER(_name) => {
|
||||
// function宣言 または 代入文 または 関数呼び出し
|
||||
self.parse_assignment_or_function_call()
|
||||
@ -91,7 +55,7 @@ impl NyashParser {
|
||||
Ok(self.parse_expression()?)
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
// Non-invasive syntax rule check
|
||||
if std::env::var("NYASH_GRAMMAR_DIFF").ok().as_deref() == Some("1") {
|
||||
let kw = match start_tok {
|
||||
@ -116,7 +80,12 @@ impl NyashParser {
|
||||
};
|
||||
if let Some(k) = kw {
|
||||
let ok = crate::grammar::engine::get().syntax_is_allowed_statement(k);
|
||||
if !ok { eprintln!("[GRAMMAR-DIFF][Parser] statement '{}' not allowed by syntax rules", k); }
|
||||
if !ok {
|
||||
eprintln!(
|
||||
"[GRAMMAR-DIFF][Parser] statement '{}' not allowed by syntax rules",
|
||||
k
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
result
|
||||
@ -130,7 +99,11 @@ impl NyashParser {
|
||||
self.advance();
|
||||
v
|
||||
} else {
|
||||
return Err(ParseError::UnexpectedToken { found: self.current_token().token_type.clone(), expected: "string literal".to_string(), line: self.current_token().line });
|
||||
return Err(ParseError::UnexpectedToken {
|
||||
found: self.current_token().token_type.clone(),
|
||||
expected: "string literal".to_string(),
|
||||
line: self.current_token().line,
|
||||
});
|
||||
};
|
||||
// Optional: 'as' Alias (treat 'as' as identifier literal)
|
||||
let mut alias: Option<String> = None;
|
||||
@ -141,20 +114,28 @@ impl NyashParser {
|
||||
alias = Some(name.clone());
|
||||
self.advance();
|
||||
} else {
|
||||
return Err(ParseError::UnexpectedToken { found: self.current_token().token_type.clone(), expected: "alias name".to_string(), line: self.current_token().line });
|
||||
return Err(ParseError::UnexpectedToken {
|
||||
found: self.current_token().token_type.clone(),
|
||||
expected: "alias name".to_string(),
|
||||
line: self.current_token().line,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(ASTNode::ImportStatement { path, alias, span: Span::unknown() })
|
||||
Ok(ASTNode::ImportStatement {
|
||||
path,
|
||||
alias,
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
/// if文をパース: if (condition) { body } else if ... else { body }
|
||||
pub(super) fn parse_if(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.advance(); // consume 'if'
|
||||
|
||||
|
||||
// 条件部分を取得
|
||||
let condition = Box::new(self.parse_expression()?);
|
||||
|
||||
|
||||
// then部分を取得
|
||||
self.consume(TokenType::LBRACE)?;
|
||||
let mut then_body = Vec::new();
|
||||
@ -165,11 +146,11 @@ impl NyashParser {
|
||||
}
|
||||
}
|
||||
self.consume(TokenType::RBRACE)?;
|
||||
|
||||
|
||||
// else if/else部分を処理
|
||||
let else_body = if self.match_token(&TokenType::ELSE) {
|
||||
self.advance(); // consume 'else'
|
||||
|
||||
|
||||
if self.match_token(&TokenType::IF) {
|
||||
// else if を ネストしたifとして処理
|
||||
let nested_if = self.parse_if()?;
|
||||
@ -190,7 +171,7 @@ impl NyashParser {
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
|
||||
Ok(ASTNode::If {
|
||||
condition,
|
||||
then_body,
|
||||
@ -198,11 +179,11 @@ impl NyashParser {
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
/// loop文をパース
|
||||
pub(super) fn parse_loop(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.advance(); // consume 'loop'
|
||||
|
||||
|
||||
// 条件部分を取得(省略可: `loop { ... }` は無条件ループとして扱う)
|
||||
let condition = if self.match_token(&TokenType::LPAREN) {
|
||||
self.advance(); // consume '('
|
||||
@ -211,9 +192,12 @@ impl NyashParser {
|
||||
cond
|
||||
} else {
|
||||
// default: true
|
||||
Box::new(ASTNode::Literal { value: crate::ast::LiteralValue::Bool(true), span: Span::unknown() })
|
||||
Box::new(ASTNode::Literal {
|
||||
value: crate::ast::LiteralValue::Bool(true),
|
||||
span: Span::unknown(),
|
||||
})
|
||||
};
|
||||
|
||||
|
||||
// body部分を取得
|
||||
self.consume(TokenType::LBRACE)?;
|
||||
let mut body = Vec::new();
|
||||
@ -224,30 +208,34 @@ impl NyashParser {
|
||||
}
|
||||
}
|
||||
self.consume(TokenType::RBRACE)?;
|
||||
|
||||
|
||||
Ok(ASTNode::Loop {
|
||||
condition,
|
||||
body,
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
/// break文をパース
|
||||
pub(super) fn parse_break(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.advance(); // consume 'break'
|
||||
Ok(ASTNode::Break { span: Span::unknown() })
|
||||
Ok(ASTNode::Break {
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
|
||||
/// continue文をパース
|
||||
pub(super) fn parse_continue(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.advance(); // consume 'continue'
|
||||
Ok(ASTNode::Continue { span: Span::unknown() })
|
||||
Ok(ASTNode::Continue {
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
/// return文をパース
|
||||
pub(super) fn parse_return(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.advance(); // consume 'return'
|
||||
// 許容: 改行をスキップしてから式有無を判定
|
||||
// 許容: 改行をスキップしてから式有無を判定
|
||||
self.skip_newlines();
|
||||
// returnの後に式があるかチェック(RBRACE/EOFなら値なし)
|
||||
let value = if self.is_at_end() || self.match_token(&TokenType::RBRACE) {
|
||||
@ -255,24 +243,30 @@ impl NyashParser {
|
||||
} else {
|
||||
Some(Box::new(self.parse_expression()?))
|
||||
};
|
||||
|
||||
Ok(ASTNode::Return { value, span: Span::unknown() })
|
||||
|
||||
Ok(ASTNode::Return {
|
||||
value,
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
/// print文をパース
|
||||
pub(super) fn parse_print(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.advance(); // consume 'print'
|
||||
self.consume(TokenType::LPAREN)?;
|
||||
let value = Box::new(self.parse_expression()?);
|
||||
self.consume(TokenType::RPAREN)?;
|
||||
|
||||
Ok(ASTNode::Print { expression: value, span: Span::unknown() })
|
||||
|
||||
Ok(ASTNode::Print {
|
||||
expression: value,
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
/// nowait文をパース: nowait variable = expression
|
||||
pub(super) fn parse_nowait(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.advance(); // consume 'nowait'
|
||||
|
||||
|
||||
// 変数名を取得
|
||||
let variable = if let TokenType::IDENTIFIER(name) = &self.current_token().token_type {
|
||||
let name = name.clone();
|
||||
@ -286,21 +280,21 @@ impl NyashParser {
|
||||
line,
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
self.consume(TokenType::ASSIGN)?;
|
||||
let expression = Box::new(self.parse_expression()?);
|
||||
|
||||
|
||||
Ok(ASTNode::Nowait {
|
||||
variable,
|
||||
expression,
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
/// include文をパース
|
||||
pub(super) fn parse_include(&mut self) -> Result<ASTNode, ParseError> {
|
||||
self.advance(); // consume 'include'
|
||||
|
||||
|
||||
let path = if let TokenType::STRING(path) = &self.current_token().token_type {
|
||||
let path = path.clone();
|
||||
self.advance();
|
||||
@ -313,27 +307,30 @@ impl NyashParser {
|
||||
line,
|
||||
});
|
||||
};
|
||||
|
||||
Ok(ASTNode::Include { filename: path, span: Span::unknown() })
|
||||
|
||||
Ok(ASTNode::Include {
|
||||
filename: path,
|
||||
span: Span::unknown(),
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
    /// Parse a local variable declaration: local var1, var2, var3 or local x = 10
    pub(super) fn parse_local(&mut self) -> Result<ASTNode, ParseError> {
        self.advance(); // consume 'local'

        let mut names = Vec::new();
        let mut initial_values = Vec::new();

        // Get the first variable name
        if let TokenType::IDENTIFIER(name) = &self.current_token().token_type {
            names.push(name.clone());
            self.advance();

            // If '=' follows, read an initial value
            if self.match_token(&TokenType::ASSIGN) {
                self.advance(); // consume '='
                initial_values.push(Some(Box::new(self.parse_expression()?)));

                // local with an initializer takes a single variable only (no comma list)
                Ok(ASTNode::Local {
                    variables: names,
@ -343,11 +340,11 @@ impl NyashParser {
            } else {
                // Without an initializer, multiple comma-separated variables are allowed
                initial_values.push(None);

                // Collect additional comma-separated variable names
                while self.match_token(&TokenType::COMMA) {
                    self.advance(); // consume ','

                    if let TokenType::IDENTIFIER(name) = &self.current_token().token_type {
                        names.push(name.clone());
                        initial_values.push(None);
@ -361,7 +358,7 @@ impl NyashParser {
                        });
                    }
                }

                Ok(ASTNode::Local {
                    variables: names,
                    initial_values,
@ -377,22 +374,22 @@ impl NyashParser {
                })
            }
        }

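For reference, a minimal sketch (not part of this diff) of the two node shapes parse_local is meant to produce: "local x = 10" yields a single variable with one Some(..) initializer, while "local a, b, c" yields several variables whose initializers are all None. It uses only the ASTNode::Local fields visible in the rewrite pass further down; the Null literal is a stand-in, since the integer literal variant is not shown in this diff.

#[allow(dead_code)]
fn local_node_shapes() -> (ASTNode, ASTNode) {
    // "local x = 10": one variable, one Some(..) initializer
    // (LiteralValue::Null stands in for an integer literal variant not shown here).
    let with_init = ASTNode::Local {
        variables: vec!["x".to_string()],
        initial_values: vec![Some(Box::new(ASTNode::Literal {
            value: LiteralValue::Null,
            span: Span::unknown(),
        }))],
        span: Span::unknown(),
    };
    // "local a, b, c": several variables, all initializers None
    let without_init = ASTNode::Local {
        variables: vec!["a".to_string(), "b".to_string(), "c".to_string()],
        initial_values: vec![None, None, None],
        span: Span::unknown(),
    };
    (with_init, without_init)
}
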
    /// Parse an outbox variable declaration: outbox var1, var2, var3
    pub(super) fn parse_outbox(&mut self) -> Result<ASTNode, ParseError> {
        self.advance(); // consume 'outbox'

        let mut names = Vec::new();

        // Get the first variable name
        if let TokenType::IDENTIFIER(name) = &self.current_token().token_type {
            names.push(name.clone());
            self.advance();

            // Collect additional comma-separated variable names
            while self.match_token(&TokenType::COMMA) {
                self.advance(); // consume ','

                if let TokenType::IDENTIFIER(name) = &self.current_token().token_type {
                    names.push(name.clone());
                    self.advance();
@ -405,7 +402,7 @@ impl NyashParser {
                    });
                }
            }

            let num_vars = names.len();
            Ok(ASTNode::Outbox {
                variables: names,
@ -421,12 +418,12 @@ impl NyashParser {
            })
        }
    }

    /// Parse a try-catch statement
    pub(super) fn parse_try_catch(&mut self) -> Result<ASTNode, ParseError> {
        self.advance(); // consume 'try'
        self.consume(TokenType::LBRACE)?;

        let mut try_body = Vec::new();
        while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
            self.skip_newlines();
@ -434,42 +431,44 @@ impl NyashParser {
                try_body.push(self.parse_statement()?);
            }
        }

        self.consume(TokenType::RBRACE)?;

        let mut catch_clauses = Vec::new();

        // Parse catch clauses
        while self.match_token(&TokenType::CATCH) {
            self.advance(); // consume 'catch'
            self.consume(TokenType::LPAREN)?;

            // Exception type (optional)
            let exception_type = if let TokenType::IDENTIFIER(type_name) = &self.current_token().token_type {
                let type_name = type_name.clone();
                self.advance();
                Some(type_name)
            } else {
                None
            };

            let exception_type =
                if let TokenType::IDENTIFIER(type_name) = &self.current_token().token_type {
                    let type_name = type_name.clone();
                    self.advance();
                    Some(type_name)
                } else {
                    None
                };

            // Exception variable name
            let exception_var = if let TokenType::IDENTIFIER(var_name) = &self.current_token().token_type {
                let var_name = var_name.clone();
                self.advance();
                var_name
            } else {
                let line = self.current_token().line;
                return Err(ParseError::UnexpectedToken {
                    found: self.current_token().token_type.clone(),
                    expected: "exception variable name".to_string(),
                    line,
                });
            };

            let exception_var =
                if let TokenType::IDENTIFIER(var_name) = &self.current_token().token_type {
                    let var_name = var_name.clone();
                    self.advance();
                    var_name
                } else {
                    let line = self.current_token().line;
                    return Err(ParseError::UnexpectedToken {
                        found: self.current_token().token_type.clone(),
                        expected: "exception variable name".to_string(),
                        line,
                    });
                };

            self.consume(TokenType::RPAREN)?;
            self.consume(TokenType::LBRACE)?;

            let mut catch_body = Vec::new();
            while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
                self.skip_newlines();
@ -477,9 +476,9 @@ impl NyashParser {
                    catch_body.push(self.parse_statement()?);
                }
            }

            self.consume(TokenType::RBRACE)?;

            catch_clauses.push(CatchClause {
                exception_type,
                variable_name: Some(exception_var),
@ -487,12 +486,12 @@ impl NyashParser {
                span: Span::unknown(),
            });
        }

        // Parse the finally clause (optional)
        let finally_body = if self.match_token(&TokenType::FINALLY) {
            self.advance(); // consume 'finally'
            self.consume(TokenType::LBRACE)?;

            let mut body = Vec::new();
            while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
                self.skip_newlines();
@ -500,13 +499,13 @@ impl NyashParser {
                    body.push(self.parse_statement()?);
                }
            }

            self.consume(TokenType::RBRACE)?;
            Some(body)
        } else {
            None
        };

        Ok(ASTNode::TryCatch {
            try_body,
            catch_clauses,
@ -514,48 +513,51 @@ impl NyashParser {
            span: Span::unknown(),
        })
    }

    /// Parse a throw statement
    pub(super) fn parse_throw(&mut self) -> Result<ASTNode, ParseError> {
        self.advance(); // consume 'throw'
        let value = Box::new(self.parse_expression()?);
        Ok(ASTNode::Throw { expression: value, span: Span::unknown() })
        Ok(ASTNode::Throw {
            expression: value,
            span: Span::unknown(),
        })
    }

    /// 🔥 Parse the from construct as a statement: from Parent.method(args)
    pub(super) fn parse_from_call_statement(&mut self) -> Result<ASTNode, ParseError> {
        // Use the existing parse_from_call() to build the FromCall AST node
        let from_call_expr = self.parse_from_call()?;

        // FromCall is an expression, but it can also be used as a statement,
        // e.g. from Animal.constructor() (discarding the return value)
        Ok(from_call_expr)
    }

    /// Parse a using statement: using namespace_name
    pub(super) fn parse_using(&mut self) -> Result<ASTNode, ParseError> {
        self.advance(); // consume 'using'

        // Get the namespace name
        if let TokenType::IDENTIFIER(namespace_name) = &self.current_token().token_type {
            let name = namespace_name.clone();
            self.advance();

            // Phase 0 only allows "nyashstd"
            if name != "nyashstd" {
                return Err(ParseError::UnsupportedNamespace {
                    name,
                    line: self.current_token().line
                return Err(ParseError::UnsupportedNamespace {
                    name,
                    line: self.current_token().line,
                });
            }

            Ok(ASTNode::UsingStatement {
                namespace_name: name,
                span: Span::unknown(),
            })
        } else {
            Err(ParseError::ExpectedIdentifier {
                line: self.current_token().line
            Err(ParseError::ExpectedIdentifier {
                line: self.current_token().line,
            })
        }
    }

@ -16,60 +16,159 @@ fn rewrite(ast: ASTNode) -> ASTNode {
    match ast {
        ASTNode::Program { statements, span } => {
            let stmts = statements.into_iter().map(|s| rewrite(s)).collect();
            ASTNode::Program { statements: stmts, span }
            ASTNode::Program {
                statements: stmts,
                span,
            }
        }
        ASTNode::Assignment { target, value, span } => {
            ASTNode::Assignment { target: Box::new(rewrite(*target)), value: Box::new(rewrite(*value)), span }
        }
        ASTNode::BinaryOp { operator, left, right, span } => {
        ASTNode::Assignment {
            target,
            value,
            span,
        } => ASTNode::Assignment {
            target: Box::new(rewrite(*target)),
            value: Box::new(rewrite(*value)),
            span,
        },
        ASTNode::BinaryOp {
            operator,
            left,
            right,
            span,
        } => {
            // default null (??): a ?? b => if a is null then b else a
            // Here we approximate as: (a == null) ? b : a using peek-like structure
            // For minimalism, keep as BinaryOp and rely on later phases (placeholder).
            ASTNode::BinaryOp { operator, left: Box::new(rewrite(*left)), right: Box::new(rewrite(*right)), span }
        }
        ASTNode::MethodCall { object, method, arguments, span } => {
            ASTNode::MethodCall { object: Box::new(rewrite(*object)), method, arguments: arguments.into_iter().map(rewrite).collect(), span }
        }
        ASTNode::FunctionCall { name, arguments, span } => {
            ASTNode::FunctionCall { name, arguments: arguments.into_iter().map(rewrite).collect(), span }
        }
        ASTNode::FieldAccess { object, field, span } => {
            ASTNode::FieldAccess { object: Box::new(rewrite(*object)), field, span }
        }
        ASTNode::UnaryOp { operator, operand, span } => {
            ASTNode::UnaryOp { operator, operand: Box::new(rewrite(*operand)), span }
        }
        ASTNode::PeekExpr { scrutinee, arms, else_expr, span } => {
            ASTNode::PeekExpr { scrutinee: Box::new(rewrite(*scrutinee)), arms: arms.into_iter().map(|(l,e)| (l, rewrite(e))).collect(), else_expr: Box::new(rewrite(*else_expr)), span }
            ASTNode::BinaryOp {
                operator,
                left: Box::new(rewrite(*left)),
                right: Box::new(rewrite(*right)),
                span,
            }
        }
        ASTNode::MethodCall {
            object,
            method,
            arguments,
            span,
        } => ASTNode::MethodCall {
            object: Box::new(rewrite(*object)),
            method,
            arguments: arguments.into_iter().map(rewrite).collect(),
            span,
        },
        ASTNode::FunctionCall {
            name,
            arguments,
            span,
        } => ASTNode::FunctionCall {
            name,
            arguments: arguments.into_iter().map(rewrite).collect(),
            span,
        },
        ASTNode::FieldAccess {
            object,
            field,
            span,
        } => ASTNode::FieldAccess {
            object: Box::new(rewrite(*object)),
            field,
            span,
        },
        ASTNode::UnaryOp {
            operator,
            operand,
            span,
        } => ASTNode::UnaryOp {
            operator,
            operand: Box::new(rewrite(*operand)),
            span,
        },
        ASTNode::PeekExpr {
            scrutinee,
            arms,
            else_expr,
            span,
        } => ASTNode::PeekExpr {
            scrutinee: Box::new(rewrite(*scrutinee)),
            arms: arms.into_iter().map(|(l, e)| (l, rewrite(e))).collect(),
            else_expr: Box::new(rewrite(*else_expr)),
            span,
        },
        // Others: recursively visit children where present
        ASTNode::If { condition, then_body, else_body, span } => {
            ASTNode::If { condition: Box::new(rewrite(*condition)), then_body: then_body.into_iter().map(rewrite).collect(), else_body: else_body.map(|v| v.into_iter().map(rewrite).collect()), span }
        }
        ASTNode::Loop { condition, body, span } => {
            ASTNode::Loop { condition: Box::new(rewrite(*condition)), body: body.into_iter().map(rewrite).collect(), span }
        }
        ASTNode::Return { value, span } => {
            ASTNode::Return { value: value.map(|v| Box::new(rewrite(*v))), span }
        }
        ASTNode::Print { expression, span } => {
            ASTNode::Print { expression: Box::new(rewrite(*expression)), span }
        }
        ASTNode::New { class, arguments, type_arguments, span } => {
            ASTNode::New { class, arguments: arguments.into_iter().map(rewrite).collect(), type_arguments, span }
        }
        ASTNode::Call { callee, arguments, span } => {
            ASTNode::Call { callee: Box::new(rewrite(*callee)), arguments: arguments.into_iter().map(rewrite).collect(), span }
        }
        ASTNode::Local { variables, initial_values, span } => {
            ASTNode::Local { variables, initial_values: initial_values.into_iter().map(|o| o.map(|b| Box::new(rewrite(*b)))).collect(), span }
        }
        ASTNode::If {
            condition,
            then_body,
            else_body,
            span,
        } => ASTNode::If {
            condition: Box::new(rewrite(*condition)),
            then_body: then_body.into_iter().map(rewrite).collect(),
            else_body: else_body.map(|v| v.into_iter().map(rewrite).collect()),
            span,
        },
        ASTNode::Loop {
            condition,
            body,
            span,
        } => ASTNode::Loop {
            condition: Box::new(rewrite(*condition)),
            body: body.into_iter().map(rewrite).collect(),
            span,
        },
        ASTNode::Return { value, span } => ASTNode::Return {
            value: value.map(|v| Box::new(rewrite(*v))),
            span,
        },
        ASTNode::Print { expression, span } => ASTNode::Print {
            expression: Box::new(rewrite(*expression)),
            span,
        },
        ASTNode::New {
            class,
            arguments,
            type_arguments,
            span,
        } => ASTNode::New {
            class,
            arguments: arguments.into_iter().map(rewrite).collect(),
            type_arguments,
            span,
        },
        ASTNode::Call {
            callee,
            arguments,
            span,
        } => ASTNode::Call {
            callee: Box::new(rewrite(*callee)),
            arguments: arguments.into_iter().map(rewrite).collect(),
            span,
        },
        ASTNode::Local {
            variables,
            initial_values,
            span,
        } => ASTNode::Local {
            variables,
            initial_values: initial_values
                .into_iter()
                .map(|o| o.map(|b| Box::new(rewrite(*b))))
                .collect(),
            span,
        },
        other => other,
    }
}

#[allow(dead_code)]
fn make_eq_null(expr: ASTNode) -> ASTNode {
    ASTNode::BinaryOp { operator: BinaryOperator::Equal, left: Box::new(expr), right: Box::new(ASTNode::Literal { value: LiteralValue::Null, span: Span::unknown() }), span: Span::unknown() }
    ASTNode::BinaryOp {
        operator: BinaryOperator::Equal,
        left: Box::new(expr),
        right: Box::new(ASTNode::Literal {
            value: LiteralValue::Null,
            span: Span::unknown(),
        }),
        span: Span::unknown(),
    }
}

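The helper above only builds the expr == null scrutinee; the actual a ?? b lowering is deferred to later phases, as the placeholder comment in rewrite() notes. A minimal test sketch of that contract, assuming only the AST variants already shown in this diff (module and test names are illustrative):

#[cfg(test)]
mod null_check_sketch {
    use super::*;

    // Sketch only: make_eq_null wraps any expression in an `== null` comparison,
    // which is the scrutinee a later `a ?? b` lowering would branch on.
    #[test]
    fn wraps_expression_in_null_comparison() {
        let expr = ASTNode::Literal {
            value: LiteralValue::Null,
            span: Span::unknown(),
        };
        match make_eq_null(expr) {
            ASTNode::BinaryOp {
                operator: BinaryOperator::Equal,
                ..
            } => {}
            _ => panic!("expected an equality BinaryOp"),
        }
    }
}
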
@ -5,8 +5,13 @@ thread_local! {
}

pub fn is_enabled_env() -> bool {
    if std::env::var("NYASH_FORCE_SUGAR").ok().as_deref() == Some("1") { return true; }
    matches!(std::env::var("NYASH_SYNTAX_SUGAR_LEVEL").ok().as_deref(), Some("basic") | Some("full"))
    if std::env::var("NYASH_FORCE_SUGAR").ok().as_deref() == Some("1") {
        return true;
    }
    matches!(
        std::env::var("NYASH_SYNTAX_SUGAR_LEVEL").ok().as_deref(),
        Some("basic") | Some("full")
    )
}

pub fn is_enabled() -> bool {
@ -22,4 +27,3 @@ pub fn with_enabled<T>(f: impl FnOnce() -> T) -> T {
        r
    })
}
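Usage sketch for the environment toggle above, not part of this commit: NYASH_FORCE_SUGAR=1, or NYASH_SYNTAX_SUGAR_LEVEL set to basic or full, turns the sugar pass on. std::env::set_var mutates process-global state, so a snippet like this belongs only in a demo or a serially-run test.

#[allow(dead_code)]
fn demo_enable_sugar() {
    // Either variable enables the pass; shown here via the level variable.
    std::env::set_var("NYASH_SYNTAX_SUGAR_LEVEL", "basic");
    debug_assert!(is_enabled_env());
}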