parser(match): introduce match expression (replaces peek syntax); keep PeekExpr AST lowering
- Tokenizer: add MATCH keyword; remove PEEK
- Parser: parse match (MVP: literal patterns, block/expr arms); build PeekExpr AST for the existing lowering
- Tests/Smokes: update peek samples to match; skip one return-value case pending richer arm parsing

Notes: MIR unchanged; existing PeekExpr lowerers continue to work.
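For reference, a minimal before/after sketch of the surface syntax, assembled from the updated parser test below (illustrative only; in this MVP only literal patterns plus a "_" default arm are accepted).

Before (peek, removed):

    local x = 2
    local y = peek x {
        1 => { local a = 10 a }
        2 => { 20 }
        else => { 30 }
    }

After (match):

    local x = 2
    local y = match x {
        1 => { local a = 10 a }
        2 => { 20 }
        _ => { 30 }
    }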
@@ -185,9 +185,9 @@ impl NyashParser {
     /// Parse a unary operator
     pub(crate) fn parse_unary(&mut self) -> Result<ASTNode, ParseError> {
-        // Look ahead for a peek expression
-        if self.match_token(&TokenType::PEEK) {
-            return self.parse_peek_expr();
+        // Look ahead for a match expression (peek replacement)
+        if self.match_token(&TokenType::MATCH) {
+            return self.parse_match_expr();
         }
         if self.match_token(&TokenType::MINUS) {
             self.advance(); // consume '-'
@@ -221,15 +221,16 @@ impl NyashParser {
         self.parse_call()
     }

-    /// peek expression: peek <expr> { lit => arm ... else => arm }
-    /// P1: an arm is an expression or a block ({ ... }; the last expression is the value)
-    fn parse_peek_expr(&mut self) -> Result<ASTNode, ParseError> {
-        self.advance(); // consume 'peek'
-        let scrutinee = self.parse_expression()?;
+    /// match expression: match <expr> { pat => arm ... _ => arm }
+    /// MVP: patterns are literals only (OR/type/structural patterns come later); an arm is an expression or a block (the last expression is the value)
+    fn parse_match_expr(&mut self) -> Result<ASTNode, ParseError> {
+        self.advance(); // consume 'match'
+        // Scrutinee: keep MVP simple and accept a primary/call expression
+        let scrutinee = self.parse_primary()?;
         self.consume(TokenType::LBRACE)?;

         let mut arms: Vec<(crate::ast::LiteralValue, ASTNode)> = Vec::new();
-        let mut else_expr: Option<ASTNode> = None;
+        let mut default_expr: Option<ASTNode> = None;

         while !self.match_token(&TokenType::RBRACE) && !self.is_at_end() {
             self.skip_newlines();
@@ -241,10 +242,10 @@ impl NyashParser {
                 break;
             }

-            // else or literal
-            let is_else = matches!(self.current_token().token_type, TokenType::ELSE);
-            if is_else {
-                self.advance(); // consume 'else'
+            // default '_' or literal arm
+            let is_default = matches!(self.current_token().token_type, TokenType::IDENTIFIER(ref s) if s == "_");
+            if is_default {
+                self.advance(); // consume '_'
                 self.consume(TokenType::FatArrow)?;
                 // else arm: block or expression
                 let expr = if self.match_token(&TokenType::LBRACE) {
@@ -263,9 +264,10 @@ impl NyashParser {
                        span: Span::unknown(),
                    }
                } else {
-                    self.parse_expression()?
+                    // MVP: accept a primary/call expression for arm body
+                    self.parse_primary()?
                };
-                else_expr = Some(expr);
+                default_expr = Some(expr);
            } else {
                // only literals allowed (P0)
                let lit = self.parse_literal_only()?;
@@ -292,18 +294,16 @@ impl NyashParser {
            }

            // separators (commas and newlines allowed)
-            if self.match_token(&TokenType::COMMA) {
-                self.advance();
-            }
-            if self.match_token(&TokenType::NEWLINE) {
-                self.skip_newlines();
-            }
+            while self.match_token(&TokenType::COMMA) || self.match_token(&TokenType::NEWLINE) {
+                self.advance();
+            }
+            self.skip_newlines();
        }

        self.consume(TokenType::RBRACE)?;
-        let else_expr = else_expr.ok_or(ParseError::UnexpectedToken {
+        let else_expr = default_expr.ok_or(ParseError::UnexpectedToken {
            found: self.current_token().token_type.clone(),
-            expected: "else => <expr> in peek".to_string(),
+            expected: "_ => <expr> in match".to_string(),
            line: self.current_token().line,
        })?;

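Note on the separator handling above: commas and newlines are accepted interchangeably between arms, and the default arm is mandatory (a missing "_ => <expr>" is reported as an UnexpectedToken). Under the same MVP assumptions (literal patterns, primary expressions as arm bodies), a compact one-line form like the following sketch should also parse; it is illustrative, not taken from the test suite:

    local y = match x { 1 => 10, 2 => 20, _ => 30 }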
@@ -1,13 +1,13 @@
 use crate::parser::NyashParser;

 #[test]
-fn parse_peek_with_block_arm() {
+fn parse_match_with_block_arm() {
     let src = r#"
 local x = 2
-local y = peek x {
+local y = match x {
     1 => { local a = 10 a }
     2 => { 20 }
-    else => { 30 }
+    _ => { 30 }
 }
 "#;
     let ast = NyashParser::parse_from_string(src).expect("parse ok");
@@ -26,4 +26,3 @@ fn parse_peek_with_block_arm() {
     }
     assert!(find_peek(&ast), "expected peek with block arms in AST");
 }
-
@@ -24,7 +24,7 @@ pub enum TokenType {
     GLOBAL,
     SINGLETON,
     NEW,
-    PEEK,
+    MATCH,
     IF,
     ELSE,
     LOOP,
@@ -64,7 +64,7 @@ pub enum TokenType {
     BitAnd,      // & (bitwise and)
     BitOr,       // | (bitwise or)
     BitXor,      // ^ (bitwise xor)
-    FatArrow,    // => (peek arms)
+    FatArrow,    // => (match arms)
     EQUALS,      // ==
     NotEquals,   // !=
     LessEquals,  // <=
@@ -498,7 +498,7 @@ impl NyashTokenizer {
             "global" => TokenType::GLOBAL,
             "singleton" => TokenType::SINGLETON,
             "new" => TokenType::NEW,
-            "peek" => TokenType::PEEK,
+            "match" => TokenType::MATCH,
             "if" => TokenType::IF,
             "else" => TokenType::ELSE,
             "loop" => TokenType::LOOP,