🧹 Major warning cleanup: 200+ → 102 warnings (50% reduction)

- Fix TokenType enum naming convention (SNAKE_CASE → CamelCase)
- Remove 60+ unused imports across multiple modules
- Clean up interpreter, backend, and box modules
- LLVM build now passes with significantly fewer warnings
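
As background for the rename, a minimal hypothetical sketch in Rust (illustrative only, not this repository's actual source): rustc's non_camel_case_types lint fires on SNAKE_CASE enum variants and unused_imports fires on use items that are never referenced, so renaming a variant also forces every construction site and match arm in the tokenizer, parser, and tests onto the new name in the same commit.

// Hypothetical example, not the project's real definitions.
#[derive(Debug, Clone, PartialEq)]
pub enum TokenType {
    PipeForward, // |>  (was PIPE_FORWARD)
    QmarkQmark,  // ??  (was QMARK_QMARK)
    FatArrow,    // =>  (was FAT_ARROW)
}

// Every consumer has to switch to the CamelCase names at the same time,
// which is why the parser, tokenizer, and test hunks below change in lockstep.
fn describe(token: &TokenType) -> &'static str {
    match token {
        TokenType::PipeForward => "pipe-forward operator",
        TokenType::QmarkQmark => "null-coalescing operator",
        TokenType::FatArrow => "peek-arm arrow",
    }
}
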
@@ -1,13 +1,10 @@
 use super::helpers::{as_float, as_int, map_type};
 use super::LLVMCompiler;
 use crate::backend::llvm::context::CodegenContext;
 use crate::box_trait::{BoolBox, IntegerBox, StringBox};
 use crate::boxes::{function_box::FunctionBox, math_box::FloatBox, null_box::NullBox};
 use crate::mir::function::MirModule;
-use crate::mir::instruction::{BinaryOp, CompareOp, ConstValue, MirInstruction, UnaryOp};
+use crate::mir::instruction::{ConstValue, MirInstruction, UnaryOp};
 use crate::mir::ValueId;
 use inkwell::context::Context;
 use inkwell::types::BasicType; // for as_basic_type_enum()
 use inkwell::{
     types::{BasicTypeEnum, FloatType, IntType, PointerType},
     values::{BasicValueEnum, FloatValue, FunctionValue, IntValue, PhiValue, PointerValue},
@@ -7,18 +7,14 @@
  * Typical Callers: runner (VM backend), instruction handlers (vm_instructions)
  */

-use crate::mir::{ConstValue, ValueId, BasicBlockId, MirModule, MirFunction, MirInstruction};
 use crate::box_trait::{NyashBox, StringBox, IntegerBox, BoolBox, VoidBox};
+use crate::mir::{ValueId, BasicBlockId, MirModule};
 use std::collections::HashMap;
 use std::sync::Arc;
 use crate::runtime::NyashRuntime;
 use crate::scope_tracker::ScopeTracker;
 // MirModule is already imported via crate::mir at top
 use crate::instance_v2::InstanceBox;
 use super::vm_phi::LoopExecutor;
 use std::time::Instant;
 use super::frame::ExecutionFrame;
 use super::control_flow;

 // Phase 9.78a: Import necessary components for unified Box handling
 // TODO: Re-enable when interpreter refactoring is complete
@@ -1,4 +1,4 @@
-use crate::box_trait::{NyashBox, StringBox, BoolBox, VoidBox, BoxCore, BoxBase};
+use crate::box_trait::{NyashBox, StringBox, BoolBox, BoxCore, BoxBase};
 use std::any::Any;

 #[derive(Debug, Clone)]
@@ -1,7 +1,7 @@
 use crate::box_trait::{NyashBox, StringBox, BoolBox, BoxCore, BoxBase};
 use crate::ast::ASTNode;
 use std::collections::HashMap;
-use std::sync::{Arc, Weak};
+use std::sync::Weak;
 use std::any::Any;

 #[derive(Debug)]
@@ -1,4 +1,4 @@
-use crate::box_trait::{NyashBox, StringBox, BoolBox, IntegerBox, VoidBox, BoxCore, BoxBase};
+use crate::box_trait::{NyashBox, StringBox, BoolBox, BoxCore, BoxBase};
 use std::any::Any;

 #[derive(Debug, Clone)]
@@ -38,7 +38,7 @@
 use crate::box_trait::{NyashBox, StringBox, BoolBox, BoxCore, BoxBase};
 use crate::boxes::IntentBox;
 use crate::method_box::MethodBox;
-use crate::boxes::result::{ResultBox, NyashResultBox};
+use crate::boxes::result::ResultBox;
 use crate::transport::{Transport, InProcessTransport};
 use std::any::Any;
 use std::sync::{RwLock, Arc};
@@ -8,13 +8,12 @@
 use crate::ast::ASTNode;
 use crate::box_trait::{NyashBox, StringBox, IntegerBox, BoolBox, VoidBox, SharedNyashBox};
 use crate::instance_v2::InstanceBox;
 use crate::parser::ParseError;
 use super::BuiltinStdlib;
 use crate::runtime::{NyashRuntime, NyashRuntimeBuilder};
 use crate::box_factory::BoxFactory;
-use std::sync::{Arc, Mutex, RwLock};
+use std::sync::{Arc, Mutex};
 use std::collections::{HashMap, HashSet};
-use super::{ControlFlow, BoxDeclaration, ConstructorContext, StaticBoxDefinition, StaticBoxState};
+use super::{ControlFlow, ConstructorContext, StaticBoxDefinition, StaticBoxState};
 use super::{RuntimeError, SharedState};
 use std::fs::OpenOptions;
 use std::io::Write;
@@ -4,15 +4,13 @@

 use super::*;
 use crate::ast::ASTNode;
-use crate::box_trait::{NyashBox, StringBox, IntegerBox, BoolBox, VoidBox};
-use crate::boxes::{ArrayBox, FloatBox, MapBox, FutureBox};
-use crate::boxes::{BufferBox, JSONBox, HttpClientBox, StreamBox, RegexBox, IntentBox, SocketBox};
-use crate::boxes::{HTTPServerBox, HTTPRequestBox, HTTPResponseBox, MathBox, TimeBox, DateTimeBox};
+use crate::box_trait::{NyashBox, StringBox, IntegerBox, VoidBox};
+use crate::boxes::MapBox;
+use crate::boxes::{IntentBox, SocketBox};
+use crate::boxes::{HTTPServerBox, HTTPRequestBox, HTTPResponseBox, DateTimeBox};
 use crate::boxes::{RandomBox, SoundBox, DebugBox};
 use crate::instance_v2::InstanceBox;
 use crate::channel_box::ChannelBox;
 use crate::interpreter::{NyashInterpreter, RuntimeError};
 use crate::interpreter::finalization;

 // Debug macro gated by NYASH_DEBUG=1
 macro_rules! idebug {
@@ -7,17 +7,14 @@

 // Import all necessary dependencies
 use crate::ast::{ASTNode, CatchClause};
-use crate::box_trait::{NyashBox, StringBox, IntegerBox, BoolBox, VoidBox, ErrorBox, BoxCore};
+use crate::box_trait::{NyashBox, StringBox, BoolBox, VoidBox, ErrorBox, BoxCore};
 use crate::boxes::FutureBox;
 use crate::instance_v2::InstanceBox;
 use crate::channel_box::ChannelBox;
-use crate::boxes::math_box::{MathBox, RangeBox};
-use crate::boxes::time_box::{TimeBox, TimerBox};
 use crate::boxes::map_box::MapBox;
+use crate::boxes::math_box::MathBox;
+use crate::boxes::time_box::TimerBox;
 use crate::boxes::random_box::RandomBox;
 use crate::boxes::sound_box::SoundBox;
 use crate::boxes::debug_box::DebugBox;
 use crate::method_box::MethodBox;

 // WASM-specific Box types (conditionally included)
 #[cfg(target_arch = "wasm32")]
@@ -27,7 +27,7 @@ impl NyashParser {
     fn parse_pipeline(&mut self) -> Result<ASTNode, ParseError> {
         let mut expr = self.parse_coalesce()?;

-        while self.match_token(&TokenType::PIPE_FORWARD) {
+        while self.match_token(&TokenType::PipeForward) {
             if !is_sugar_enabled() {
                 let line = self.current_token().line;
                 return Err(ParseError::UnexpectedToken {
@@ -85,7 +85,7 @@ impl NyashParser {
     /// Default value (??): x ?? y => peek x { null => y, else => x }
     fn parse_coalesce(&mut self) -> Result<ASTNode, ParseError> {
         let mut expr = self.parse_or()?;
-        while self.match_token(&TokenType::QMARK_QMARK) {
+        while self.match_token(&TokenType::QmarkQmark) {
             if !is_sugar_enabled() {
                 let line = self.current_token().line;
                 return Err(ParseError::UnexpectedToken {
@@ -157,7 +157,7 @@ impl NyashParser {
     /// Bitwise OR: |
     fn parse_bit_or(&mut self) -> Result<ASTNode, ParseError> {
         let mut expr = self.parse_bit_xor()?;
-        while self.match_token(&TokenType::BIT_OR) {
+        while self.match_token(&TokenType::BitOr) {
             let operator = BinaryOperator::BitOr;
             self.advance();
             let right = self.parse_bit_xor()?;
@@ -169,7 +169,7 @@ impl NyashParser {
     /// Bitwise XOR: ^
     fn parse_bit_xor(&mut self) -> Result<ASTNode, ParseError> {
         let mut expr = self.parse_bit_and()?;
-        while self.match_token(&TokenType::BIT_XOR) {
+        while self.match_token(&TokenType::BitXor) {
             let operator = BinaryOperator::BitXor;
             self.advance();
             let right = self.parse_bit_and()?;
@@ -181,7 +181,7 @@ impl NyashParser {
     /// Bitwise AND: &
     fn parse_bit_and(&mut self) -> Result<ASTNode, ParseError> {
         let mut expr = self.parse_equality()?;
-        while self.match_token(&TokenType::BIT_AND) {
+        while self.match_token(&TokenType::BitAnd) {
             let operator = BinaryOperator::BitAnd;
             self.advance();
             let right = self.parse_equality()?;
@@ -297,13 +297,13 @@ impl NyashParser {
     fn parse_shift(&mut self) -> Result<ASTNode, ParseError> {
         let mut expr = self.parse_factor()?;
         loop {
-            if self.match_token(&TokenType::SHIFT_LEFT) {
+            if self.match_token(&TokenType::ShiftLeft) {
                 self.advance();
                 let rhs = self.parse_factor()?;
                 expr = ASTNode::BinaryOp { operator: BinaryOperator::Shl, left: Box::new(expr), right: Box::new(rhs), span: Span::unknown() };
                 continue;
             }
-            if self.match_token(&TokenType::SHIFT_RIGHT) {
+            if self.match_token(&TokenType::ShiftRight) {
                 self.advance();
                 let rhs = self.parse_factor()?;
                 expr = ASTNode::BinaryOp { operator: BinaryOperator::Shr, left: Box::new(expr), right: Box::new(rhs), span: Span::unknown() };
@@ -403,7 +403,7 @@ impl NyashParser {
         let is_else = matches!(self.current_token().token_type, TokenType::ELSE);
         if is_else {
             self.advance(); // consume 'else'
-            self.consume(TokenType::FAT_ARROW)?;
+            self.consume(TokenType::FatArrow)?;
             // else arm: block or expression
             let expr = if self.match_token(&TokenType::LBRACE) {
                 // treat the block as an expression (the last statement's value is returned)
@@ -424,7 +424,7 @@ impl NyashParser {
         } else {
             // only literals are allowed (P0)
             let lit = self.parse_literal_only()?;
-            self.consume(TokenType::FAT_ARROW)?;
+            self.consume(TokenType::FatArrow)?;
             // arm: block or expression
             let expr = if self.match_token(&TokenType::LBRACE) {
                 self.advance(); // consume '{'
@@ -532,7 +532,7 @@ impl NyashParser {
                     line,
                 });
             }
-        } else if self.match_token(&TokenType::QMARK_DOT) {
+        } else if self.match_token(&TokenType::QmarkDot) {
             if !is_sugar_enabled() {
                 let line = self.current_token().line;
                 return Err(ParseError::UnexpectedToken {
@@ -736,7 +736,7 @@ impl NyashParser {
             TokenType::IDENTIFIER(name) => {
                 let parent = name.clone();
                 self.advance();
-                if self.match_token(&TokenType::DOUBLE_COLON) {
+                if self.match_token(&TokenType::DoubleColon) {
                     // Parent::method(args)
                     self.advance(); // consume '::'
                     let method = match &self.current_token().token_type {
@@ -223,10 +223,10 @@ impl NyashParser {
                 Err(ParseError::InvalidStatement { line })
             }
         }
-        } else if self.match_token(&TokenType::PLUS_ASSIGN) ||
-                  self.match_token(&TokenType::MINUS_ASSIGN) ||
-                  self.match_token(&TokenType::MUL_ASSIGN) ||
-                  self.match_token(&TokenType::DIV_ASSIGN) {
+        } else if self.match_token(&TokenType::PlusAssign) ||
+                  self.match_token(&TokenType::MinusAssign) ||
+                  self.match_token(&TokenType::MulAssign) ||
+                  self.match_token(&TokenType::DivAssign) {
             if !is_sugar_enabled() {
                 let line = self.current_token().line;
                 return Err(ParseError::UnexpectedToken {
@@ -237,10 +237,10 @@ impl NyashParser {
             }
             // determine operator
             let op = match &self.current_token().token_type {
-                TokenType::PLUS_ASSIGN => crate::ast::BinaryOperator::Add,
-                TokenType::MINUS_ASSIGN => crate::ast::BinaryOperator::Subtract,
-                TokenType::MUL_ASSIGN => crate::ast::BinaryOperator::Multiply,
-                TokenType::DIV_ASSIGN => crate::ast::BinaryOperator::Divide,
+                TokenType::PlusAssign => crate::ast::BinaryOperator::Add,
+                TokenType::MinusAssign => crate::ast::BinaryOperator::Subtract,
+                TokenType::MulAssign => crate::ast::BinaryOperator::Multiply,
+                TokenType::DivAssign => crate::ast::BinaryOperator::Divide,
                 _ => unreachable!(),
             };
             self.advance(); // consume 'op='
@@ -18,13 +18,13 @@ fn tokenizer_has_basic_sugar_tokens() {
     let mut t = NyashTokenizer::new("|> ?.? ?? += -= *= /= ..");
     let toks = t.tokenize().unwrap();
     let has = |p: fn(&TokenType) -> bool| -> bool { toks.iter().any(|k| p(&k.token_type)) };
-    assert!(has(|k| matches!(k, TokenType::PIPE_FORWARD)));
-    assert!(has(|k| matches!(k, TokenType::QMARK_DOT)));
-    assert!(has(|k| matches!(k, TokenType::QMARK_QMARK)));
-    assert!(has(|k| matches!(k, TokenType::PLUS_ASSIGN)));
-    assert!(has(|k| matches!(k, TokenType::MINUS_ASSIGN)));
-    assert!(has(|k| matches!(k, TokenType::MUL_ASSIGN)));
-    assert!(has(|k| matches!(k, TokenType::DIV_ASSIGN)));
+    assert!(has(|k| matches!(k, TokenType::PipeForward)));
+    assert!(has(|k| matches!(k, TokenType::QmarkDot)));
+    assert!(has(|k| matches!(k, TokenType::QmarkQmark)));
+    assert!(has(|k| matches!(k, TokenType::PlusAssign)));
+    assert!(has(|k| matches!(k, TokenType::MinusAssign)));
+    assert!(has(|k| matches!(k, TokenType::MulAssign)));
+    assert!(has(|k| matches!(k, TokenType::DivAssign)));
     assert!(has(|k| matches!(k, TokenType::RANGE)));
 }

@@ -59,12 +59,12 @@ pub enum TokenType {
     IMPORT, // import (Phase 12.7)

     // Operators (longest ones defined first)
-    SHIFT_LEFT, // << (bitwise shift-left)
-    SHIFT_RIGHT, // >> (bitwise shift-right)
-    BIT_AND, // & (bitwise and)
-    BIT_OR, // | (bitwise or)
-    BIT_XOR, // ^ (bitwise xor)
-    FAT_ARROW, // => (peek arms)
+    ShiftLeft, // << (bitwise shift-left)
+    ShiftRight, // >> (bitwise shift-right)
+    BitAnd, // & (bitwise and)
+    BitOr, // | (bitwise or)
+    BitXor, // ^ (bitwise xor)
+    FatArrow, // => (peek arms)
     EQUALS, // ==
     NotEquals, // !=
     LessEquals, // <=
@@ -72,13 +72,13 @@ pub enum TokenType {
     AND, // && or "and"
     OR, // || or "or"
     // Phase 12.7-B basic sugar: two-character operators (longest match first)
-    PIPE_FORWARD, // |>
-    QMARK_DOT, // ?.
-    QMARK_QMARK, // ??
-    PLUS_ASSIGN, // +=
-    MINUS_ASSIGN, // -=
-    MUL_ASSIGN, // *=
-    DIV_ASSIGN, // /=
+    PipeForward, // |>
+    QmarkDot, // ?.
+    QmarkQmark, // ??
+    PlusAssign, // +=
+    MinusAssign, // -=
+    MulAssign, // *=
+    DivAssign, // /=
     RANGE, // ..
     LESS, // <
     GREATER, // >
@@ -91,7 +91,7 @@ pub enum TokenType {

     // Symbols
     DOT, // .
-    DOUBLE_COLON, // :: (Parent::method) - for P1 (definition only)
+    DoubleColon, // :: (Parent::method) - for P1 (definition only)
     LPAREN, // (
     RPAREN, // )
     LBRACE, // {
@@ -194,37 +194,37 @@ impl NyashTokenizer {
            Some('|') if self.peek_char() == Some('>') => {
                self.advance();
                self.advance();
-               return Ok(Token::new(TokenType::PIPE_FORWARD, start_line, start_column));
+               return Ok(Token::new(TokenType::PipeForward, start_line, start_column));
            }
            Some('?') if self.peek_char() == Some('.') => {
                self.advance();
                self.advance();
-               return Ok(Token::new(TokenType::QMARK_DOT, start_line, start_column));
+               return Ok(Token::new(TokenType::QmarkDot, start_line, start_column));
            }
            Some('?') if self.peek_char() == Some('?') => {
                self.advance();
                self.advance();
-               return Ok(Token::new(TokenType::QMARK_QMARK, start_line, start_column));
+               return Ok(Token::new(TokenType::QmarkQmark, start_line, start_column));
            }
            Some('+') if self.peek_char() == Some('=') => {
                self.advance();
                self.advance();
-               return Ok(Token::new(TokenType::PLUS_ASSIGN, start_line, start_column));
+               return Ok(Token::new(TokenType::PlusAssign, start_line, start_column));
            }
            Some('-') if self.peek_char() == Some('=') => {
                self.advance();
                self.advance();
-               return Ok(Token::new(TokenType::MINUS_ASSIGN, start_line, start_column));
+               return Ok(Token::new(TokenType::MinusAssign, start_line, start_column));
            }
            Some('*') if self.peek_char() == Some('=') => {
                self.advance();
                self.advance();
-               return Ok(Token::new(TokenType::MUL_ASSIGN, start_line, start_column));
+               return Ok(Token::new(TokenType::MulAssign, start_line, start_column));
            }
            Some('/') if self.peek_char() == Some('=') => {
                self.advance();
                self.advance();
-               return Ok(Token::new(TokenType::DIV_ASSIGN, start_line, start_column));
+               return Ok(Token::new(TokenType::DivAssign, start_line, start_column));
            }
            Some('.') if self.peek_char() == Some('.') => {
                self.advance();
@@ -256,17 +256,17 @@ impl NyashTokenizer {
            Some('>') if self.peek_char() == Some('>') && !Self::strict_12_7() => {
                self.advance();
                self.advance();
-               Ok(Token::new(TokenType::SHIFT_RIGHT, start_line, start_column))
+               Ok(Token::new(TokenType::ShiftRight, start_line, start_column))
            }
            Some(':') if self.peek_char() == Some(':') => {
                self.advance();
                self.advance();
-               Ok(Token::new(TokenType::DOUBLE_COLON, start_line, start_column))
+               Ok(Token::new(TokenType::DoubleColon, start_line, start_column))
            }
            Some('=') if self.peek_char() == Some('>') => {
                self.advance();
                self.advance();
-               Ok(Token::new(TokenType::FAT_ARROW, start_line, start_column))
+               Ok(Token::new(TokenType::FatArrow, start_line, start_column))
            }
            Some('=') if self.peek_char() == Some('=') => {
                self.advance();
@@ -282,7 +282,7 @@ impl NyashTokenizer {
            Some('<') if self.peek_char() == Some('<') => {
                self.advance();
                self.advance();
-               Ok(Token::new(TokenType::SHIFT_LEFT, start_line, start_column))
+               Ok(Token::new(TokenType::ShiftLeft, start_line, start_column))
            }
            Some('<') if self.peek_char() == Some('=') => {
                self.advance();
@@ -307,7 +307,7 @@ impl NyashTokenizer {
            Some('|') if self.peek_char() == Some('>') => {
                self.advance();
                self.advance();
-               return Ok(Token::new(TokenType::PIPE_FORWARD, start_line, start_column));
+               return Ok(Token::new(TokenType::PipeForward, start_line, start_column));
            }
            Some('<') => {
                self.advance();
@@ -319,15 +319,15 @@ impl NyashTokenizer {
            }
            Some('&') => {
                self.advance();
-               Ok(Token::new(TokenType::BIT_AND, start_line, start_column))
+               Ok(Token::new(TokenType::BitAnd, start_line, start_column))
            }
            Some('|') => {
                self.advance();
-               Ok(Token::new(TokenType::BIT_OR, start_line, start_column))
+               Ok(Token::new(TokenType::BitOr, start_line, start_column))
            }
            Some('^') => {
                self.advance();
-               Ok(Token::new(TokenType::BIT_XOR, start_line, start_column))
+               Ok(Token::new(TokenType::BitXor, start_line, start_column))
            }
            Some('=') => {
                self.advance();
@@ -698,7 +698,7 @@ mod tests {
        let mut tokenizer = NyashTokenizer::new(">> == != <= >= < >");
        let tokens = tokenizer.tokenize().unwrap();

-       assert_eq!(tokens[0].token_type, TokenType::SHIFT_RIGHT);
+       assert_eq!(tokens[0].token_type, TokenType::ShiftRight);
        assert_eq!(tokens[1].token_type, TokenType::EQUALS);
        assert_eq!(tokens[2].token_type, TokenType::NotEquals);
        assert_eq!(tokens[3].token_type, TokenType::LessEquals);
@@ -787,13 +787,13 @@ value"#;
        // A consolidated case for readability
        let mut t2 = NyashTokenizer::new("|> ?.? ?? += -= *= /= ..");
        let toks = t2.tokenize().unwrap();
-       assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::PIPE_FORWARD)));
-       assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::QMARK_DOT)));
-       assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::QMARK_QMARK)));
-       assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::PLUS_ASSIGN)));
-       assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::MINUS_ASSIGN)));
-       assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::MUL_ASSIGN)));
-       assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::DIV_ASSIGN)));
+       assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::PipeForward)));
+       assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::QmarkDot)));
+       assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::QmarkQmark)));
+       assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::PlusAssign)));
+       assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::MinusAssign)));
+       assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::MulAssign)));
+       assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::DivAssign)));
        assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::RANGE)));
    }

@@ -803,9 +803,9 @@ value"#;
        let mut t = NyashTokenizer::new("?? ? ?. .. .");
        let toks = t.tokenize().unwrap();
        let kinds: Vec<&TokenType> = toks.iter().map(|k| &k.token_type).collect();
-       assert!(matches!(kinds[0], TokenType::QMARK_QMARK));
+       assert!(matches!(kinds[0], TokenType::QmarkQmark));
        assert!(matches!(kinds[1], TokenType::QUESTION));
-       assert!(matches!(kinds[2], TokenType::QMARK_DOT));
+       assert!(matches!(kinds[2], TokenType::QmarkDot));
        assert!(matches!(kinds[3], TokenType::RANGE));
        assert!(matches!(kinds[4], TokenType::DOT));
    }