🧹 Major warning cleanup: 200+ → 102 warnings (50% reduction)
- Fix TokenType enum naming convention (SNAKE_CASE → CamelCase; see the sketch below)
- Remove 60+ unused imports across multiple modules
- Clean up interpreter, backend, and box modules
- LLVM build now passes with significantly fewer warnings
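The rename only changes variant spelling; behavior is untouched. A minimal illustrative sketch (not a hunk from this commit; the variant names are taken from the diff below):

// Before: SNAKE_CASE variants trip Rust's non_camel_case_types lint.
// After: CamelCase variants, so the warning disappears.
pub enum TokenType {
    PipeForward, // |>  (was PIPE_FORWARD)
    QmarkQmark,  // ??  (was QMARK_QMARK)
    ShiftLeft,   // <<  (was SHIFT_LEFT)
}

// Call sites update one-for-one, e.g.:
//   self.match_token(&TokenType::PIPE_FORWARD)  ->  self.match_token(&TokenType::PipeForward)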
@@ -1,13 +1,10 @@
 use super::helpers::{as_float, as_int, map_type};
 use super::LLVMCompiler;
 use crate::backend::llvm::context::CodegenContext;
-use crate::box_trait::{BoolBox, IntegerBox, StringBox};
-use crate::boxes::{function_box::FunctionBox, math_box::FloatBox, null_box::NullBox};
 use crate::mir::function::MirModule;
-use crate::mir::instruction::{BinaryOp, CompareOp, ConstValue, MirInstruction, UnaryOp};
+use crate::mir::instruction::{ConstValue, MirInstruction, UnaryOp};
 use crate::mir::ValueId;
 use inkwell::context::Context;
-use inkwell::types::BasicType; // for as_basic_type_enum()
 use inkwell::{
     types::{BasicTypeEnum, FloatType, IntType, PointerType},
     values::{BasicValueEnum, FloatValue, FunctionValue, IntValue, PhiValue, PointerValue},
@@ -7,18 +7,14 @@
  * Typical Callers: runner (VM backend), instruction handlers (vm_instructions)
  */

-use crate::mir::{ConstValue, ValueId, BasicBlockId, MirModule, MirFunction, MirInstruction};
-use crate::box_trait::{NyashBox, StringBox, IntegerBox, BoolBox, VoidBox};
+use crate::mir::{ValueId, BasicBlockId, MirModule};
 use std::collections::HashMap;
-use std::sync::Arc;
 use crate::runtime::NyashRuntime;
 use crate::scope_tracker::ScopeTracker;
 // MirModule is already imported via crate::mir at top
-use crate::instance_v2::InstanceBox;
 use super::vm_phi::LoopExecutor;
 use std::time::Instant;
 use super::frame::ExecutionFrame;
-use super::control_flow;

 // Phase 9.78a: Import necessary components for unified Box handling
 // TODO: Re-enable when interpreter refactoring is complete
@@ -1,4 +1,4 @@
-use crate::box_trait::{NyashBox, StringBox, BoolBox, VoidBox, BoxCore, BoxBase};
+use crate::box_trait::{NyashBox, StringBox, BoolBox, BoxCore, BoxBase};
 use std::any::Any;

 #[derive(Debug, Clone)]
@@ -1,7 +1,7 @@
 use crate::box_trait::{NyashBox, StringBox, BoolBox, BoxCore, BoxBase};
 use crate::ast::ASTNode;
 use std::collections::HashMap;
-use std::sync::{Arc, Weak};
+use std::sync::Weak;
 use std::any::Any;

 #[derive(Debug)]
@@ -1,4 +1,4 @@
-use crate::box_trait::{NyashBox, StringBox, BoolBox, IntegerBox, VoidBox, BoxCore, BoxBase};
+use crate::box_trait::{NyashBox, StringBox, BoolBox, BoxCore, BoxBase};
 use std::any::Any;

 #[derive(Debug, Clone)]
@@ -38,7 +38,7 @@
 use crate::box_trait::{NyashBox, StringBox, BoolBox, BoxCore, BoxBase};
 use crate::boxes::IntentBox;
 use crate::method_box::MethodBox;
-use crate::boxes::result::{ResultBox, NyashResultBox};
+use crate::boxes::result::ResultBox;
 use crate::transport::{Transport, InProcessTransport};
 use std::any::Any;
 use std::sync::{RwLock, Arc};
@@ -8,13 +8,12 @@
 use crate::ast::ASTNode;
 use crate::box_trait::{NyashBox, StringBox, IntegerBox, BoolBox, VoidBox, SharedNyashBox};
 use crate::instance_v2::InstanceBox;
-use crate::parser::ParseError;
 use super::BuiltinStdlib;
 use crate::runtime::{NyashRuntime, NyashRuntimeBuilder};
 use crate::box_factory::BoxFactory;
-use std::sync::{Arc, Mutex, RwLock};
+use std::sync::{Arc, Mutex};
 use std::collections::{HashMap, HashSet};
-use super::{ControlFlow, BoxDeclaration, ConstructorContext, StaticBoxDefinition, StaticBoxState};
+use super::{ControlFlow, ConstructorContext, StaticBoxDefinition, StaticBoxState};
 use super::{RuntimeError, SharedState};
 use std::fs::OpenOptions;
 use std::io::Write;
@@ -4,15 +4,13 @@

 use super::*;
 use crate::ast::ASTNode;
-use crate::box_trait::{NyashBox, StringBox, IntegerBox, BoolBox, VoidBox};
-use crate::boxes::{ArrayBox, FloatBox, MapBox, FutureBox};
-use crate::boxes::{BufferBox, JSONBox, HttpClientBox, StreamBox, RegexBox, IntentBox, SocketBox};
-use crate::boxes::{HTTPServerBox, HTTPRequestBox, HTTPResponseBox, MathBox, TimeBox, DateTimeBox};
+use crate::box_trait::{NyashBox, StringBox, IntegerBox, VoidBox};
+use crate::boxes::MapBox;
+use crate::boxes::{IntentBox, SocketBox};
+use crate::boxes::{HTTPServerBox, HTTPRequestBox, HTTPResponseBox, DateTimeBox};
 use crate::boxes::{RandomBox, SoundBox, DebugBox};
 use crate::instance_v2::InstanceBox;
-use crate::channel_box::ChannelBox;
 use crate::interpreter::{NyashInterpreter, RuntimeError};
-use crate::interpreter::finalization;

 // Debug macro gated by NYASH_DEBUG=1
 macro_rules! idebug {
@@ -7,17 +7,14 @@

 // Import all necessary dependencies
 use crate::ast::{ASTNode, CatchClause};
-use crate::box_trait::{NyashBox, StringBox, IntegerBox, BoolBox, VoidBox, ErrorBox, BoxCore};
+use crate::box_trait::{NyashBox, StringBox, BoolBox, VoidBox, ErrorBox, BoxCore};
 use crate::boxes::FutureBox;
 use crate::instance_v2::InstanceBox;
 use crate::channel_box::ChannelBox;
-use crate::boxes::math_box::{MathBox, RangeBox};
-use crate::boxes::time_box::{TimeBox, TimerBox};
-use crate::boxes::map_box::MapBox;
+use crate::boxes::math_box::MathBox;
+use crate::boxes::time_box::TimerBox;
 use crate::boxes::random_box::RandomBox;
-use crate::boxes::sound_box::SoundBox;
 use crate::boxes::debug_box::DebugBox;
-use crate::method_box::MethodBox;

 // WASM-specific Box types (conditionally included)
 #[cfg(target_arch = "wasm32")]
@@ -27,7 +27,7 @@ impl NyashParser {
     fn parse_pipeline(&mut self) -> Result<ASTNode, ParseError> {
         let mut expr = self.parse_coalesce()?;

-        while self.match_token(&TokenType::PIPE_FORWARD) {
+        while self.match_token(&TokenType::PipeForward) {
             if !is_sugar_enabled() {
                 let line = self.current_token().line;
                 return Err(ParseError::UnexpectedToken {
@@ -85,7 +85,7 @@ impl NyashParser {
     /// Default value (??): x ?? y => peek x { null => y, else => x }
     fn parse_coalesce(&mut self) -> Result<ASTNode, ParseError> {
         let mut expr = self.parse_or()?;
-        while self.match_token(&TokenType::QMARK_QMARK) {
+        while self.match_token(&TokenType::QmarkQmark) {
             if !is_sugar_enabled() {
                 let line = self.current_token().line;
                 return Err(ParseError::UnexpectedToken {
@@ -157,7 +157,7 @@ impl NyashParser {
     /// Bitwise OR: |
     fn parse_bit_or(&mut self) -> Result<ASTNode, ParseError> {
         let mut expr = self.parse_bit_xor()?;
-        while self.match_token(&TokenType::BIT_OR) {
+        while self.match_token(&TokenType::BitOr) {
             let operator = BinaryOperator::BitOr;
             self.advance();
             let right = self.parse_bit_xor()?;
@@ -169,7 +169,7 @@ impl NyashParser {
     /// Bitwise XOR: ^
     fn parse_bit_xor(&mut self) -> Result<ASTNode, ParseError> {
         let mut expr = self.parse_bit_and()?;
-        while self.match_token(&TokenType::BIT_XOR) {
+        while self.match_token(&TokenType::BitXor) {
             let operator = BinaryOperator::BitXor;
             self.advance();
             let right = self.parse_bit_and()?;
@@ -181,7 +181,7 @@ impl NyashParser {
     /// Bitwise AND: &
     fn parse_bit_and(&mut self) -> Result<ASTNode, ParseError> {
         let mut expr = self.parse_equality()?;
-        while self.match_token(&TokenType::BIT_AND) {
+        while self.match_token(&TokenType::BitAnd) {
             let operator = BinaryOperator::BitAnd;
             self.advance();
             let right = self.parse_equality()?;
@@ -297,13 +297,13 @@ impl NyashParser {
     fn parse_shift(&mut self) -> Result<ASTNode, ParseError> {
         let mut expr = self.parse_factor()?;
         loop {
-            if self.match_token(&TokenType::SHIFT_LEFT) {
+            if self.match_token(&TokenType::ShiftLeft) {
                 self.advance();
                 let rhs = self.parse_factor()?;
                 expr = ASTNode::BinaryOp { operator: BinaryOperator::Shl, left: Box::new(expr), right: Box::new(rhs), span: Span::unknown() };
                 continue;
             }
-            if self.match_token(&TokenType::SHIFT_RIGHT) {
+            if self.match_token(&TokenType::ShiftRight) {
                 self.advance();
                 let rhs = self.parse_factor()?;
                 expr = ASTNode::BinaryOp { operator: BinaryOperator::Shr, left: Box::new(expr), right: Box::new(rhs), span: Span::unknown() };
@@ -403,7 +403,7 @@ impl NyashParser {
             let is_else = matches!(self.current_token().token_type, TokenType::ELSE);
             if is_else {
                 self.advance(); // consume 'else'
-                self.consume(TokenType::FAT_ARROW)?;
+                self.consume(TokenType::FatArrow)?;
                 // else arm: block or expression
                 let expr = if self.match_token(&TokenType::LBRACE) {
                     // Treat the block as an expression (the last statement's value is returned)
@@ -424,7 +424,7 @@ impl NyashParser {
             } else {
                 // Only literals are allowed (P0)
                 let lit = self.parse_literal_only()?;
-                self.consume(TokenType::FAT_ARROW)?;
+                self.consume(TokenType::FatArrow)?;
                 // Arm: block or expression
                 let expr = if self.match_token(&TokenType::LBRACE) {
                     self.advance(); // consume '{'
@@ -532,7 +532,7 @@ impl NyashParser {
                     line,
                 });
             }
-        } else if self.match_token(&TokenType::QMARK_DOT) {
+        } else if self.match_token(&TokenType::QmarkDot) {
             if !is_sugar_enabled() {
                 let line = self.current_token().line;
                 return Err(ParseError::UnexpectedToken {
@@ -736,7 +736,7 @@ impl NyashParser {
             TokenType::IDENTIFIER(name) => {
                 let parent = name.clone();
                 self.advance();
-                if self.match_token(&TokenType::DOUBLE_COLON) {
+                if self.match_token(&TokenType::DoubleColon) {
                     // Parent::method(args)
                     self.advance(); // consume '::'
                     let method = match &self.current_token().token_type {
@@ -223,10 +223,10 @@ impl NyashParser {
                 Err(ParseError::InvalidStatement { line })
             }
         }
-    } else if self.match_token(&TokenType::PLUS_ASSIGN) ||
-              self.match_token(&TokenType::MINUS_ASSIGN) ||
-              self.match_token(&TokenType::MUL_ASSIGN) ||
-              self.match_token(&TokenType::DIV_ASSIGN) {
+    } else if self.match_token(&TokenType::PlusAssign) ||
+              self.match_token(&TokenType::MinusAssign) ||
+              self.match_token(&TokenType::MulAssign) ||
+              self.match_token(&TokenType::DivAssign) {
         if !is_sugar_enabled() {
             let line = self.current_token().line;
             return Err(ParseError::UnexpectedToken {
@@ -237,10 +237,10 @@ impl NyashParser {
         }
         // determine operator
         let op = match &self.current_token().token_type {
-            TokenType::PLUS_ASSIGN => crate::ast::BinaryOperator::Add,
-            TokenType::MINUS_ASSIGN => crate::ast::BinaryOperator::Subtract,
-            TokenType::MUL_ASSIGN => crate::ast::BinaryOperator::Multiply,
-            TokenType::DIV_ASSIGN => crate::ast::BinaryOperator::Divide,
+            TokenType::PlusAssign => crate::ast::BinaryOperator::Add,
+            TokenType::MinusAssign => crate::ast::BinaryOperator::Subtract,
+            TokenType::MulAssign => crate::ast::BinaryOperator::Multiply,
+            TokenType::DivAssign => crate::ast::BinaryOperator::Divide,
             _ => unreachable!(),
         };
         self.advance(); // consume 'op='
@@ -18,13 +18,13 @@ fn tokenizer_has_basic_sugar_tokens() {
     let mut t = NyashTokenizer::new("|> ?.? ?? += -= *= /= ..");
     let toks = t.tokenize().unwrap();
     let has = |p: fn(&TokenType) -> bool| -> bool { toks.iter().any(|k| p(&k.token_type)) };
-    assert!(has(|k| matches!(k, TokenType::PIPE_FORWARD)));
-    assert!(has(|k| matches!(k, TokenType::QMARK_DOT)));
-    assert!(has(|k| matches!(k, TokenType::QMARK_QMARK)));
-    assert!(has(|k| matches!(k, TokenType::PLUS_ASSIGN)));
-    assert!(has(|k| matches!(k, TokenType::MINUS_ASSIGN)));
-    assert!(has(|k| matches!(k, TokenType::MUL_ASSIGN)));
-    assert!(has(|k| matches!(k, TokenType::DIV_ASSIGN)));
+    assert!(has(|k| matches!(k, TokenType::PipeForward)));
+    assert!(has(|k| matches!(k, TokenType::QmarkDot)));
+    assert!(has(|k| matches!(k, TokenType::QmarkQmark)));
+    assert!(has(|k| matches!(k, TokenType::PlusAssign)));
+    assert!(has(|k| matches!(k, TokenType::MinusAssign)));
+    assert!(has(|k| matches!(k, TokenType::MulAssign)));
+    assert!(has(|k| matches!(k, TokenType::DivAssign)));
    assert!(has(|k| matches!(k, TokenType::RANGE)));
 }

@@ -59,12 +59,12 @@ pub enum TokenType {
     IMPORT, // import (Phase 12.7)

     // Operators (longer ones defined first)
-    SHIFT_LEFT, // << (bitwise shift-left)
-    SHIFT_RIGHT, // >> (bitwise shift-right)
-    BIT_AND, // & (bitwise and)
-    BIT_OR, // | (bitwise or)
-    BIT_XOR, // ^ (bitwise xor)
-    FAT_ARROW, // => (peek arms)
+    ShiftLeft, // << (bitwise shift-left)
+    ShiftRight, // >> (bitwise shift-right)
+    BitAnd, // & (bitwise and)
+    BitOr, // | (bitwise or)
+    BitXor, // ^ (bitwise xor)
+    FatArrow, // => (peek arms)
     EQUALS, // ==
     NotEquals, // !=
     LessEquals, // <=
@@ -72,13 +72,13 @@ pub enum TokenType {
     AND, // && or "and"
     OR, // || or "or"
     // Phase 12.7-B basic sugar: two-character operators (longest match first)
-    PIPE_FORWARD, // |>
-    QMARK_DOT, // ?.
-    QMARK_QMARK, // ??
-    PLUS_ASSIGN, // +=
-    MINUS_ASSIGN, // -=
-    MUL_ASSIGN, // *=
-    DIV_ASSIGN, // /=
+    PipeForward, // |>
+    QmarkDot, // ?.
+    QmarkQmark, // ??
+    PlusAssign, // +=
+    MinusAssign, // -=
+    MulAssign, // *=
+    DivAssign, // /=
     RANGE, // ..
     LESS, // <
     GREATER, // >
@@ -91,7 +91,7 @@ pub enum TokenType {

     // Symbols
     DOT, // .
-    DOUBLE_COLON, // :: (Parent::method) - for P1 (definition only)
+    DoubleColon, // :: (Parent::method) - for P1 (definition only)
     LPAREN, // (
     RPAREN, // )
     LBRACE, // {
@@ -194,37 +194,37 @@ impl NyashTokenizer {
             Some('|') if self.peek_char() == Some('>') => {
                 self.advance();
                 self.advance();
-                return Ok(Token::new(TokenType::PIPE_FORWARD, start_line, start_column));
+                return Ok(Token::new(TokenType::PipeForward, start_line, start_column));
             }
             Some('?') if self.peek_char() == Some('.') => {
                 self.advance();
                 self.advance();
-                return Ok(Token::new(TokenType::QMARK_DOT, start_line, start_column));
+                return Ok(Token::new(TokenType::QmarkDot, start_line, start_column));
             }
             Some('?') if self.peek_char() == Some('?') => {
                 self.advance();
                 self.advance();
-                return Ok(Token::new(TokenType::QMARK_QMARK, start_line, start_column));
+                return Ok(Token::new(TokenType::QmarkQmark, start_line, start_column));
             }
             Some('+') if self.peek_char() == Some('=') => {
                 self.advance();
                 self.advance();
-                return Ok(Token::new(TokenType::PLUS_ASSIGN, start_line, start_column));
+                return Ok(Token::new(TokenType::PlusAssign, start_line, start_column));
             }
             Some('-') if self.peek_char() == Some('=') => {
                 self.advance();
                 self.advance();
-                return Ok(Token::new(TokenType::MINUS_ASSIGN, start_line, start_column));
+                return Ok(Token::new(TokenType::MinusAssign, start_line, start_column));
             }
             Some('*') if self.peek_char() == Some('=') => {
                 self.advance();
                 self.advance();
-                return Ok(Token::new(TokenType::MUL_ASSIGN, start_line, start_column));
+                return Ok(Token::new(TokenType::MulAssign, start_line, start_column));
             }
             Some('/') if self.peek_char() == Some('=') => {
                 self.advance();
                 self.advance();
-                return Ok(Token::new(TokenType::DIV_ASSIGN, start_line, start_column));
+                return Ok(Token::new(TokenType::DivAssign, start_line, start_column));
             }
             Some('.') if self.peek_char() == Some('.') => {
                 self.advance();
@@ -256,17 +256,17 @@ impl NyashTokenizer {
             Some('>') if self.peek_char() == Some('>') && !Self::strict_12_7() => {
                 self.advance();
                 self.advance();
-                Ok(Token::new(TokenType::SHIFT_RIGHT, start_line, start_column))
+                Ok(Token::new(TokenType::ShiftRight, start_line, start_column))
             }
             Some(':') if self.peek_char() == Some(':') => {
                 self.advance();
                 self.advance();
-                Ok(Token::new(TokenType::DOUBLE_COLON, start_line, start_column))
+                Ok(Token::new(TokenType::DoubleColon, start_line, start_column))
             }
             Some('=') if self.peek_char() == Some('>') => {
                 self.advance();
                 self.advance();
-                Ok(Token::new(TokenType::FAT_ARROW, start_line, start_column))
+                Ok(Token::new(TokenType::FatArrow, start_line, start_column))
             }
             Some('=') if self.peek_char() == Some('=') => {
                 self.advance();
@@ -282,7 +282,7 @@ impl NyashTokenizer {
             Some('<') if self.peek_char() == Some('<') => {
                 self.advance();
                 self.advance();
-                Ok(Token::new(TokenType::SHIFT_LEFT, start_line, start_column))
+                Ok(Token::new(TokenType::ShiftLeft, start_line, start_column))
             }
             Some('<') if self.peek_char() == Some('=') => {
                 self.advance();
@@ -307,7 +307,7 @@ impl NyashTokenizer {
             Some('|') if self.peek_char() == Some('>') => {
                 self.advance();
                 self.advance();
-                return Ok(Token::new(TokenType::PIPE_FORWARD, start_line, start_column));
+                return Ok(Token::new(TokenType::PipeForward, start_line, start_column));
             }
             Some('<') => {
                 self.advance();
@@ -319,15 +319,15 @@ impl NyashTokenizer {
             }
             Some('&') => {
                 self.advance();
-                Ok(Token::new(TokenType::BIT_AND, start_line, start_column))
+                Ok(Token::new(TokenType::BitAnd, start_line, start_column))
             }
             Some('|') => {
                 self.advance();
-                Ok(Token::new(TokenType::BIT_OR, start_line, start_column))
+                Ok(Token::new(TokenType::BitOr, start_line, start_column))
             }
             Some('^') => {
                 self.advance();
-                Ok(Token::new(TokenType::BIT_XOR, start_line, start_column))
+                Ok(Token::new(TokenType::BitXor, start_line, start_column))
             }
             Some('=') => {
                 self.advance();
@@ -698,7 +698,7 @@ mod tests {
         let mut tokenizer = NyashTokenizer::new(">> == != <= >= < >");
         let tokens = tokenizer.tokenize().unwrap();

-        assert_eq!(tokens[0].token_type, TokenType::SHIFT_RIGHT);
+        assert_eq!(tokens[0].token_type, TokenType::ShiftRight);
         assert_eq!(tokens[1].token_type, TokenType::EQUALS);
         assert_eq!(tokens[2].token_type, TokenType::NotEquals);
         assert_eq!(tokens[3].token_type, TokenType::LessEquals);
@@ -787,13 +787,13 @@ value"#;
     // A combined case for clarity
     let mut t2 = NyashTokenizer::new("|> ?.? ?? += -= *= /= ..");
     let toks = t2.tokenize().unwrap();
-    assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::PIPE_FORWARD)));
-    assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::QMARK_DOT)));
-    assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::QMARK_QMARK)));
-    assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::PLUS_ASSIGN)));
-    assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::MINUS_ASSIGN)));
-    assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::MUL_ASSIGN)));
-    assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::DIV_ASSIGN)));
+    assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::PipeForward)));
+    assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::QmarkDot)));
+    assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::QmarkQmark)));
+    assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::PlusAssign)));
+    assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::MinusAssign)));
+    assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::MulAssign)));
+    assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::DivAssign)));
     assert!(toks.iter().any(|k| matches!(k.token_type, TokenType::RANGE)));
 }

@@ -803,9 +803,9 @@ value"#;
     let mut t = NyashTokenizer::new("?? ? ?. .. .");
     let toks = t.tokenize().unwrap();
     let kinds: Vec<&TokenType> = toks.iter().map(|k| &k.token_type).collect();
-    assert!(matches!(kinds[0], TokenType::QMARK_QMARK));
+    assert!(matches!(kinds[0], TokenType::QmarkQmark));
     assert!(matches!(kinds[1], TokenType::QUESTION));
-    assert!(matches!(kinds[2], TokenType::QMARK_DOT));
+    assert!(matches!(kinds[2], TokenType::QmarkDot));
     assert!(matches!(kinds[3], TokenType::RANGE));
     assert!(matches!(kinds[4], TokenType::DOT));
 }