add tests

tezlm 2023-10-03 19:01:06 -07:00
parent 6e6d7f765b
commit 63bb50f6b3
Signed by: tezlm
GPG key ID: 649733FCD94AFBBA
4 changed files with 43 additions and 24 deletions

src/lexer.rs

@@ -42,14 +42,14 @@ pub enum Symbol {
 }

 impl Lexer {
-    pub fn new(input: String) -> Lexer {
+    pub fn new(input: &str) -> Lexer {
         Lexer {
             input: input.chars().collect(),
             pos: 0,
         }
     }

-    pub fn next(&mut self) -> Result<Option<Token>, Error> {
+    pub fn lex_any(&mut self) -> Result<Option<Token>, Error> {
         let Some(&ch) = self.input.get(self.pos) else {
             return Ok(None);
         };
@@ -95,7 +95,7 @@ impl Lexer {
             },
             ch if ch.is_whitespace() => {
                 self.pos += 1;
-                return self.next();
+                return self.lex_any();
             }
             _ => self.lex_op()?,
         };
@@ -345,3 +345,11 @@ impl Lexer {
         Ok(Token::Symbol(symbol))
     }
 }
+
+impl Iterator for Lexer {
+    type Item = Result<Token, Error>;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        self.lex_any().transpose()
+    }
+}
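
With the Iterator impl above, callers can drain the lexer through standard iterator adapters instead of calling lex_any() in a loop. A small usage sketch, not part of the commit, against the items this commit exports from lib.rs (lang, Lexer, Token, Error):

    // Usage sketch (not in the commit): collect every token, stopping at the
    // first lexing error, via the Iterator impl added above.
    use lang::{lexer::{Lexer, Token}, Error};

    fn lex_all(src: &str) -> Result<Vec<Token>, Error> {
        Lexer::new(src).collect()
    }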

src/lib.rs (new file, 10 additions)

@@ -0,0 +1,10 @@
+#![allow(dead_code, clippy::single_match, clippy::only_used_in_recursion)]
+
+pub mod data;
+pub mod error;
+pub mod generator;
+pub mod lexer;
+pub mod parser;
+pub mod types;
+
+pub use error::Error;

src/main.rs

@@ -4,33 +4,20 @@ a second time when generating (so the types are known), there should be
 a better way
 */

-#![allow(dead_code, clippy::single_match, clippy::only_used_in_recursion)]
-
-mod data;
-mod error;
-mod generator;
-mod lexer;
-mod parser;
-mod types;
-
-pub use error::Error;
+use lang::{generator, lexer, parser};
 use generator::Generator;

 fn main() {
     let source = std::fs::read_to_string(std::env::args().skip(1).next().expect("no filename!")).expect("no source!");
-    let mut lexer = lexer::Lexer::new(source);
+    let lexer = lexer::Lexer::new(&source);

-    let mut tokens = vec![];
-    loop {
-        match lexer.next() {
-            Ok(None) => break,
-            Ok(Some(token)) => tokens.push(token),
-            Err(error) => {
-                eprintln!("error: {:?}", error);
-                return;
-            }
+    let tokens = match lexer.collect::<Result<Vec<_>, _>>() {
+        Ok(tokens) => tokens,
+        Err(error) => {
+            eprintln!("error: {:?}", error);
+            return;
+        }
         }
-    }
+    };

     // dbg!(&tokens);
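
The new main.rs leans on the standard library's FromIterator impl for Result, which short-circuits on the first Err. A standalone sketch of that behaviour, using plain integers rather than the crate's own token types:

    // Collecting an iterator of Results yields Ok(Vec<..>) only if every item
    // is Ok; otherwise the first Err is returned, which is what
    // `lexer.collect::<Result<Vec<_>, _>>()` relies on.
    fn main() {
        let ok: Result<Vec<i32>, &str> = vec![Ok(1), Ok(2)].into_iter().collect();
        let err: Result<Vec<i32>, &str> = vec![Ok(1), Err("lex error"), Ok(3)].into_iter().collect();
        assert_eq!(ok, Ok(vec![1, 2]));
        assert_eq!(err, Err("lex error"));
    }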

tests/lexer.rs (new file, 14 additions)

@@ -0,0 +1,14 @@
+use lang::{lexer::{Lexer, Token, Symbol}, Error};
+
+#[test]
+fn test_foo() {
+    let tokens: Result<Vec<_>, Error> = Lexer::new("1 * 5 / 3").collect();
+    let tokens = tokens.expect("should parse");
+    assert_eq!(tokens, vec![
+        Token::Number { radix: 10, text: "1".into() },
+        Token::Symbol(Symbol::Star),
+        Token::Number { radix: 10, text: "5".into() },
+        Token::Symbol(Symbol::Slash),
+        Token::Number { radix: 10, text: "3".into() },
+    ])
+}
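
Since lex_any() stays public, it can also be driven by hand. A hypothetical extra test, not in this commit, assuming the same imports as above and that lexing "7" consumes the whole input so the next call hits the Ok(None) end-of-input path shown in lex_any():

    // Hypothetical test: pull tokens one at a time via lex_any() instead of
    // the Iterator impl, then observe the Ok(None) end-of-input marker.
    #[test]
    fn test_lex_any_by_hand() {
        let mut lexer = Lexer::new("7");
        assert!(matches!(lexer.lex_any(), Ok(Some(Token::Number { .. }))));
        assert!(matches!(lexer.lex_any(), Ok(None)));
    }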