nicer errors

tezlm 2023-10-04 00:09:35 -07:00
parent 95c1ca72e6
commit 8f06f8a502
Signed by: tezlm
GPG key ID: 649733FCD94AFBBA
14 changed files with 672 additions and 279 deletions

Cargo.lock (generated), 164 lines changed

@ -11,6 +11,12 @@ dependencies = [
"gimli",
]
[[package]]
name = "adler"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]]
name = "ahash"
version = "0.8.3"
@ -99,6 +105,30 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "backtrace"
version = "0.3.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837"
dependencies = [
"addr2line",
"cc",
"cfg-if",
"libc",
"miniz_oxide",
"object",
"rustc-demangle",
]
[[package]]
name = "backtrace-ext"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "537beee3be4a18fb023b570f80e3ae28003db9167a751266b259926e25539d50"
dependencies = [
"backtrace",
]
[[package]]
name = "base64"
version = "0.21.4"
@ -554,6 +584,12 @@ version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
[[package]]
name = "hermit-abi"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7"
[[package]]
name = "id-arena"
version = "2.2.1"
@ -581,6 +617,23 @@ dependencies = [
"serde",
]
[[package]]
name = "is-terminal"
version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b"
dependencies = [
"hermit-abi",
"rustix",
"windows-sys",
]
[[package]]
name = "is_ci"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "616cde7c720bb2bb5824a224687d8f77bfd38922027f01d825cd7453be5099fb"
[[package]]
name = "itertools"
version = "0.10.5"
@ -630,6 +683,9 @@ name = "lang"
version = "0.1.0"
dependencies = [
"clap",
"leb128",
"miette",
"thiserror",
"wasm-encoder 0.33.2",
"wasmtime",
]
@ -691,6 +747,47 @@ dependencies = [
"autocfg",
]
[[package]]
name = "miette"
version = "5.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59bb584eaeeab6bd0226ccf3509a69d7936d148cf3d036ad350abe35e8c6856e"
dependencies = [
"backtrace",
"backtrace-ext",
"is-terminal",
"miette-derive",
"once_cell",
"owo-colors",
"supports-color",
"supports-hyperlinks",
"supports-unicode",
"terminal_size",
"textwrap",
"thiserror",
"unicode-width",
]
[[package]]
name = "miette-derive"
version = "5.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49e7bc1560b95a3c4a25d03de42fe76ca718ab92d1a22a55b9b4cf67b3ae635c"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "miniz_oxide"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7"
dependencies = [
"adler",
]
[[package]]
name = "object"
version = "0.32.1"
@ -709,6 +806,12 @@ version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
[[package]]
name = "owo-colors"
version = "3.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f"
[[package]]
name = "paste"
version = "1.0.14"
@ -951,6 +1054,12 @@ version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "942b4a808e05215192e39f4ab80813e599068285906cc91aa64f923db842bd5a"
[[package]]
name = "smawk"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f67ad224767faa3c7d8b6d91985b78e70a1324408abcb1cfcc2be4c06bc06043"
[[package]]
name = "sptr"
version = "0.3.2"
@ -969,6 +1078,34 @@ version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
[[package]]
name = "supports-color"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4950e7174bffabe99455511c39707310e7e9b440364a2fcb1cc21521be57b354"
dependencies = [
"is-terminal",
"is_ci",
]
[[package]]
name = "supports-hyperlinks"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f84231692eb0d4d41e4cdd0cabfdd2e6cd9e255e65f80c9aa7c98dd502b4233d"
dependencies = [
"is-terminal",
]
[[package]]
name = "supports-unicode"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b6c2cb240ab5dd21ed4906895ee23fe5a48acdbd15a3ce388e7b62a9b66baf7"
dependencies = [
"is-terminal",
]
[[package]]
name = "syn"
version = "2.0.37"
@ -986,6 +1123,27 @@ version = "0.12.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d0e916b1148c8e263850e1ebcbd046f333e0683c724876bb0da63ea4373dc8a"
[[package]]
name = "terminal_size"
version = "0.1.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "633c1a546cee861a1a6d0dc69ebeca693bf4296661ba7852b9d21d159e0506df"
dependencies = [
"libc",
"winapi",
]
[[package]]
name = "textwrap"
version = "0.15.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7b3e525a49ec206798b40326a44121291b530c963cfb01018f63e135bac543d"
dependencies = [
"smawk",
"unicode-linebreak",
"unicode-width",
]
[[package]]
name = "thiserror"
version = "1.0.49"
@ -1057,6 +1215,12 @@ version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
[[package]]
name = "unicode-linebreak"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f"
[[package]]
name = "unicode-normalization"
version = "0.1.22"


@ -5,5 +5,8 @@ edition = "2021"
[dependencies]
clap = { version = "4.4.6", features = ["derive"] }
leb128 = "0.2.5"
miette = { version = "5.10.0", features = ["fancy"] }
thiserror = "1.0.49"
wasm-encoder = "0.33.2"
wasmtime = "13.0.0"


@ -7,7 +7,10 @@ optimizations
- [ ] don't create local variables at all if they're only used once
*/
use std::io::Write;
use crate::data::{BinaryOp, Expr, Func, Literal, Pattern, PrefixOp, Statement, Type};
use crate::generator::OutputFormat;
use crate::parser::Context;
use crate::Error;
// use crate::generator::Generator;
@ -15,24 +18,6 @@ use crate::Error;
/// pipeline to convert parsed tree -> wat/wasm
pub struct Compiler<'a> {
output: Box<&'a mut dyn std::io::Write>,
// generator: Generator<'a>,
output_type: OutputType,
}
#[derive(Debug, PartialEq, Eq)]
pub enum OutputType {
Wat,
WatVerbose,
Wasm,
}
impl OutputType {
pub fn is_binary(&self) -> bool {
match self {
Self::Wat | Self::WatVerbose => false,
Self::Wasm => true,
}
}
}
struct Allocator {
@ -59,20 +44,14 @@ struct Allocator {
// "#;
impl<'a> Compiler<'a> {
pub fn new(output: Box<&mut dyn std::io::Write>, output_type: OutputType) -> Compiler {
pub fn new(output: Box<&mut dyn Write>, _output_format: OutputFormat) -> Compiler {
Compiler {
output,
output_type,
}
}
pub fn write_module(&mut self, stmts: &[Statement]) -> Result<(), Error> {
if self.output_type.is_binary() {
self.output.write_all(&[0x00, 0x61, 0x73, 0x6d])?; // magic
self.output.write_all(&[0x01, 0x00, 0x00, 0x00])?; // version
} else {
writeln!(self.output, "(module")?;
}
writeln!(self.output, "(module")?;
let mut ctx = Context::new();
// ctx.funcs.insert("print".to_string(), (vec![("message".to_string(), Type::String)], Type::empty()));
@ -94,7 +73,7 @@ impl<'a> Compiler<'a> {
match stmt {
Statement::Func(func) => self.write_func(&gctx, &ctx, func)?,
Statement::Let(..) | Statement::TailExpr(..) | Statement::Expr (..) => {
return Err(Error::syn("incorrect top level statement"))
return Err(Error::OtherError("incorrect top level statement".into()))
}
};
}
@ -102,15 +81,9 @@ impl<'a> Compiler<'a> {
Ok(())
}
fn write_comment(&mut self, comment: &str) -> Result<(), Error> {
if self.output_type == OutputType::WatVerbose {
writeln!(self.output, ";; {}", comment)?;
}
Ok(())
}
fn write_func(&mut self, parent_gctx: &GenContext, ctx: &Context, func: &Func) -> Result<(), Error> {
write!(self.output, "(func ${}", func.name)?;
if func.name == "main" {
write!(self.output, " (export \"_start\")")?;
} else if func.public {
@ -134,7 +107,7 @@ impl<'a> Compiler<'a> {
let inferred = func.block.infer(&ctx)?;
if func.ret != inferred {
return Err(Error::TypeError(format!(
return Err(Error::TypeErrorOld(format!(
"fn should return {:?}, but instead returns {inferred:?}",
func.ret
)));


@ -1,6 +1,6 @@
// TODO: TypedStatement and TypedExpression?
use crate::lexer::{Token, Symbol};
use crate::lexer::{TokenContent, Symbol};
#[rustfmt::skip]
#[derive(Debug, Clone)]
@ -114,27 +114,27 @@ impl BinaryOp {
}
}
pub fn from_token(token: &Token) -> Option<Self> {
pub fn from_token(token: &TokenContent) -> Option<Self> {
let op = match token {
Token::Symbol(Symbol::DoubleStar) => Self::Pow,
Token::Symbol(Symbol::Star) => Self::Mul,
Token::Symbol(Symbol::Slash) => Self::Div,
Token::Symbol(Symbol::Percent) => Self::Mod,
Token::Symbol(Symbol::Plus) => Self::Add,
Token::Symbol(Symbol::Minus) => Self::Sub,
Token::Symbol(Symbol::Shl) => Self::Shl,
Token::Symbol(Symbol::Shr) => Self::Shr,
Token::Symbol(Symbol::Less) => Self::Less,
Token::Symbol(Symbol::LessEq) => Self::LessEq,
Token::Symbol(Symbol::Greater) => Self::Greater,
Token::Symbol(Symbol::GreaterEq) => Self::GreaterEq,
Token::Symbol(Symbol::Eq) => Self::Eq,
Token::Symbol(Symbol::Neq) => Self::Neq,
Token::Symbol(Symbol::And) => Self::BitAnd,
Token::Symbol(Symbol::Carat) => Self::Xor,
Token::Symbol(Symbol::Pipe) => Self::BitOr,
Token::Symbol(Symbol::DoubleAnd) => Self::LogicAnd,
Token::Symbol(Symbol::DoublePipe) => Self::LogicOr,
TokenContent::Symbol(Symbol::DoubleStar) => Self::Pow,
TokenContent::Symbol(Symbol::Star) => Self::Mul,
TokenContent::Symbol(Symbol::Slash) => Self::Div,
TokenContent::Symbol(Symbol::Percent) => Self::Mod,
TokenContent::Symbol(Symbol::Plus) => Self::Add,
TokenContent::Symbol(Symbol::Minus) => Self::Sub,
TokenContent::Symbol(Symbol::Shl) => Self::Shl,
TokenContent::Symbol(Symbol::Shr) => Self::Shr,
TokenContent::Symbol(Symbol::Less) => Self::Less,
TokenContent::Symbol(Symbol::LessEq) => Self::LessEq,
TokenContent::Symbol(Symbol::Greater) => Self::Greater,
TokenContent::Symbol(Symbol::GreaterEq) => Self::GreaterEq,
TokenContent::Symbol(Symbol::Eq) => Self::Eq,
TokenContent::Symbol(Symbol::Neq) => Self::Neq,
TokenContent::Symbol(Symbol::And) => Self::BitAnd,
TokenContent::Symbol(Symbol::Carat) => Self::Xor,
TokenContent::Symbol(Symbol::Pipe) => Self::BitOr,
TokenContent::Symbol(Symbol::DoubleAnd) => Self::LogicAnd,
TokenContent::Symbol(Symbol::DoublePipe) => Self::LogicOr,
_ => return None,
};
Some(op)
@ -146,11 +146,11 @@ impl PrefixOp {
// todo!(),
// }
pub fn from_token(token: &Token) -> Option<Self> {
pub fn from_token(token: &TokenContent) -> Option<Self> {
let op = match token {
Token::Symbol(Symbol::Minus) => Self::Minus,
Token::Symbol(Symbol::Not) => Self::LogicNot,
Token::Symbol(Symbol::DoublePipe) => Self::BitNot,
TokenContent::Symbol(Symbol::Minus) => Self::Minus,
TokenContent::Symbol(Symbol::Not) => Self::LogicNot,
TokenContent::Symbol(Symbol::DoublePipe) => Self::BitNot,
_ => return None,
};
Some(op)
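
For reference, a minimal sketch of the new from_token shape, which now matches on TokenContent instead of the span-carrying Token enum; this assumes the data module is exported from the crate root the same way lexer and parser are.

use lang::data::BinaryOp; // assumes `data` is a public module of the crate
use lang::lexer::{Symbol, TokenContent};

fn main() {
    // from_token takes the span-free TokenContent, so the parser can hand it
    // `&tok.token` straight from a spanned Token
    let plus = TokenContent::Symbol(Symbol::Plus);
    assert!(matches!(BinaryOp::from_token(&plus), Some(BinaryOp::Add)));
    // anything that isn't an operator symbol just comes back as None
    assert!(BinaryOp::from_token(&TokenContent::Let).is_none());
}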


@ -1,18 +1,96 @@
#[derive(Debug)]
use miette::{Diagnostic, SourceSpan, NamedSource};
use thiserror::Error;
#[derive(Debug, Error, Diagnostic)]
#[error("some kind of error")]
pub enum Error {
SyntaxError(String),
TypeError(String),
SyntaxError(SyntaxError),
TypeError(TypeError),
TypeErrorOld(String),
ReferenceError(String),
IoError(std::io::Error),
OtherError(String),
}
#[derive(Debug, Error, Diagnostic)]
#[error("")]
#[diagnostic()]
pub struct SyntaxWrapper {
#[source_code]
pub src: NamedSource,
#[related]
pub syn: Vec<SyntaxError>,
}
#[derive(Debug, Error, Diagnostic)]
#[error("")]
#[diagnostic()]
pub struct TypeWrapper {
#[source_code]
pub src: NamedSource,
#[related]
pub ty: Vec<TypeError>,
}
#[derive(Debug, Error, Diagnostic)]
#[error("syntax error!")]
#[diagnostic()]
pub struct SyntaxError {
// #[source_code]
// pub src: NamedSource,
#[help]
pub help: String,
#[label("i do not know what this is")]
pub pos: SourceSpan,
}
#[derive(Debug, Error, Diagnostic)]
#[error("type error!")]
#[diagnostic()]
pub enum TypeError {
Unary {
#[help]
help: String,
#[label("i do not know what this is")]
expr: SourceSpan,
#[label("i do not know what this is")]
operator: SourceSpan,
},
Binary {
#[help]
help: String,
#[label("i do not know what this is")]
expr: SourceSpan,
#[label("i do not know what this is")]
operator: SourceSpan,
},
UnknownType {
#[help]
help: String,
#[label("i do not know what this type is")]
unknown: SourceSpan,
},
}
impl Error {
pub fn syn(what: &'static str) -> Error {
Error::SyntaxError(what.to_string())
pub fn syn(pos: (usize, usize), help: impl Into<String>) -> Error {
Error::SyntaxError(SyntaxError {
help: help.into(),
pos: pos.into(),
})
}
pub fn ty(what: &'static str) -> Error {
Error::TypeError(what.to_string())
Error::TypeErrorOld(what.to_string())
}
}
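
For context, a rough sketch of how these diagnostics end up on screen, mirroring the reporting code in main below; the source string, file name, and span offsets here are made up for the demo.

use lang::error::{SyntaxError, SyntaxWrapper};
use miette::{GraphicalReportHandler, NamedSource};

fn main() {
    let source = "let x = 10 $ 2;"; // made-up input with a stray character
    // build the diagnostic by hand the same way Error::syn does: the span is
    // (offset, length) into the source
    let err = SyntaxError {
        help: "unexpected character here".to_string(),
        pos: (11, 1).into(),
    };
    // the wrapper attaches the named source text so miette can underline it
    let wrapped = SyntaxWrapper {
        src: NamedSource::new("demo", source.to_string()),
        syn: vec![err],
    };
    let mut out = String::new();
    GraphicalReportHandler::new()
        .render_report(&mut out, &wrapped)
        .unwrap();
    eprintln!("{out}");
}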


@ -1,28 +1,60 @@
// use crate::Error;
use crate::Error;
// use leb128::write;
// /// helper pipeline to convert wat/wasm -> actual text/binary
// pub struct Generator<'a> {
// output: Box<&'a mut dyn std::io::Write>,
/// helper pipeline to convert wat/wasm -> actual text/binary
pub struct Generator<'a> {
output: Box<&'a mut dyn std::io::Write>,
format: OutputFormat,
}
#[derive(Debug, PartialEq, Eq)]
pub enum OutputFormat {
Wat,
WatVerbose,
Wasm,
}
impl OutputFormat {
pub fn is_binary(&self) -> bool {
match self {
Self::Wat | Self::WatVerbose => false,
Self::Wasm => true,
}
}
}
impl Generator<'_> {
pub fn new(output: Box<&mut dyn std::io::Write>, format: OutputFormat) -> Generator {
Generator {
output,
format,
}
}
pub fn write_module(&mut self) -> Result<(), Error> {
if self.format.is_binary() {
self.output.write_all(&[0x00, 0x61, 0x73, 0x6d])?; // magic
self.output.write_all(&[0x01, 0x00, 0x00, 0x00])?; // version
} else {
writeln!(self.output, "(module")?;
}
if self.format.is_binary() {
self.output.write_all(&[0x01])?; // type section
self.output.write_all(&[0x01])?; // 1 item
}
Ok(())
}
pub fn write_comment(&mut self, comment: &str) -> Result<(), Error> {
if self.format == OutputFormat::WatVerbose {
writeln!(self.output, ";; {}", comment)?;
}
Ok(())
}
// }
// #[derive(Debug)]
// enum OutputFormat {
// Wat, Wasm,
// }
// impl Generator<'_> {
// pub fn new(output: Box<&mut dyn std::io::Write>) -> Generator {
// Generator { output }
// }
// pub fn write_comment(&mut self, comment: &str) -> Result<(), Error> {
// writeln!(self.output, ";; {}", comment)?;
// Ok(())
// }
// pub fn write_raw(&mut self, text: &str) -> Result<(), Error> {
// write!(self.output, "{}", text)?;
// Ok(())
// }
// }
pub fn write_raw(&mut self, text: &str) -> Result<(), Error> {
write!(self.output, "{}", text)?;
Ok(())
}
}
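
Now that the Generator is un-commented and wired to OutputFormat, a minimal sketch of driving it in both formats; the buffers and strings here are just for the demo.

use lang::generator::{Generator, OutputFormat};

fn main() -> Result<(), lang::Error> {
    // text output: write_module emits "(module", and verbose comments are kept
    let mut wat: Vec<u8> = vec![];
    let mut gen = Generator::new(Box::new(&mut wat), OutputFormat::WatVerbose);
    gen.write_module()?;
    gen.write_comment("emitted by the generator sketch")?;
    gen.write_raw(")")?;
    println!("{}", String::from_utf8_lossy(&wat));

    // binary output: the same call emits the wasm magic and version bytes
    let mut wasm: Vec<u8> = vec![];
    let mut gen = Generator::new(Box::new(&mut wasm), OutputFormat::Wasm);
    gen.write_module()?;
    assert_eq!(&wasm[..4], b"\0asm");
    Ok(())
}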


@ -4,11 +4,20 @@ use crate::Error;
pub struct Lexer {
input: Vec<char>,
pos: usize,
name: String,
src: String,
}
#[rustfmt::skip]
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Token {
pub struct Token {
pub token: TokenContent,
pub span: (usize, usize),
}
#[rustfmt::skip]
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum TokenContent {
Number { radix: u32, text: String },
Ident(String),
String(String),
@ -42,10 +51,12 @@ pub enum Symbol {
}
impl Lexer {
pub fn new(input: &str) -> Lexer {
pub fn new(input: &str, name: &str) -> Lexer {
Lexer {
input: input.chars().collect(),
pos: 0,
src: input.to_string(),
name: name.to_string(),
}
}
@ -53,6 +64,7 @@ impl Lexer {
let Some(&ch) = self.input.get(self.pos) else {
return Ok(None);
};
let start = self.pos;
let tok = match ch {
'0'..='9' => {
let token = self.lex_number()?;
@ -61,37 +73,37 @@ impl Lexer {
.get(self.pos)
.is_some_and(|c| c.is_ascii_alphanumeric())
{
panic!("unexpected char");
return Err(Error::syn((self.pos, 1), "there shouldn't be extra characters after the number"));
}
token
}
'\'' => {
self.pos += 1;
let ch = self.lex_char()?;
if self.input.get(self.pos).is_some_and(|c| *c != '\'') {
panic!("expected '");
if !self.input.get(self.pos).is_some_and(|c| *c == '\'') {
return Err(Error::syn((start, self.pos - start), "close your character with a '"));
}
self.pos += 1;
Token::Char(ch)
TokenContent::Char(ch)
}
'"' => Token::String(self.lex_string()?),
'"' => TokenContent::String(self.lex_string()?),
ch if ch.is_alphabetic() || ch == '_' => match self.lex_ident().as_str() {
"let" => Token::Let,
"const" => Token::Const,
"type" => Token::Type,
"fn" => Token::Fn,
"true" => Token::True,
"false" => Token::False,
"if" => Token::If,
"else" => Token::Else,
"match" => Token::Match,
"while" => Token::While,
"loop" => Token::Loop,
"for" => Token::For,
"break" => Token::Break,
"continue" => Token::Continue,
"pub" => Token::Pub,
ident => Token::Ident(ident.to_string()),
"let" => TokenContent::Let,
"const" => TokenContent::Const,
"type" => TokenContent::Type,
"fn" => TokenContent::Fn,
"true" => TokenContent::True,
"false" => TokenContent::False,
"if" => TokenContent::If,
"else" => TokenContent::Else,
"match" => TokenContent::Match,
"while" => TokenContent::While,
"loop" => TokenContent::Loop,
"for" => TokenContent::For,
"break" => TokenContent::Break,
"continue" => TokenContent::Continue,
"pub" => TokenContent::Pub,
ident => TokenContent::Ident(ident.to_string()),
},
ch if ch.is_whitespace() => {
self.pos += 1;
@ -99,10 +111,10 @@ impl Lexer {
}
_ => self.lex_op()?,
};
Ok(Some(tok))
Ok(Some(Token { token: tok, span: (start, self.pos - start) }))
}
fn lex_number(&mut self) -> Result<Token, Error> {
fn lex_number(&mut self) -> Result<TokenContent, Error> {
let mut buffer = String::new();
let radix = match (self.input[self.pos], self.input.get(self.pos + 1)) {
('0', Some(ch)) if ch.is_ascii_digit() => 10,
@ -112,11 +124,12 @@ impl Lexer {
'x' => 16,
'o' => 8,
'b' => 2,
'd' => 10,
_ if !ch.is_ascii_alphanumeric() => {
self.pos -= 2;
10
}
_ => return Err(Error::SyntaxError(format!("unknown number radix {ch}"))),
_ => return Err(Error::syn((self.pos - 2, 2), "use a known number radix, like 0x (hex), 0o (octal), 0b (binary), or none/0d (decimal)")),
}
}
_ => 10,
@ -130,17 +143,20 @@ impl Lexer {
} else if ch.is_digit(radix) {
buffer.push(ch);
self.pos += 1;
} else if ch.is_ascii_digit() {
return Err(Error::syn((self.pos, 1), "you seem to have used the wrong base for this number"));
} else {
break;
}
}
Ok(Token::Number {
Ok(TokenContent::Number {
radix,
text: buffer,
})
}
fn lex_string(&mut self) -> Result<String, Error> {
let start = self.pos;
self.pos += 1; // take "
let mut buffer = String::new();
while let Some(&ch) = self.input.get(self.pos) {
@ -150,12 +166,17 @@ impl Lexer {
buffer.push(self.lex_char()?);
}
if !self.input.get(self.pos).is_some_and(|c| *c == '"') {
panic!("expected \"");
return Err(Error::syn((start, self.pos - start), "end your string with a \""));
}
self.pos += 1;
Ok(buffer)
}
#[inline]
fn err_syn(&self, pos: (usize, usize), help: &str) -> Error {
Error::syn(pos, help)
}
#[inline]
fn lex_char(&mut self) -> Result<char, Error> {
let ch = match self.input.get(self.pos) {
@ -164,7 +185,7 @@ impl Lexer {
let ch = self
.input
.get(self.pos)
.ok_or_else(|| Error::syn("expected escape char"))?;
.ok_or_else(|| Error::syn((self.pos, 1), "there should be a character here (which will be escaped)"))?;
match ch {
'n' => '\n',
't' => '\t',
@ -172,11 +193,11 @@ impl Lexer {
'"' => '\"',
// 'x' => '\x',
// 'u' => '\u',
_ => return Err(Error::syn("unknown escape char")),
_ => return Err(Error::syn((self.pos - 1, 2), format!("i only know how to escape \\n, \\t, \\\\, or \\\", not \\{ch}"))),
}
}
Some(ch) => *ch,
None => return Err(Error::syn("expected char")),
None => return Err(Error::syn((self.pos, 1), "there should be a character here, not an eof")),
};
self.pos += 1;
Ok(ch)
@ -192,7 +213,7 @@ impl Lexer {
rest
}
fn lex_op(&mut self) -> Result<Token, Error> {
fn lex_op(&mut self) -> Result<TokenContent, Error> {
let ch = self.input[self.pos];
macro_rules! settable {
@ -220,12 +241,12 @@ impl Lexer {
}
let token = match ch {
'(' => Some(Token::OpenParan),
')' => Some(Token::CloseParan),
'[' => Some(Token::OpenBracket),
']' => Some(Token::CloseBracket),
'{' => Some(Token::OpenBrace),
'}' => Some(Token::CloseBrace),
'(' => Some(TokenContent::OpenParan),
')' => Some(TokenContent::CloseParan),
'[' => Some(TokenContent::OpenBracket),
']' => Some(TokenContent::CloseBracket),
'{' => Some(TokenContent::OpenBrace),
'}' => Some(TokenContent::CloseBrace),
_ => None,
};
@ -339,10 +360,10 @@ impl Lexer {
',' => Symbol::Comma,
';' => Symbol::Semicolon,
'?' => Symbol::Question,
_ => return Err(Error::SyntaxError(format!("unexpected character {}", ch))),
_ => return Err(Error::syn((self.pos, 1), "unexpected character here")),
};
self.pos += 1;
Ok(Token::Symbol(symbol))
Ok(TokenContent::Symbol(symbol))
}
}
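
A small sketch of what the lexer hands back now that every token carries a span; the input string and file name are made up.

use lang::lexer::{Lexer, Token, TokenContent};

fn main() {
    let src = "let x = 0x2a;"; // made-up input
    let tokens: Vec<Token> = Lexer::new(src, "example")
        .collect::<Result<Vec<_>, _>>()
        .expect("lexes cleanly");
    // span is (offset, length) into the source, which is what the miette
    // labels in error.rs point at
    for tok in &tokens {
        let (start, len) = tok.span;
        println!("{:?} -> {:?}", &src[start..start + len], tok.token);
    }
    assert_eq!(tokens[0].token, TokenContent::Let);
}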


@ -7,8 +7,8 @@ a better way
use std::{path::PathBuf, fs::OpenOptions};
use clap::Parser;
use lang::{compiler::{self, OutputType}, lexer, parser};
use compiler::Compiler;
use lang::{compiler::Compiler, generator::OutputFormat, lexer, parser, error::{SyntaxWrapper, TypeWrapper}};
use miette::NamedSource;
#[derive(Debug, clap::Parser)]
enum Cli {
@ -22,7 +22,7 @@ enum Cli {
Compile {
#[arg(short, long)]
/// What format to output as
format: Option<OutputFormat>,
format: Option<OutputType>,
#[arg(short, long)]
/// Where to output to
@ -58,40 +58,101 @@ enum Cli {
}
#[derive(Debug, Clone, clap::ValueEnum)]
enum OutputFormat {
enum OutputType {
Wat, Wasm,
}
fn main() {
let args = Cli::parse();
let theme = miette::GraphicalTheme {
characters: miette::ThemeCharacters {
hbar: '',
vbar: '',
xbar: '',
vbar_break: '',
uarrow: '',
rarrow: '>',
ltop: '',
mtop: '',
rtop: '',
lbot: '',
rbot: '',
mbot: '',
lbox: '[',
rbox: ']',
lcross: '',
rcross: '',
underbar: '',
underline: '',
error: "error:".to_string(),
warning: "warn:".to_string(),
advice: "note:".to_string(),
},
..Default::default()
};
let reporter = miette::GraphicalReportHandler::new().with_theme(theme);
match args {
Cli::Run { file } => {
let source = std::fs::read_to_string(file).expect("no source!");
let lexer = lexer::Lexer::new(&source);
let source = std::fs::read_to_string(&file).expect("no source!");
let lexer = lexer::Lexer::new(&source, file.to_str().unwrap());
let tokens = match lexer.collect::<Result<Vec<_>, _>>() {
Ok(tokens) => tokens,
Err(error) => {
eprintln!("error: {:?}", error);
match error {
lang::Error::SyntaxError(syn) => {
let mut s = String::new();
let syn = SyntaxWrapper {
src: NamedSource::new(file.to_string_lossy().to_string(), source),
syn: vec![syn],
};
reporter.render_report(&mut s, &syn).unwrap();
eprintln!("{}", s);
}
_ => eprintln!("error: {:?}", error),
}
return;
}
};
let mut parser = parser::Parser::new(tokens);
let mut parser = parser::Parser::new(tokens, source.clone(), file.to_string_lossy().to_string());
let mut statements = vec![];
loop {
match parser.next() {
Ok(None) => break,
Ok(Some(tree)) => statements.push(tree),
Err(error) => {
eprintln!("error: {:?}", error);
match error {
lang::Error::SyntaxError(syn) => {
let mut s = String::new();
let syn = SyntaxWrapper {
src: NamedSource::new(file.to_string_lossy().to_string(), source),
syn: vec![syn],
};
reporter.render_report(&mut s, &syn).unwrap();
eprintln!("{}", s);
}
lang::Error::TypeError(ty) => {
let mut s = String::new();
let ty = TypeWrapper {
src: NamedSource::new(file.to_string_lossy().to_string(), source),
ty: vec![ty],
};
reporter.render_report(&mut s, &ty).unwrap();
eprintln!("{}", s);
}
_ => eprintln!("error: {:?}", error),
}
return;
}
}
}
let mut wat = vec![];
let mut gen = Compiler::new(Box::new(&mut wat), OutputType::WatVerbose);
let mut gen = Compiler::new(Box::new(&mut wat), OutputFormat::WatVerbose);
if let Err(err) = gen.write_module(&statements) {
panic!("{:?}", err);
@ -108,17 +169,17 @@ fn main() {
}
Cli::Compile { file, format, output } => {
let source = std::fs::read_to_string(&file).expect("no source!");
let lexer = lexer::Lexer::new(&source);
let lexer = lexer::Lexer::new(&source, file.to_str().unwrap());
let tokens = match lexer.collect::<Result<Vec<_>, _>>() {
Ok(tokens) => tokens,
Err(error) => {
eprintln!("error: {:?}", error);
return;
return
}
};
let mut parser = parser::Parser::new(tokens);
let mut parser = parser::Parser::new(tokens, source.clone(), file.to_string_lossy().to_string());
let mut statements = vec![];
loop {
match parser.next() {
@ -133,9 +194,9 @@ fn main() {
let format = format.unwrap_or_else(|| {
if file.extension().is_some_and(|ext| ext == "wasm") {
OutputFormat::Wasm
OutputType::Wasm
} else {
OutputFormat::Wat
OutputType::Wat
}
});
@ -145,15 +206,15 @@ fn main() {
};
match format {
OutputFormat::Wat => {
let mut gen = Compiler::new(Box::new(&mut output), OutputType::WatVerbose);
OutputType::Wat => {
let mut gen = Compiler::new(Box::new(&mut output), OutputFormat::WatVerbose);
if let Err(err) = gen.write_module(&statements) {
panic!("{:?}", err);
}
}
OutputFormat::Wasm => {
let mut gen = Compiler::new(Box::new(&mut output), OutputType::Wasm);
OutputType::Wasm => {
let mut gen = Compiler::new(Box::new(&mut output), OutputFormat::Wasm);
if let Err(err) = gen.write_module(&statements) {
panic!("{:?}", err);


@ -1,12 +1,14 @@
use std::collections::HashMap;
use crate::data::{BinaryOp, Block, Expr, Literal, Pattern, PrefixOp, Statement, Type, Func};
use crate::lexer::{Token, Symbol};
use crate::lexer::{TokenContent, Symbol, Token};
use crate::Error;
pub struct Parser {
tokens: Vec<Token>,
pos: usize,
src: String,
name: String,
}
#[derive(Debug, Clone)]
@ -16,8 +18,28 @@ pub struct Context {
}
impl Parser {
pub fn new(tokens: Vec<Token>) -> Parser {
Parser { tokens, pos: 0 }
pub fn new(tokens: Vec<Token>, src: String, name: String) -> Parser {
Parser {
src,
name,
tokens,
pos: 0,
}
}
fn err_syn(&self, pos: (usize, usize), help: &str) -> Error {
Error::syn(pos, help)
}
fn err_eof(&self, wanted: &str) -> Error {
self.err_syn(self.tokens.last().map(|tok| tok.span).unwrap_or((0, 0)), &format!("wanted {}, got eof", wanted))
}
fn err_ty_unknown(&self, pos: (usize, usize), help: &str) -> Error {
Error::TypeError(crate::error::TypeError::UnknownType {
help: help.into(),
unknown: pos.into(),
})
}
fn peek_tok(&self) -> Option<&Token> {
@ -30,149 +52,143 @@ impl Parser {
tok
}
fn eat(&mut self, token: Token) -> Result<&Token, Error> {
match self.next_tok() {
Some(t) if t == &token => Ok(t),
Some(t) => Err(Error::SyntaxError(format!("expected {token:?}, got {t:?}"))),
None => Err(Error::SyntaxError(format!("expected {token:?}, got eof"))),
}
fn eat(&mut self, token: TokenContent) -> Result<Token, Error> {
let result = match self.peek_tok() {
Some(t) if t.token == token => {
let t = t.clone();
self.pos += 1;
return Ok(t);
}
Some(t) => Err(self.err_syn(t.span, &format!("expected {token:?}, got {:?}", t.token))),
None => Err(self.err_eof(&format!("{:?}", token))),
};
result
}
pub fn next(&mut self) -> Result<Option<Statement>, Error> {
match self.peek_tok() {
Some(_) => self.parse_statement(),
Some(_) => Some(self.parse_statement()).transpose(),
None => Ok(None),
}
}
fn parse_statement(&mut self) -> Result<Option<Statement>, Error> {
fn parse_statement(&mut self) -> Result<Statement, Error> {
let Some(tok) = self.peek_tok() else {
return Err(Error::syn("unexpected eof"));
return Err(self.err_eof("statement"));
};
let stmt = match tok {
Token::Let => {
self.eat(Token::Let)?;
let stmt = match tok.token {
TokenContent::Let => {
self.eat(TokenContent::Let)?;
let name = self.parse_ident()?;
self.eat(Token::Symbol(Symbol::Set))?;
if self.eat(TokenContent::Symbol(Symbol::Colon)).is_ok() {
// TODO: types in variables
self.parse_type()?;
}
self.eat(TokenContent::Symbol(Symbol::Set))?;
let expr = self.parse_expr(0)?;
self.eat(Token::Symbol(Symbol::Semicolon))?;
self.eat(TokenContent::Symbol(Symbol::Semicolon))?;
Statement::Let(name, expr)
}
Token::Pub | Token::Fn => {
TokenContent::Pub | TokenContent::Fn => {
// TODO: public things that aren't functions
let public = if tok == &Token::Pub {
self.eat(Token::Pub)?;
let public = if tok.token == TokenContent::Pub {
self.eat(TokenContent::Pub)?;
true
} else {
false
};
self.eat(Token::Fn)?;
self.eat(TokenContent::Fn)?;
let name = self.parse_ident()?;
self.eat(Token::OpenParan)?;
self.eat(TokenContent::OpenParan)?;
let mut params = vec![];
while !self.peek_tok().is_some_and(|tok| tok == &Token::CloseParan) {
while !self.peek_tok().is_some_and(|tok| tok.token == TokenContent::CloseParan) {
let name = self.parse_ident()?;
self.eat(Token::Symbol(Symbol::Colon))?;
self.eat(TokenContent::Symbol(Symbol::Colon))?;
let ty = self.parse_type()?;
params.push((name, ty));
if self.peek_tok() == Some(&Token::Symbol(Symbol::Comma)) {
self.eat(Token::Symbol(Symbol::Comma))?;
} else {
if self.eat(TokenContent::Symbol(Symbol::Comma)).is_err() {
break;
}
}
self.eat(Token::CloseParan)?;
let ret = if self.peek_tok().is_some_and(|tok| tok == &Token::Symbol(Symbol::ThinArrow)) {
self.eat(Token::Symbol(Symbol::ThinArrow))?;
self.eat(TokenContent::CloseParan)?;
let ret = if self.eat(TokenContent::Symbol(Symbol::ThinArrow)).is_ok() {
self.parse_type()?
} else {
Type::empty()
};
self.eat(Token::OpenBrace)?;
self.eat(TokenContent::OpenBrace)?;
let block = self.parse_block()?;
self.eat(Token::CloseBrace)?;
self.eat(TokenContent::CloseBrace)?;
Statement::Func(Func { name, params, ret, block, public })
}
_ => {
let expr = self.parse_expr(0)?;
if self.peek_tok().is_some_and(|tk| tk == &Token::Symbol(Symbol::Semicolon)) {
self.eat(Token::Symbol(Symbol::Semicolon))?;
if self.peek_tok().is_some_and(|tk| tk.token == TokenContent::Symbol(Symbol::Semicolon)) {
self.eat(TokenContent::Symbol(Symbol::Semicolon))?;
Statement::Expr(expr)
} else {
Statement::TailExpr(expr)
}
}
};
Ok(Some(stmt))
Ok(stmt)
}
fn parse_type(&mut self) -> Result<Type, Error> {
let Some(tok) = self.next_tok() else {
return Err(Error::syn("unexpected eof"));
let Some(tok) = self.next_tok().cloned() else {
return Err(self.err_eof("type"));
};
let ty = match tok {
Token::Ident(ident) => match ident.as_str() {
let ty = match &tok.token {
TokenContent::Ident(ident) => match ident.as_str() {
"i32" => Type::Integer,
"f64" => Type::Float,
"bool" => Type::Boolean,
_ => return Err(Error::TypeError(format!("unknown type {ident}"))),
_ => return Err(self.err_ty_unknown(tok.span, "use a type that i know (currently only i32, f64, and bool)")),
},
Token::OpenParan => {
TokenContent::OpenParan => {
let mut tys = vec![];
while !self.peek_tok().is_some_and(|tok| tok == &Token::CloseParan) {
while !self.peek_tok().is_some_and(|tok| tok.token == TokenContent::CloseParan) {
tys.push(self.parse_type()?);
if self.peek_tok() == Some(&Token::Symbol(Symbol::Comma)) {
self.eat(Token::Symbol(Symbol::Comma))?;
} else {
if self.eat(TokenContent::Symbol(Symbol::Comma)).is_err() {
break;
}
}
self.eat(Token::CloseParan)?;
self.eat(TokenContent::CloseParan)?;
Type::Tuple(tys)
},
_ => todo!(),
_ => return Err(self.err_syn(tok.span, "this should be a type")),
};
Ok(ty)
}
fn parse_ident(&mut self) -> Result<String, Error> {
match self.next_tok() {
Some(Token::Ident(ident)) => Ok(ident.to_string()),
Some(tk) => {
Err(Error::SyntaxError(format!(
"expected identifier, got {tk:?}"
)))
}
None => Err(Error::syn("expected identifier, got eof")),
}
let result = match self.peek_tok() {
Some(Token { token: TokenContent::Ident(ident), .. }) => Ok(ident.to_string()),
Some(t) => return Err(self.err_syn(t.span, &format!("expected ident, got {:?}", t.token))),
None => return Err(self.err_eof("ident")),
};
self.pos += 1;
result
}
fn parse_block(&mut self) -> Result<Block, Error> {
let mut statements = vec![];
loop {
match self.peek_tok() {
Some(Token::CloseBrace) => break,
Some(_) => (),
None => return Err(Error::syn("missing closing brace")),
};
match self.parse_statement()?.unwrap() {
if self.peek_tok().is_some_and(|tok| tok.token == TokenContent::CloseBrace) {
break;
}
match self.parse_statement()? {
stmt @ Statement::TailExpr(..) => {
statements.push(stmt);
match self.peek_tok() {
Some(Token::CloseBrace) => break,
Some(tok) => return Err(Error::SyntaxError(format!("unexpected token {tok:?}"))),
None => return Err(Error::syn("unexpected eof")),
};
break;
}
stmt => statements.push(stmt),
}
match self.peek_tok() {
Some(Token::CloseBrace) => break,
// Some(_) => return Err(Error::SyntaxError(format!("unexpected token {tok:?}"))),
// tok => return Err(Error::SyntaxError(format!("unexpected token {tok:?}"))),
Some(Token { token: TokenContent::CloseBrace, .. }) => break,
_ => (),
};
}
@ -180,55 +196,55 @@ impl Parser {
}
fn parse_expr(&mut self, binding: u8) -> Result<Expr, Error> {
let tok = self.next_tok().ok_or(Error::syn("expected a token"))?;
let mut expr = match tok {
Token::Number { radix: _, text } => {
let Some(tok) = self.next_tok() else {
return Err(self.err_eof("expression"));
};
let tok = tok.clone();
let mut expr = match &tok.token {
TokenContent::Number { radix: _, text } => {
if text.contains('.') {
Expr::Literal(Literal::Float(text.parse().unwrap()))
} else {
Expr::Literal(Literal::Integer(text.parse().unwrap()))
}
}
Token::Ident(ident) => {
TokenContent::Ident(ident) => {
let ident = ident.clone();
if self.peek_tok().is_some_and(|t| *t == Token::OpenParan) {
self.eat(Token::OpenParan)?;
if self.eat(TokenContent::OpenParan).is_ok() {
let mut params = vec![];
while !self.peek_tok().is_some_and(|tok| tok == &Token::CloseParan) {
while !self.peek_tok().is_some_and(|tok| tok.token == TokenContent::CloseParan) {
params.push(self.parse_expr(0)?);
if self.peek_tok() == Some(&Token::Symbol(Symbol::Comma)) {
self.eat(Token::Symbol(Symbol::Comma))?;
} else {
if self.eat(TokenContent::Symbol(Symbol::Comma)).is_err() {
break;
}
}
self.eat(Token::CloseParan)?;
self.eat(TokenContent::CloseParan)?;
Expr::Call(ident, params)
} else {
Expr::Variable(ident)
}
}
Token::False => Expr::Literal(Literal::Boolean(false)),
Token::True => Expr::Literal(Literal::Boolean(true)),
Token::String(s) => Expr::Literal(Literal::String(s.to_string())),
Token::Char(ch) => Expr::Literal(Literal::Char(*ch)),
Token::If => {
TokenContent::False => Expr::Literal(Literal::Boolean(false)),
TokenContent::True => Expr::Literal(Literal::Boolean(true)),
TokenContent::String(s) => Expr::Literal(Literal::String(s.to_string())),
TokenContent::Char(ch) => Expr::Literal(Literal::Char(*ch)),
TokenContent::If => {
let cond = self.parse_expr(0)?;
self.eat(Token::OpenBrace)?;
self.eat(TokenContent::OpenBrace)?;
let block = self.parse_block()?;
self.eat(Token::CloseBrace)?;
let otherwise = if self.peek_tok().is_some_and(|t| *t == Token::Else) {
self.eat(TokenContent::CloseBrace)?;
let otherwise = if self.peek_tok().is_some_and(|t| t.token == TokenContent::Else) {
self.next_tok();
match self.peek_tok() {
Some(Token::OpenBrace) => {
self.eat(Token::OpenBrace)?;
Some(Token { token: TokenContent::OpenBrace, .. }) => {
self.eat(TokenContent::OpenBrace)?;
let b = Some(self.parse_block()?);
self.eat(Token::CloseBrace)?;
self.eat(TokenContent::CloseBrace)?;
b
}
Some(Token::If) => Some(Block(vec![Statement::TailExpr(self.parse_expr(0)?)])),
Some(_) => return Err(Error::syn("unexpected token")),
None => return Err(Error::syn("unexpected eof, wanted body for else")),
Some(Token { token: TokenContent::If, .. }) => Some(Block(vec![Statement::TailExpr(self.parse_expr(0)?)])),
Some(tk) => return Err(self.err_syn(tk.span, "this should be followed by an if or block")),
None => return Err(self.err_eof("if or block")),
}
} else {
None
@ -242,50 +258,48 @@ impl Parser {
}
Expr::Match(Box::new(cond), map)
}
Token::Symbol(_) => {
let Some(op) = PrefixOp::from_token(tok) else {
return Err(Error::SyntaxError(format!("unexpected token {tok:?}")));
TokenContent::Symbol(_) => {
let Some(op) = PrefixOp::from_token(&tok.token) else {
return Err(self.err_syn(tok.span, "this should be changed into a valid operator or removed outright"));
};
let expr = self.parse_expr(1)?;
Expr::Unary(op, Box::new(expr))
}
Token::Match => {
TokenContent::Match => {
let expr = self.parse_expr(0)?;
let mut arms = vec![];
self.eat(Token::OpenBrace)?;
self.eat(TokenContent::OpenBrace)?;
loop {
let pat = self.parse_pattern()?;
self.eat(Token::Symbol(Symbol::FatArrow))?;
self.eat(TokenContent::Symbol(Symbol::FatArrow))?;
let expr = self.parse_expr(0)?;
arms.push((pat, expr));
if self.peek_tok().is_some_and(|t| t == &Token::Symbol(Symbol::Comma)) {
self.next_tok();
} else {
if self.eat(TokenContent::Symbol(Symbol::Comma)).is_err() {
break;
}
if self.peek_tok().is_none() || self.peek_tok().is_some_and(|t| t == &Token::CloseBrace) {
if self.peek_tok().is_none() || self.peek_tok().is_some_and(|t| t.token == TokenContent::CloseBrace) {
break;
}
}
self.eat(Token::CloseBrace)?;
self.eat(TokenContent::CloseBrace)?;
Expr::Match(Box::new(expr), arms)
}
Token::OpenBrace => {
TokenContent::OpenBrace => {
let b = Expr::Block(self.parse_block()?);
self.eat(Token::CloseBrace)?;
self.eat(TokenContent::CloseBrace)?;
b
}
Token::OpenParan => {
TokenContent::OpenParan => {
let expr = self.parse_expr(0)?;
self.eat(Token::CloseParan)?;
self.eat(TokenContent::CloseParan)?;
expr
}
_ => return Err(Error::syn("unexpected token")),
_ => return Err(self.err_syn(tok.span, "this should be an expression")),
};
while let Some(next) = self.peek_tok() {
let Some(op) = BinaryOp::from_token(next) else {
let Some(op) = BinaryOp::from_token(&next.token) else {
break;
};
let (bind_left, bind_right) = op.precedence();
@ -300,18 +314,20 @@ impl Parser {
}
fn parse_pattern(&mut self) -> Result<Pattern, Error> {
let tok = self.next_tok().ok_or(Error::syn("expected a token"))?;
let pat = match tok {
Token::Number { radix: _, text } => {
let Some(tok) = self.next_tok() else {
return Err(self.err_eof("pattern"));
};
let pat = match &tok.token {
TokenContent::Number { radix: _, text } => {
if text.contains('.') {
Pattern::Literal(Literal::Float(text.parse().unwrap()))
} else {
Pattern::Literal(Literal::Integer(text.parse().unwrap()))
}
}
Token::False => Pattern::Literal(Literal::Boolean(false)),
Token::True => Pattern::Literal(Literal::Boolean(true)),
Token::Ident(s) if s == "_" => Pattern::Wildcard,
TokenContent::False => Pattern::Literal(Literal::Boolean(false)),
TokenContent::True => Pattern::Literal(Literal::Boolean(true)),
TokenContent::Ident(s) if s == "_" => Pattern::Wildcard,
_ => todo!("no pattern for {:?} yet", tok),
};
Ok(pat)
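
And roughly how the reworked parser gets driven end to end, mirroring main above; the source snippet and file name are made up.

use lang::{lexer::Lexer, parser::Parser};

fn main() {
    let source = "fn main() -> i32 { 1 + 2 }"; // made-up program
    let tokens = Lexer::new(source, "demo")
        .collect::<Result<Vec<_>, _>>()
        .expect("lexes");
    // the parser now keeps the source text and name around so it can build
    // spans for diagnostics, not just a token stream
    let mut parser = Parser::new(tokens, source.to_string(), "demo".to_string());
    let mut statements = vec![];
    while let Some(stmt) = parser.next().expect("parses") {
        statements.push(stmt);
    }
    println!("parsed {} top-level statement(s)", statements.len());
}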


@ -56,7 +56,7 @@ impl Expr {
for (arg, (name, ty)) in args.iter().zip(params) {
let got = arg.infer(ctx)?;
if got != *ty {
return Err(Error::TypeError(format!("wrong type for {name}: it should've been {ty:?} but you put a {got:?}", )));
return Err(Error::TypeErrorOld(format!("wrong type for {name}: it should've been {ty:?} but you put a {got:?}", )));
}
}
@ -92,7 +92,7 @@ impl BinaryOp {
) => T::Boolean,
// (B::Add | B::Sub | B::Mul | B::Div, T::Float, T::Float) => T::Float,
(op, a, b) => {
return Err(Error::TypeError(format!(
return Err(Error::TypeErrorOld(format!(
"operator {op:?} cannot be applied to {a:?} and {b:?}"
)))
}
@ -112,7 +112,7 @@ impl PrefixOp {
// (U::Minus, T::Float) => T::Float,
(U::LogicNot, T::Boolean) => T::Boolean,
(op, ty) => {
return Err(Error::TypeError(format!(
return Err(Error::TypeErrorOld(format!(
"operator {op:?} cannot be applied to {ty:?}"
)))
}

test/bad (new file, 3 lines)

@ -0,0 +1,3 @@
fn main() {
let a = 10 * true;
}

test/itoa (new file, 42 lines)

@ -0,0 +1,42 @@
fn itoa(number: i32) -> i32 {
let count: i32 = 0;
let is_negative = number < 0;
if is_negative {
number = -number;
}
loop {
let ch = (number % 10) + 0x30;
store_i32_8(count + 8, ch);
number = number / 10;
count = count + 1;
if number == 0 {
break;
}
}
if is_negative {
store_i32_8(count + 8, 45);
count = count + 1;
}
let i: i32 = 0;
loop {
let swap_to = count - i + 7;
let swap_from = i + 8;
let tmp = load_i32_8(swap_to);
store_i32_8(swap_to, load_i32_8(swap_from));
store_i32_8(swap_from, tmp);
i = i + 1;
if i >= count / 2 {
break;
}
}
count
}


@ -1,14 +1,14 @@
use lang::{lexer::{Lexer, Token, Symbol}, Error};
use lang::{lexer::{Lexer, TokenContent, Symbol}, Error};
#[test]
fn test_foo() {
// Lexer::new now also takes a source name
let tokens: Result<Vec<_>, Error> = Lexer::new("1 * 5 / 3", "test").collect();
let tokens = tokens.expect("should parse");
// tokens carry spans now; compare just their contents
let contents: Vec<_> = tokens.into_iter().map(|tok| tok.token).collect();
assert_eq!(contents, vec![
Token::Number { radix: 10, text: "1".into() },
Token::Symbol(Symbol::Star),
Token::Number { radix: 10, text: "5".into() },
Token::Symbol(Symbol::Slash),
Token::Number { radix: 10, text: "3".into() },
TokenContent::Number { radix: 10, text: "1".into() },
TokenContent::Symbol(Symbol::Star),
TokenContent::Number { radix: 10, text: "5".into() },
TokenContent::Symbol(Symbol::Slash),
TokenContent::Number { radix: 10, text: "3".into() },
])
}