diff --git a/examples/pointer.sloth b/examples/pointer.sloth
new file mode 100644
index 0000000..7d7ca10
--- /dev/null
+++ b/examples/pointer.sloth
@@ -0,0 +1,5 @@
+fn main() Int {
+    val x: Int = 0;
+    val xPtr: Int = *x;
+    val x: Int = @xPtr;
+}
\ No newline at end of file
diff --git a/sloth/Cargo.toml b/sloth/Cargo.toml
index f58f40e..044622f 100644
--- a/sloth/Cargo.toml
+++ b/sloth/Cargo.toml
@@ -6,8 +6,19 @@ version.workspace = true
 edition.workspace = true
 
 [dependencies]
-llvm-sys = "150"
-inkwell = { version = "0.2.0", features = ["llvm15-0"] }
 itertools = "0.10.5"
 rand = "0.8.5"
 thiserror = "1.0.40"
+
+[dependencies.llvm-sys]
+version = "150"
+optional = true
+
+[dependencies.inkwell]
+version = "0.2.0"
+features = ["llvm15-0"]
+optional = true
+
+[features]
+default = ["llvm"]
+llvm = ["dep:llvm-sys", "dep:inkwell"]
\ No newline at end of file
diff --git a/sloth/src/lexer.rs b/sloth/src/lexer.rs
index 0b2399f..5033c99 100644
--- a/sloth/src/lexer.rs
+++ b/sloth/src/lexer.rs
@@ -67,6 +67,8 @@ pub enum TokenType {
     GtEq, // >=
     GtGtEq, // >>=
 
+    At, // @
+
     Comma,
 
     Question, // ?
@@ -158,6 +160,7 @@ impl Display for TokenType {
            TokenType::GtGt => ">>",
            TokenType::GtEq => ">=",
            TokenType::GtGtEq => ">>=",
+           TokenType::At => "@",
            TokenType::Comma => ",",
            TokenType::Question => "?",
            TokenType::QuestionDot => "?.",
@@ -455,6 +458,8 @@ impl<'a> Iterator for Lexer<'a> {
            ['>', '=', ..] => self.advance_by_with(2, TokenType::GtEq),
            ['>', ..] => self.advance_with(TokenType::Gt),
 
+           ['@', ..] => self.advance_with(TokenType::At),
+
            [',', ..] => self.advance_with(TokenType::Comma),
 
            ['.', '.', ..] => self.advance_by_with(2, TokenType::DotDot),
@@ -533,7 +538,7 @@ mod tests {
     #[test]
     fn lex_operators() {
         let source = "+ ++ - * ** / % ~ += ++= -= *= **= /= %= ~= & && | || ^ = == ! !! != < << \
-                      <<= <= > >> >>= >= , ? ?. ?? . .. : :: ; -> =>";
+                      <<= <= > >> >>= >= @ , ? ?. ?? . .. : :: ; -> =>";
         let tokens = Lexer::new(source).map(|it| it.tt).collect_vec();
 
         assert_eq!(&tokens, &[
@@ -571,6 +576,7 @@ mod tests {
            TokenType::GtGt,
            TokenType::GtGtEq,
            TokenType::GtEq,
+           TokenType::At,
            TokenType::Comma,
            TokenType::Question,
            TokenType::QuestionDot,
diff --git a/sloth/src/main.rs b/sloth/src/main.rs
index 23946b2..f66a1be 100644
--- a/sloth/src/main.rs
+++ b/sloth/src/main.rs
@@ -7,17 +7,15 @@
 )]
 
 pub mod analysis;
-pub mod codegen;
 pub mod lexer;
 pub mod parser;
 pub mod symtable;
 
-use std::fs::File;
+#[cfg(feature = "llvm")]
+pub mod codegen;
+
 use std::{env, fs};
 
-use codegen::Codegen;
-use inkwell::context::Context;
-use inkwell::targets::FileType;
 use itertools::Itertools;
 use lexer::Lexer;
 use parser::AstParser;
@@ -32,7 +30,7 @@ fn main() {
     if args.len() < 2 {
         println!("Sloth programming language interpreter\n");
         println!("Usage: sloth ");
-        return;
+        std::process::exit(1);
     }
 
     // Reading source files
@@ -40,7 +38,7 @@
     for path in args.iter().skip(1) {
         let Ok(contents) = fs::read_to_string(path) else {
            eprintln!("Error while reading '{path}'");
-           return;
+           std::process::exit(1);
         };
         source.push_str(&contents);
         let len = contents.lines().collect_vec().len();
@@ -52,6 +50,7 @@
 
     // Parsing
     let tokens = Lexer::new(&source).collect_vec();
+    println!("{tokens:#?}");
     let global_symtable = mk_symtable();
 
     let mut ast = match AstParser::parse(tokens, global_symtable) {
@@ -62,7 +61,7 @@
                args[1 + (error.line() / 1_000) as usize],
                error.line() % 1000 + 1,
            );
-           return;
+           std::process::exit(1);
        }
    };
 
@@ -72,16 +71,25 @@
            args[1 + (error.line() / 1_000) as usize],
            error.line() % 1000 + 1,
        );
-       return;
+       std::process::exit(1);
    }
 
-   // Generating code for module
-   let context = Context::create();
-   let mut codegen = Codegen::new(&context, "s");
-   let mut output_file = File::create("output.o").unwrap();
+   // Generating code for module if LLVM enabled
+   #[cfg(feature = "llvm")]
+   {
+       use std::fs::File;
 
-   codegen.codegen(&ast);
-   codegen.write_obj(&mut output_file, FileType::Object);
+       use codegen::Codegen;
+       use inkwell::context::Context;
+       use inkwell::targets::FileType;
+
+       let context = Context::create();
+       let mut codegen = Codegen::new(&context, "s");
+       let mut output_file = File::create("output.o").unwrap();
+
+       codegen.codegen(&ast);
+       codegen.write_obj(&mut output_file, FileType::Object);
+   }
 }
 
 fn mk_symtable() -> SymbolTable {
diff --git a/sloth/src/parser/ast.rs b/sloth/src/parser/ast.rs
index 7bc2c1b..a82a6df 100644
--- a/sloth/src/parser/ast.rs
+++ b/sloth/src/parser/ast.rs
@@ -424,6 +424,9 @@ impl Display for BinaryOp {
 pub enum UnaryOp {
     Not,
     Neg,
+
+    Reference,
+    Dereference,
 }
 
 impl TryFrom<TokenType> for UnaryOp {
@@ -434,6 +437,9 @@
            TokenType::Bang => Self::Not,
            TokenType::Minus => Self::Neg,
 
+           TokenType::Star => Self::Reference,
+           TokenType::At => Self::Dereference,
+
            _ => return Err(ParsingError::InvalidOp),
        };
 
@@ -446,6 +452,9 @@ impl Display for UnaryOp {
        let value = match self {
            UnaryOp::Not => "!",
            UnaryOp::Neg => "-",
+
+           UnaryOp::Reference => "*",
+           UnaryOp::Dereference => "@",
        };
 
        write!(f, "{value}")
diff --git a/sloth/src/parser/expr.rs b/sloth/src/parser/expr.rs
index ac7a19e..60f1610 100644
--- a/sloth/src/parser/expr.rs
+++ b/sloth/src/parser/expr.rs
@@ -10,9 +10,14 @@ impl<'a> AstParser<'a> {
     }
 
     fn unary(&mut self) -> Result {
-        if !self.eof() && matches!(self.peek().tt, TokenType::Bang | TokenType::Minus) {
-            let oeprator_tt = self.advance().unwrap().tt.clone();
-            let operator = UnaryOp::try_from(oeprator_tt)?;
+        if !self.eof()
+            && matches!(
+                self.peek().tt,
+                TokenType::Bang | TokenType::Minus | TokenType::Star | TokenType::At
+            )
+        {
+            let operator_tt = self.advance().unwrap().tt.clone();
+            let operator = UnaryOp::try_from(operator_tt)?;
             let value = self.unary()?;
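
For a quick sanity check of the new `@` token and the `Reference`/`Dereference` operators, a test roughly like the sketch below could accompany this change. The module paths (`crate::lexer`, `crate::parser::ast`) and the test module name are assumptions inferred from the file paths in this diff, not code taken from it; it also assumes `TokenType` keeps its `PartialEq`/`Debug` derives used by the existing `lex_operators` test.

    #[cfg(test)]
    mod pointer_token_tests {
        use crate::lexer::{Lexer, TokenType};
        use crate::parser::ast::UnaryOp;

        #[test]
        fn lexes_at_token() {
            // The new lexer arm should turn a bare `@` into TokenType::At.
            let tokens: Vec<_> = Lexer::new("@").map(|it| it.tt).collect();
            assert_eq!(tokens[0], TokenType::At);
        }

        #[test]
        fn unary_ops_from_tokens() {
            // Star maps to Reference and At to Dereference via the new TryFrom arms.
            assert!(matches!(UnaryOp::try_from(TokenType::Star), Ok(UnaryOp::Reference)));
            assert!(matches!(UnaryOp::try_from(TokenType::At), Ok(UnaryOp::Dereference)));
        }
    }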