Merge branch 'master' of github.com:kengorab/abra-lang into llvm2
kengorab committed Oct 12, 2023
Commit 3fda77b (2 parents: dfd4ba0 + fb19518)
Showing 11 changed files with 673 additions and 318 deletions.
abra_cli/abra-files/example.abra (0 additions, 38 deletions)
@@ -1,39 +1 @@
24
-//1.00
-//1.23
-//false
-//"hello"
-//println("hello")
-//println(123)
-//
-//type Foo<T> {
-// t: T
-//
-// func foo(self, i: Int) {}
-// func bar<U, V>(self, u: U, v: V): (U, V) {
-// self.foo(1)
-// (u, v)
-// }
-//}
-//
-////func q<T>(i: T) {}
-////func a(i: Int) {}
-////val fns = [q, a] // should be an error: Cannot use generic function q in this context yet
-//
-//val f = Foo(t: 1)
-////val f = Foo(t: 1)
-//
-////f.foo(1)
-////f.foo(2)
-//f.bar("f", "f")
-//f.bar("f", 134)
-//f.bar(1.23, "f")
-//f.bar(1.23, 123)
-
-//val arr: Int[][] = [[], []]
-
-// TODO: So should this
-//func makeArray<U>(): U[] = []
-//type Foo<T> {
-// a: T[] = makeArray()
-//}
abra_core/src/lexer/lexer.rs (3 additions, 1 deletion)
@@ -470,6 +470,7 @@ impl<'a> Lexer<'a> {
',' => Ok(Some(Token::Comma(pos))),
':' => Ok(Some(Token::Colon(pos))),
'.' => Ok(Some(Token::Dot(pos))),
+'@' => Ok(Some(Token::At(pos))),
_ => Ok(None)
}
}
@@ -602,7 +603,7 @@ mod tests {

#[test]
fn test_tokenize_separators() {
-let input = "( ) [ ] { } | , : ? #{";
+let input = "( ) [ ] { } | , : ? #{ @";
let tokens = tokenize(input).unwrap();
let expected = vec![
Token::LParen(Position::new(1, 1), false),
@@ ... @@
Token::Colon(Position::new(1, 17)),
Token::Question(Position::new(1, 19)),
Token::LBraceHash(Position::new(1, 21)),
+Token::At(Position::new(1, 24)),
];
assert_eq!(expected, tokens);
}
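For context: with the new `'@'` arm above, the lexer maps a bare `@` to `Token::At`. A minimal standalone check, as a sketch to sit alongside the updated test in the same module; it assumes the `tokenize` helper the existing tests use (which supplies a default module id), and the test name is hypothetical:

```rust
#[test]
fn test_tokenize_at() {
    // Hypothetical test, mirroring test_tokenize_separators above.
    let tokens = tokenize("@").unwrap();
    assert_eq!(vec![Token::At(Position::new(1, 1))], tokens);
}
```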
abra_core/src/lexer/tokens.rs (4 additions, 1 deletion)
@@ -136,6 +136,7 @@ pub enum Token {
#[strum(to_string = ".", serialize = "Dot")] Dot(Position),
#[strum(to_string = "?.", serialize = "QuestionDot")] QuestionDot(Position),
#[strum(to_string = "=>", serialize = "Arrow")] Arrow(Position),
#[strum(to_string = "@", serialize = "At")] At(Position),
}

impl Token {
@@ -212,7 +213,8 @@ impl Token {
Token::Question(pos) |
Token::Dot(pos) |
Token::QuestionDot(pos) |
-Token::Arrow(pos) => pos
+Token::Arrow(pos) |
+Token::At(pos) => pos
};
pos.clone()
}
@@ -296,6 +298,7 @@ impl Token {
Token::Dot(pos) => Range::with_length(pos, 0),
Token::QuestionDot(pos) => Range::with_length(pos, 1),
Token::Arrow(pos) => Range::with_length(pos, 1),
+Token::At(pos) => Range::with_length(pos, 0),
}
}

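A note on the new `get_range` arm: judging from the neighboring arms, the length passed to `Range::with_length` is the token's character width minus one (the one-character `.` uses 0, the two-character `?.` and `=>` use 1), so 0 is consistent for the one-character `@`. A small usage sketch, under the assumption that these types and methods are publicly exported at this module path:

```rust
// Sketch only: the module path and method visibility are assumptions.
use abra_core::lexer::tokens::{Position, Token};

fn main() {
    let at = Token::At(Position::new(1, 24));
    let _pos = at.get_position(); // clones the Position out of the At variant
    let _range = at.get_range();  // covers exactly the one-character "@"
}
```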
abra_core/src/lib.rs (0 additions, 10 deletions)
@@ -74,16 +74,6 @@ fn tokenize_and_parse(module_id: &ModuleId, input: &String) -> Result<ParseResult, Error> {
}
}

-fn tokenize_and_parse_stub(module_id: &ModuleId, input: &String) -> Result<ParseResult, Error> {
-match lexer::lexer::tokenize(module_id, input) {
-Err(e) => Err(Error::LexerError(e)),
-Ok(tokens) => match parser::parser::parse_stub(module_id.clone(), tokens) {
-Err(e) => Err(Error::ParseError(e)),
-Ok(nodes) => Ok(nodes)
-}
-}
-}
-
pub fn typecheck<R>(module_id: ModuleId, input: &String, loader: &mut ModuleLoader<R>) -> Result<TypedModule, Error>
where R: ModuleReader
{
abra_core/src/parser/ast.rs (12 additions, 0 deletions)
@@ -231,6 +231,7 @@ impl BindingPattern {

#[derive(Clone, Debug, PartialEq)]
pub struct BindingDeclNode {
+pub decorators: Vec<DecoratorNode>,
pub export_token: Option<Token>,
pub binding: BindingPattern,
pub type_ann: Option<TypeIdentifier>,
@@ -253,6 +254,7 @@ pub fn args_to_parameters(raw_arg_tuple: &(Token, Option<TypeIdentifier>, bool,

#[derive(Clone, Debug, PartialEq)]
pub struct FunctionDeclNode {
+pub decorators: Vec<DecoratorNode>,
pub export_token: Option<Token>,
// Must be a Token::Ident
pub name: Token,
@@ -284,6 +286,7 @@ impl LambdaNode {

#[derive(Clone, Debug, PartialEq)]
pub struct TypeDeclNode {
+pub decorators: Vec<DecoratorNode>,
pub export_token: Option<Token>,
// Must be a Token::Ident
pub name: Token,
@@ -303,6 +306,7 @@ pub struct TypeDeclField {

#[derive(Clone, Debug, PartialEq)]
pub struct EnumDeclNode {
+pub decorators: Vec<DecoratorNode>,
pub export_token: Option<Token>,
// Must be a Token::Ident
pub name: Token,
@@ -440,6 +444,14 @@ pub struct ImportNode {
pub module_id: ModuleId,
}

+#[derive(Clone, Debug, PartialEq)]
+pub struct DecoratorNode {
+pub at_token: Token,
+// Must be a Token::Ident
+pub name: Token,
+pub args: Vec<(Option<Token>, AstNode)>,
+}
+
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub enum ModulePathSegment {
CurrentDir,
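Taken together, the ast.rs hunks lay the parser-side groundwork for decorators: binding, function, type, and enum declarations each gain a `decorators` vector, and `DecoratorNode` records the `@` token, the decorator name, and a list of optional-label/value argument pairs. The decorator surface syntax itself is not shown in this commit, so the following is a self-contained sketch with simplified stand-in types (not the real `Token`/`AstNode` enums), illustrating how a hypothetical `@dep(reason: "use bar")` might populate the node:

```rust
// Stand-in mirrors of the AST shapes added in this commit; the real Token and
// AstNode enums are richer, and the decorator surface syntax is an assumption.
#[derive(Debug)]
enum Token {
    At(u32, u32),            // (line, col) of the '@'
    Ident(u32, u32, String), // (line, col, name)
}

#[derive(Debug)]
enum AstNode {
    StringLiteral(String),
}

#[derive(Debug)]
struct DecoratorNode {
    at_token: Token,
    name: Token,                         // must be a Token::Ident
    args: Vec<(Option<Token>, AstNode)>, // (optional argument label, value)
}

fn main() {
    // Hypothetical decorator: @dep(reason: "use bar")
    let dep = DecoratorNode {
        at_token: Token::At(1, 1),
        name: Token::Ident(1, 2, "dep".to_string()),
        args: vec![(
            Some(Token::Ident(1, 6, "reason".to_string())),
            AstNode::StringLiteral("use bar".to_string()),
        )],
    };
    // A declaration node would carry this in its new `decorators: Vec<DecoratorNode>`.
    println!("{dep:?}");
}
```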
[Diffs for the remaining 6 changed files were not captured on this page.]
