diff --git a/kalk/Cargo.toml b/kalk/Cargo.toml
index cbe672e..26f7984 100644
--- a/kalk/Cargo.toml
+++ b/kalk/Cargo.toml
@@ -12,4 +12,5 @@ panic = "abort"
 
 [dependencies]
 phf = { version = "0.8", features = ["macros"] }
-rug = "1.9.0"
\ No newline at end of file
+rug = "1.9.0"
+test-case = "1.0.0"
diff --git a/kalk/src/lexer.rs b/kalk/src/lexer.rs
index c7722d1..970d15a 100644
--- a/kalk/src/lexer.rs
+++ b/kalk/src/lexer.rs
@@ -159,3 +159,74 @@ fn build(kind: TokenKind, value: &str) -> Token {
 fn is_valid_identifier(c: char) -> bool {
     c.is_alphabetic() || c == '°' || c == '√' || c == '\'' || c == '¨' || c == 'Σ'
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::ast::compare_enums;
+    use test_case::test_case;
+
+    fn match_tokens(tokens: Vec<Token>, expected: Vec<TokenKind>) {
+        let mut expected_iter = expected.iter();
+
+        for token in tokens {
+            assert!(compare_enums(&token.kind, &expected_iter.next().unwrap()));
+        }
+    }
+
+    #[test]
+    fn test_token_kinds() {
+        let tokens = Lexer::lex("+-*/^()|=!,");
+        let expected = vec![
+            TokenKind::Plus,
+            TokenKind::Minus,
+            TokenKind::Star,
+            TokenKind::Slash,
+            TokenKind::Power,
+            TokenKind::OpenParenthesis,
+            TokenKind::ClosedParenthesis,
+            TokenKind::Pipe,
+            TokenKind::Equals,
+            TokenKind::Exclamation,
+            TokenKind::Comma,
+            TokenKind::EOF,
+        ];
+
+        match_tokens(tokens, expected);
+    }
+
+    #[test_case("1")]
+    #[test_case("24")]
+    #[test_case("56.4")]
+    fn test_number_literal(input: &str) {
+        let tokens = Lexer::lex(input);
+        let expected = vec![TokenKind::Literal, TokenKind::EOF];
+
+        assert_eq!(&tokens[0].value, input);
+        match_tokens(tokens, expected);
+    }
+
+    #[test_case("x")]
+    #[test_case("xy")]
+    fn test_identifier(input: &str) {
+        let tokens = Lexer::lex(input);
+        let expected = vec![TokenKind::Identifier, TokenKind::EOF];
+
+        assert_eq!(&tokens[0].value, input);
+        match_tokens(tokens, expected);
+    }
+
+    #[test]
+    fn test_function_call() {
+        let tokens = Lexer::lex("f(x)");
+        let expected = vec![
+            TokenKind::Identifier,
+            TokenKind::OpenParenthesis,
+            TokenKind::Identifier,
+            TokenKind::ClosedParenthesis,
+            TokenKind::EOF,
+        ];
+
+        match_tokens(tokens, expected);
+    }
+}