diff --git a/.vscode/tasks.json b/.vscode/tasks.json
index a7ad9ffd15..0398f3beb7 100644
--- a/.vscode/tasks.json
+++ b/.vscode/tasks.json
@@ -17,6 +17,21 @@
                 "kind": "build",
                 "isDefault": true
             }
+        },
+        {
+            "type": "cargo",
+            "label": "cargo Test",
+            "command": "cargo",
+            "args": [
+                "test"
+            ],
+            "problemMatcher": [
+                "$rustc"
+            ],
+            "group": {
+                "kind": "test",
+                "isDefault": true
+            }
         }
     ]
 }
\ No newline at end of file
diff --git a/src/bin/bin.rs b/src/bin/bin.rs
index fa84b5c8a3..f7f5e36cc5 100644
--- a/src/bin/bin.rs
+++ b/src/bin/bin.rs
@@ -3,9 +3,8 @@ use js::syntax::lexer::Lexer;
 use std::fs::read_to_string;
 
 pub fn main() {
-    let buffer = read_to_string("test.js").unwrap();
+    let buffer = read_to_string("tests/js/defineVar.js").unwrap();
     let mut lexer = Lexer::new(&buffer);
     lexer.lex().expect("finished");
     println!("{:?}", lexer.tokens);
-    println!("Hello World");
 }
diff --git a/test.js b/test.js
deleted file mode 100644
index de896e4235..0000000000
--- a/test.js
+++ /dev/null
@@ -1 +0,0 @@
-let a = (2 * 2);
\ No newline at end of file
diff --git a/tests/js/defineVar.js b/tests/js/defineVar.js
new file mode 100644
index 0000000000..0f504264f5
--- /dev/null
+++ b/tests/js/defineVar.js
@@ -0,0 +1 @@
+console.log("hello world");
\ No newline at end of file
diff --git a/tests/lexer_test.rs b/tests/lexer_test.rs
new file mode 100644
index 0000000000..7274220a07
--- /dev/null
+++ b/tests/lexer_test.rs
@@ -0,0 +1,23 @@
+extern crate js;
+use js::syntax::ast::keyword::Keyword;
+use js::syntax::ast::punc::Punctuator;
+use js::syntax::ast::token::TokenData;
+use js::syntax::lexer::Lexer;
+
+#[test]
+/// Check basic variable definition tokens
+fn check_variable_definition_tokens() {
+    let s = &String::from("let a = 'hello';");
+    let mut lexer = Lexer::new(s);
+    lexer.lex().expect("finished");
+    assert_eq!(lexer.tokens[0].data, TokenData::Keyword(Keyword::Let));
+    assert_eq!(lexer.tokens[1].data, TokenData::Identifier("a".to_string()));
+    assert_eq!(
+        lexer.tokens[2].data,
+        TokenData::Punctuator(Punctuator::Assign)
+    );
+    assert_eq!(
+        lexer.tokens[3].data,
+        TokenData::StringLiteral("hello".to_string())
+    );