diff --git a/src/bin/bin.rs b/src/bin/bin.rs
index 5f2dbe7dc1..cdc8482941 100644
--- a/src/bin/bin.rs
+++ b/src/bin/bin.rs
@@ -1,6 +1,12 @@
+extern crate js;
+use js::syntax::lexer::Lexer;
 use std::fs::read_to_string;
+use std::fs::File;
+use std::io::BufReader;
 
 pub fn main() {
-    let buffer = read_to_string("test.js").unwrap();
-    println!("{}", buffer);
-}
\ No newline at end of file
+    let mut f = File::open("test.js").expect("file not found");
+    let mut reader = BufReader::new(f);
+    let mut lexer = Lexer::new(reader);
+    lexer.lex()
+}
diff --git a/src/lib/syntax/lexer.rs b/src/lib/syntax/lexer.rs
index 1373617d63..550f2772b0 100644
--- a/src/lib/syntax/lexer.rs
+++ b/src/lib/syntax/lexer.rs
@@ -1,20 +1,22 @@
+use std::io::{BufRead, BufReader, ErrorKind};
+use std::str::Chars;
 use syntax::ast::punc::Punctuator;
 use syntax::ast::token::{Token, TokenData};
 
 /// A javascript Lexer
-pub struct Lexer {
+pub struct Lexer<B> {
     // The list fo tokens generated so far
     pub tokens: Vec<Token>,
     // The current line number in the script
     line_number: u64,
     // the current column number in the script
     column_number: u64,
-    // the reader
-    buffer: String,
+    // The full string
+    buffer: B,
 }
 
-impl Lexer {
-    pub fn new(buffer: String) -> Lexer {
+impl<B: BufRead> Lexer<B> {
+    pub fn new(buffer: B) -> Lexer<B> {
         Lexer {
             tokens: Vec::new(),
             line_number: 1,
@@ -33,22 +35,17 @@ impl Lexer {
         self.push_token(TokenData::TPunctuator(punc));
     }
 
-    /// Processes an input stream from a string into an array of tokens
-    pub fn lex_str(script: String) -> Vec<Token> {
-        let mut lexer = Lexer::new(script);
-        lexer.tokens
+    fn next(&mut self) -> char {
+        let mut buffer = [0; 1];
+        self.buffer.read_exact(&mut buffer).unwrap();
+        let result = buffer[0] as char;
+        result
     }
 
-    fn next(&mut self) -> Option<char> {
-        self.buffer.chars().next()
-    }
-
-    pub fn lex(&mut self) -> Result<(), &str> {
+    pub fn lex(&mut self) {
         loop {
-            let ch = match self.next() {
-                Some(ch) => ch,
-                None => return Err("oh my days"),
-            };
+            let ch = self.next();
+            println!("{}", ch);
         }
     }
 }