From 686a7368e3ef06a3c3df5b85576ceb4990f1926e Mon Sep 17 00:00:00 2001
From: jasonwilliams
Date: Wed, 15 Aug 2018 19:46:41 +0100
Subject: [PATCH] removing T from token

---
 src/lib/syntax/ast/token.rs | 40 ++++++++++++++++++--------------------
 src/lib/syntax/lexer.rs     | 26 +++++++++++++-------------
 2 files changed, 33 insertions(+), 33 deletions(-)

diff --git a/src/lib/syntax/ast/token.rs b/src/lib/syntax/ast/token.rs
index 0685c78769..a5693a3f2e 100644
--- a/src/lib/syntax/ast/token.rs
+++ b/src/lib/syntax/ast/token.rs
@@ -25,40 +25,40 @@ impl Token {
 /// Represents the type of Token
 pub enum TokenData {
     /// A boolean literal, which is either `true` or `false`
-    TBooleanLiteral(bool),
+    BooleanLiteral(bool),
     /// The end of the file
-    TEOF,
+    EOF,
     /// An identifier
-    TIdentifier(String),
+    Identifier(String),
     /// A keyword
-    TKeyword(Keyword),
+    Keyword(Keyword),
     /// A `null` literal
-    TNullLiteral,
+    NullLiteral,
     /// A numeric literal
-    TNumericLiteral(f64),
+    NumericLiteral(f64),
     /// A piece of punctuation
-    TPunctuator(Punctuator),
+    Punctuator(Punctuator),
     /// A string literal
-    TStringLiteral(String),
+    StringLiteral(String),
     /// A regular expression
-    TRegularExpression(String),
+    RegularExpression(String),
     /// A comment
-    TComment(String),
+    Comment(String),
 }
 
 impl Display for TokenData {
     fn fmt(&self, f: &mut Formatter) -> Result {
         match self.clone() {
-            TokenData::TBooleanLiteral(val) => write!(f, "{}", val),
-            TokenData::TEOF => write!(f, "end of file"),
-            TokenData::TIdentifier(ident) => write!(f, "{}", ident),
-            TokenData::TKeyword(word) => write!(f, "{}", word),
-            TokenData::TNullLiteral => write!(f, "null"),
-            TokenData::TNumericLiteral(num) => write!(f, "{}", num),
-            TokenData::TPunctuator(punc) => write!(f, "{}", punc),
-            TokenData::TStringLiteral(lit) => write!(f, "{}", lit),
-            TokenData::TRegularExpression(reg) => write!(f, "{}", reg),
-            TokenData::TComment(comm) => write!(f, "/*{}*/", comm),
+            TokenData::BooleanLiteral(val) => write!(f, "{}", val),
+            TokenData::EOF => write!(f, "end of file"),
+            TokenData::Identifier(ident) => write!(f, "{}", ident),
+            TokenData::Keyword(word) => write!(f, "{}", word),
+            TokenData::NullLiteral => write!(f, "null"),
+            TokenData::NumericLiteral(num) => write!(f, "{}", num),
+            TokenData::Punctuator(punc) => write!(f, "{}", punc),
+            TokenData::StringLiteral(lit) => write!(f, "{}", lit),
+            TokenData::RegularExpression(reg) => write!(f, "{}", reg),
+            TokenData::Comment(comm) => write!(f, "/*{}*/", comm),
         }
     }
 }
diff --git a/src/lib/syntax/lexer.rs b/src/lib/syntax/lexer.rs
index dcae66467a..b0980c058a 100644
--- a/src/lib/syntax/lexer.rs
+++ b/src/lib/syntax/lexer.rs
@@ -68,7 +68,7 @@ impl<'a> Lexer<'a> {
 
     /// Push a punctuation token
     fn push_punc(&mut self, punc: Punctuator) {
-        self.push_token(TokenData::TPunctuator(punc));
+        self.push_token(TokenData::Punctuator(punc));
     }
 
     fn next(&mut self) -> Result {
@@ -186,7 +186,7 @@ impl<'a> Lexer<'a> {
                         ch => buf.push(ch),
                     }
                 }
-                self.push_token(TokenData::TStringLiteral(buf));
+                self.push_token(TokenData::StringLiteral(buf));
             }
             '0' => {
                 let mut buf = String::new();
@@ -218,7 +218,7 @@ impl<'a> Lexer<'a> {
                    }
                    u64::from_str_radix(&buf, 8).unwrap()
                };
-                self.push_token(TokenData::TNumericLiteral(num as f64))
+                self.push_token(TokenData::NumericLiteral(num as f64))
             }
             _ if ch.is_digit(10) => {
                 let mut buf = ch.to_string();
@@ -235,7 +235,7 @@ impl<'a> Lexer<'a> {
                    }
                }
                // TODO make this a bit more safe -------------------------------VVVV
-                self.push_token(TokenData::TNumericLiteral(f64::from_str(&buf).unwrap()))
+                self.push_token(TokenData::NumericLiteral(f64::from_str(&buf).unwrap()))
             }
             _ if ch.is_alphabetic() || ch == '$' || ch == '_' => {
                 let mut buf = ch.to_string();
@@ -253,12 +253,12 @@ impl<'a> Lexer<'a> {
                 // Match won't compare &String to &str so i need to convert it :(
                 let buf_compare: &str = &buf;
                 self.push_token(match buf_compare {
-                    "true" => TokenData::TBooleanLiteral(true),
-                    "false" => TokenData::TBooleanLiteral(false),
-                    "null" => TokenData::TNullLiteral,
+                    "true" => TokenData::BooleanLiteral(true),
+                    "false" => TokenData::BooleanLiteral(false),
+                    "null" => TokenData::NullLiteral,
                     slice => match FromStr::from_str(slice) {
-                        Ok(keyword) => TokenData::TKeyword(keyword),
-                        Err(_) => TokenData::TIdentifier(buf.clone()),
+                        Ok(keyword) => TokenData::Keyword(keyword),
+                        Err(_) => TokenData::Identifier(buf.clone()),
                     },
                 });
             }
@@ -278,7 +278,7 @@ impl<'a> Lexer<'a> {
                     // Matched comment
                     '/' => {
                         let comment = self.read_line()?;
-                        TokenData::TComment(comment)
+                        TokenData::Comment(comment)
                     }
                     '*' => {
                         let mut buf = String::new();
@@ -294,10 +294,10 @@ impl<'a> Lexer<'a> {
                                 ch => buf.push(ch),
                             }
                         }
-                        TokenData::TComment(buf)
+                        TokenData::Comment(buf)
                     }
-                    '=' => TokenData::TPunctuator(Punctuator::AssignDiv),
-                    _ => TokenData::TPunctuator(Punctuator::Div),
+                    '=' => TokenData::Punctuator(Punctuator::AssignDiv),
+                    _ => TokenData::Punctuator(Punctuator::Div),
                 };
                 self.push_token(token)
             }
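
Note (not part of the patch itself): since enum variants are always reached through the `TokenData::` path, the old `T` prefix carried no extra information, and the rename is purely mechanical. Below is a minimal, self-contained sketch of how downstream code reads after the change; the trimmed-down variant set and the `describe` helper are illustrative assumptions, not the actual boa source.

    // Illustrative mirror of the patched enum, reduced to a few variants.
    enum TokenData {
        BooleanLiteral(bool),
        NullLiteral,
        NumericLiteral(f64),
        StringLiteral(String),
        EOF,
    }

    // Hypothetical consumer matching on the renamed, prefix-free variants.
    fn describe(token: &TokenData) -> String {
        match token {
            TokenData::BooleanLiteral(b) => format!("boolean: {}", b),
            TokenData::NullLiteral => "null".to_string(),
            TokenData::NumericLiteral(n) => format!("number: {}", n),
            TokenData::StringLiteral(s) => format!("string: {}", s),
            TokenData::EOF => "end of file".to_string(),
        }
    }

    fn main() {
        let tokens = vec![
            TokenData::BooleanLiteral(true),
            TokenData::NullLiteral,
            TokenData::NumericLiteral(42.0),
            TokenData::StringLiteral(String::from("hello")),
            TokenData::EOF,
        ];
        for t in &tokens {
            println!("{}", describe(t));
        }
    }

Behaviour is unchanged by the patch, including the Display output; only the variant names differ.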