
Implement template literals and tagged templates (#997)

* Implement template literals and tagged templates

* Merge master into for-in

* Implement suggestions from review

* Implement suggestions from review

Co-authored-by: tofpie <tofpie@users.noreply.github.com>
Branch: pull/1069/head
Author: tofpie (4 years ago), committed by GitHub
Commit: f62a77d3fa
Changed files:

 1. boa/src/syntax/ast/node/mod.rs (12)
 2. boa/src/syntax/ast/node/template/mod.rs (156)
 3. boa/src/syntax/ast/node/template/tests.rs (31)
 4. boa/src/syntax/lexer/cursor.rs (10)
 5. boa/src/syntax/lexer/mod.rs (7)
 6. boa/src/syntax/lexer/string.rs (112)
 7. boa/src/syntax/lexer/template.rs (74)
 8. boa/src/syntax/lexer/tests.rs (35)
 9. boa/src/syntax/lexer/token.rs (41)
 10. boa/src/syntax/parser/cursor/buffered_lexer/mod.rs (6)
 11. boa/src/syntax/parser/cursor/mod.rs (5)
 12. boa/src/syntax/parser/expression/left_hand_side/call.rs (13)
 13. boa/src/syntax/parser/expression/left_hand_side/member.rs (14)
 14. boa/src/syntax/parser/expression/left_hand_side/mod.rs (1)
 15. boa/src/syntax/parser/expression/left_hand_side/template.rs (90)
 16. boa/src/syntax/parser/expression/primary/mod.rs (18)
 17. boa/src/syntax/parser/expression/primary/template/mod.rs (104)
 18. boa/src/syntax/parser/function/mod.rs (3)
 19. boa/src/syntax/parser/mod.rs (3)
 20. test_ignore.txt (4)

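For orientation, a minimal sketch of what the new syntax does at the JavaScript level, written in the style of the test files added below. It assumes the crate's existing `exec` test helper, and the expected output strings follow the formatting used by the new tests in this commit; the `tag` function is just a script-local example name.

    use crate::exec;

    #[test]
    fn template_overview_sketch() {
        // An untagged template literal evaluates to an ordinary string.
        let interpolation = r#"
            let name = "boa";
            `hello ${name}`;
        "#;
        assert_eq!(&exec(interpolation), "\"hello boa\"");

        // A tagged template calls the tag function with the cooked string parts
        // (plus a `raw` property) followed by the substitution values.
        let tagged = r#"
            function tag(strings, value) {
                return strings[0] + value + strings[1];
            }
            tag`1 + 1 = ${1 + 1}!`;
        "#;
        assert_eq!(&exec(tagged), "\"1 + 1 = 2!\"");
    }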
boa/src/syntax/ast/node/mod.rs (12)

@@ -17,6 +17,7 @@ pub mod return_smt;
 pub mod spread;
 pub mod statement_list;
 pub mod switch;
+pub mod template;
 pub mod throw;
 pub mod try_node;
@@ -41,6 +42,7 @@ pub use self::{
     spread::Spread,
     statement_list::{RcStatementList, StatementList},
     switch::{Case, Switch},
+    template::{TaggedTemplate, TemplateLit},
     throw::Throw,
     try_node::{Catch, Finally, Try},
 };
@@ -160,6 +162,12 @@ pub enum Node {
     /// A spread (...x) statement. [More information](./spread/struct.Spread.html).
     Spread(Spread),
 
+    /// A tagged template. [More information](./template/struct.TaggedTemplate.html).
+    TaggedTemplate(TaggedTemplate),
+
+    /// A template literal. [More information](./template/struct.TemplateLit.html).
+    TemplateLit(TemplateLit),
+
     /// A throw statement. [More information](./throw/struct.Throw.html).
     Throw(Throw),
@@ -257,6 +265,8 @@ impl Node {
             Self::BinOp(ref op) => Display::fmt(op, f),
             Self::UnaryOp(ref op) => Display::fmt(op, f),
             Self::Return(ref ret) => Display::fmt(ret, f),
+            Self::TaggedTemplate(ref template) => Display::fmt(template, f),
+            Self::TemplateLit(ref template) => Display::fmt(template, f),
             Self::Throw(ref throw) => Display::fmt(throw, f),
             Self::Assign(ref op) => Display::fmt(op, f),
             Self::LetDeclList(ref decl) => Display::fmt(decl, f),
@@ -309,6 +319,8 @@ impl Executable for Node {
             Node::UnaryOp(ref op) => op.run(context),
             Node::New(ref call) => call.run(context),
             Node::Return(ref ret) => ret.run(context),
+            Node::TaggedTemplate(ref template) => template.run(context),
+            Node::TemplateLit(ref template) => template.run(context),
             Node::Throw(ref throw) => throw.run(context),
             Node::Assign(ref op) => op.run(context),
             Node::VarDeclList(ref decl) => decl.run(context),

boa/src/syntax/ast/node/template/mod.rs (new file, 156 lines)

@@ -0,0 +1,156 @@
//! Template literal node.

use super::Node;
use crate::{builtins::Array, exec::Executable, value::Type, BoaProfiler, Context, Result, Value};
use gc::{Finalize, Trace};
#[cfg(feature = "deser")]
use serde::{Deserialize, Serialize};
use std::fmt;

#[cfg(test)]
mod tests;

/// Template literals are string literals allowing embedded expressions.
///
/// More information:
/// - [ECMAScript reference][spec]
/// - [MDN documentation][mdn]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals
/// [spec]: https://tc39.es/ecma262/#sec-template-literals
#[cfg_attr(feature = "deser", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, Trace, Finalize, PartialEq)]
pub struct TemplateLit {
    elements: Vec<TemplateElement>,
}

impl TemplateLit {
    pub fn new(elements: Vec<TemplateElement>) -> Self {
        TemplateLit { elements }
    }
}

impl Executable for TemplateLit {
    fn run(&self, context: &mut Context) -> Result<Value> {
        let _timer = BoaProfiler::global().start_event("TemplateLiteral", "exec");
        let mut result = String::new();

        for element in self.elements.iter() {
            match element {
                TemplateElement::String(s) => {
                    result.push_str(s);
                }
                TemplateElement::Expr(node) => {
                    let value = node.run(context)?;
                    let s = value.to_string(context)?;
                    result.push_str(&s);
                }
            }
        }
        Ok(result.into())
    }
}

impl fmt::Display for TemplateLit {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "`")?;
        for elt in &self.elements {
            match elt {
                TemplateElement::String(s) => write!(f, "{}", s)?,
                TemplateElement::Expr(n) => write!(f, "${{{}}}", n)?,
            }
        }
        write!(f, "`")
    }
}

#[cfg_attr(feature = "deser", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, Trace, Finalize, PartialEq)]
pub struct TaggedTemplate {
    tag: Box<Node>,
    raws: Vec<Box<str>>,
    cookeds: Vec<Box<str>>,
    exprs: Vec<Node>,
}

impl TaggedTemplate {
    pub fn new(tag: Node, raws: Vec<Box<str>>, cookeds: Vec<Box<str>>, exprs: Vec<Node>) -> Self {
        Self {
            tag: Box::new(tag),
            raws,
            cookeds,
            exprs,
        }
    }
}

impl Executable for TaggedTemplate {
    fn run(&self, context: &mut Context) -> Result<Value> {
        let _timer = BoaProfiler::global().start_event("TaggedTemplate", "exec");

        let template_object = Array::new_array(context)?;
        let raw_array = Array::new_array(context)?;

        for (i, raw) in self.raws.iter().enumerate() {
            raw_array.set_field(i, Value::from(raw), context)?;
        }
        for (i, cooked) in self.cookeds.iter().enumerate() {
            template_object.set_field(i, Value::from(cooked), context)?;
        }
        template_object.set_field("raw", raw_array, context)?;

        let (this, func) = match *self.tag {
            Node::GetConstField(ref get_const_field) => {
                let mut obj = get_const_field.obj().run(context)?;
                if obj.get_type() != Type::Object {
                    obj = Value::Object(obj.to_object(context)?);
                }
                (
                    obj.clone(),
                    obj.get_field(get_const_field.field(), context)?,
                )
            }
            Node::GetField(ref get_field) => {
                let obj = get_field.obj().run(context)?;
                let field = get_field.field().run(context)?;
                (
                    obj.clone(),
                    obj.get_field(field.to_property_key(context)?, context)?,
                )
            }
            _ => (context.global_object().clone(), self.tag.run(context)?),
        };

        let mut args = Vec::new();
        args.push(template_object);
        for expr in self.exprs.iter() {
            args.push(expr.run(context)?);
        }

        context.call(&func, &this, &args)
    }
}

impl fmt::Display for TaggedTemplate {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}`", self.tag)?;
        for (raw, expr) in self.raws.iter().zip(self.exprs.iter()) {
            write!(f, "{}${{{}}}", raw, expr)?;
        }
        write!(f, "`")
    }
}

impl From<TaggedTemplate> for Node {
    fn from(template: TaggedTemplate) -> Self {
        Node::TaggedTemplate(template)
    }
}

#[cfg_attr(feature = "deser", derive(Serialize, Deserialize))]
#[derive(Clone, Debug, Trace, Finalize, PartialEq)]
pub enum TemplateElement {
    String(Box<str>),
    Expr(Node),
}

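A small sketch of how the AST types above compose, building the tree for the template `result: ${a}` by hand and round-tripping it through `Display`. It assumes the usual re-exports from `ast::node` (`Identifier`, `Node`) that the rest of this diff relies on:

    use crate::syntax::ast::node::{
        template::{TemplateElement, TemplateLit},
        Identifier, Node,
    };

    #[test]
    fn build_template_lit_by_hand() {
        // `result: ${a}` is one string part followed by one embedded expression.
        let lit = TemplateLit::new(vec![
            TemplateElement::String("result: ".into()),
            TemplateElement::Expr(Node::from(Identifier::from("a"))),
        ]);

        // Display writes the literal back out in source form.
        assert_eq!(lit.to_string(), "`result: ${a}`");
    }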
boa/src/syntax/ast/node/template/tests.rs (new file, 31 lines)

@@ -0,0 +1,31 @@
use crate::exec;

#[test]
fn template_literal() {
    let scenario = r#"
        let a = 10;
        `result: ${a} and ${a+10}`;
        "#;

    assert_eq!(&exec(scenario), "\"result: 10 and 20\"");
}

#[test]
fn tagged_template() {
    let scenario = r#"
        function tag(t, ...args) {
            let a = []
            a = a.concat([t[0], t[1], t[2]]);
            a = a.concat([t.raw[0], t.raw[1], t.raw[2]]);
            a = a.concat([args[0], args[1]]);
            return a
        }
        let a = 10;
        tag`result: ${a} \x26 ${a+10}`;
        "#;

    assert_eq!(
        &exec(scenario),
        r#"[ "result: ", " & ", "", "result: ", " \x26 ", "", 10, 20 ]"#
    );
}

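The tagged test above also illustrates the raw/cooked split stored on `TaggedTemplate`. A sketch that builds a comparable node by hand, simplified to two identifier substitutions so no operator nodes are needed, again assuming the `ast::node` re-exports:

    use crate::syntax::ast::node::{Identifier, Node, TaggedTemplate};

    #[test]
    fn tagged_template_raw_vs_cooked() {
        // For tag`result: ${a} \x26 ${b}` the lexer produces three string parts.
        // The raw parts keep the source text verbatim, while the cooked parts
        // have the escape sequence \x26 decoded to '&'.
        let template = TaggedTemplate::new(
            Node::from(Identifier::from("tag")),
            vec!["result: ".into(), r" \x26 ".into(), "".into()], // raws
            vec!["result: ".into(), " & ".into(), "".into()],     // cookeds
            vec![
                Node::from(Identifier::from("a")),
                Node::from(Identifier::from("b")),
            ],
        );

        // Display interleaves the raw parts with the substitutions.
        assert_eq!(template.to_string(), r"tag`result: ${a} \x26 ${b}`");
    }

This is why the tag function in the test above sees `t[1] == " & "` but `t.raw[1] == " \x26 "`.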
boa/src/syntax/lexer/cursor.rs (10)

@@ -56,6 +56,16 @@ where
         }
     }
 
+    /// Creates a new Lexer cursor with an initial position.
+    #[inline]
+    pub(super) fn with_position(inner: R, pos: Position) -> Self {
+        Self {
+            iter: InnerIter::new(inner.bytes()),
+            pos,
+            strict_mode: false,
+        }
+    }
+
     /// Peeks the next byte.
     #[inline]
     pub(super) fn peek(&mut self) -> Result<Option<u8>, Error> {

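`with_position` lets a slice of already-seen source be re-lexed while keeping the original source coordinates; the template lexer below uses it to run the raw text of a template part back through `unescape_string`. A sketch of that pattern, written as if it were another test in boa/src/syntax/lexer/tests.rs (which has `Cursor`, `Position`, `unescape_string` and `StringTerminator` in scope after this commit); the expected cooked output assumes `\u{20ac}` and `\x26` decode as exercised by the tests in this commit:

    // (as if added to boa/src/syntax/lexer/tests.rs, alongside the new tests)
    #[test]
    fn unescape_with_offset_position() {
        let s = r#"\u{20ac} and \x26"#;
        // Pretend this text started at line 3, column 5 of the original source,
        // as the template lexer does when it re-lexes a raw template part.
        let mut cursor = Cursor::with_position(s.as_bytes(), Position::new(3, 5));
        let (unescaped, _span) = unescape_string(
            &mut cursor,
            Position::new(3, 5),
            StringTerminator::End,
            false,
        )
        .unwrap();
        assert_eq!(unescaped, "\u{20ac} and &");
    }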
boa/src/syntax/lexer/mod.rs (7)

@@ -281,6 +281,13 @@ impl<R> Lexer<R> {
             ))
         }
     }
 
+    pub(crate) fn lex_template(&mut self, start: Position) -> Result<Token, Error>
+    where
+        R: Read,
+    {
+        TemplateLiteral.lex(&mut self.cursor, start)
+    }
 }
 
 /// ECMAScript goal symbols.

boa/src/syntax/lexer/string.rs (112)

@@ -44,9 +44,10 @@ impl StringLiteral {
 
 /// Terminator for the string.
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
-enum StringTerminator {
+pub(crate) enum StringTerminator {
     SingleQuote,
     DoubleQuote,
+    End,
 }
 
 impl<R> Tokenizer<R> for StringLiteral {
@@ -56,37 +57,48 @@ impl<R> Tokenizer<R> for StringLiteral {
     {
         let _timer = BoaProfiler::global().start_event("StringLiteral", "Lexing");
 
-        let mut buf: Vec<u16> = Vec::new();
+        let (lit, span) =
+            unescape_string(cursor, start_pos, self.terminator, cursor.strict_mode())?;
+        Ok(Token::new(TokenKind::string_literal(lit), span))
+    }
+}
+
+pub(super) fn unescape_string<R>(
+    cursor: &mut Cursor<R>,
+    start_pos: Position,
+    terminator: StringTerminator,
+    strict_mode: bool,
+) -> Result<(String, Span), Error>
+where
+    R: Read,
+{
+    let mut buf = Vec::new();
     loop {
-        let next_chr_start = cursor.pos();
-        let next_chr = char::try_from(cursor.next_char()?.ok_or_else(|| {
-            Error::from(io::Error::new(
-                ErrorKind::UnexpectedEof,
-                "unterminated string literal",
-            ))
-        })?)
-        .unwrap();
+        let next_chr = cursor.next_char()?.map(char::try_from).transpose().unwrap();
 
         match next_chr {
-            '\'' if self.terminator == StringTerminator::SingleQuote => {
+            Some('\'') if terminator == StringTerminator::SingleQuote => {
                 break;
             }
-            '"' if self.terminator == StringTerminator::DoubleQuote => {
+            Some('"') if terminator == StringTerminator::DoubleQuote => {
                 break;
            }
-            '\\' => {
-                let _timer = BoaProfiler::global()
-                    .start_event("StringLiteral - escape sequence", "Lexing");
-                let escape = cursor.next_byte()?.ok_or_else(|| {
+            Some('\\') => {
+                let _timer =
+                    BoaProfiler::global().start_event("StringLiteral - escape sequence", "Lexing");
+                let escape = cursor.peek()?.ok_or_else(|| {
                     Error::from(io::Error::new(
                         ErrorKind::UnexpectedEof,
-                        "unterminated escape sequence in string literal",
+                        "unterminated escape sequence in literal",
                     ))
                 })?;
-                if escape != b'\n' {
+                if escape <= 0x7f {
+                    let _ = cursor.next_byte()?;
                     match escape {
+                        b'\n' => (),
                         b'n' => buf.push('\n' as u16),
                         b'r' => buf.push('\r' as u16),
                         b't' => buf.push('\t' as u16),
@@ -115,12 +127,11 @@ impl<R> Tokenizer<R> for StringLiteral {
                             let mut code_point_buf = Vec::with_capacity(6);
                             cursor.take_until(b'}', &mut code_point_buf)?;
 
-                            let code_point_str = unsafe {
-                                str::from_utf8_unchecked(code_point_buf.as_slice())
-                            };
+                            let code_point_str =
+                                unsafe { str::from_utf8_unchecked(code_point_buf.as_slice()) };
                             // We know this is a single unicode codepoint, convert to u32
-                            let code_point = u32::from_str_radix(&code_point_str, 16)
-                                .map_err(|_| {
+                            let code_point =
+                                u32::from_str_radix(&code_point_str, 16).map_err(|_| {
                                     Error::syntax(
                                         "malformed Unicode character escape sequence",
                                         cursor.pos(),
@@ -146,8 +157,8 @@ impl<R> Tokenizer<R> for StringLiteral {
                             // Convert to u16
                            let code_point_str = str::from_utf8(&code_point_utf8_bytes)
                                .expect("malformed Unicode character escape sequence");
-                            let code_point = u16::from_str_radix(code_point_str, 16)
-                                .map_err(|_| {
+                            let code_point =
+                                u16::from_str_radix(code_point_str, 16).map_err(|_| {
                                     Error::syntax(
                                         "invalid Unicode escape sequence",
                                         cursor.pos(),
@@ -157,19 +168,38 @@ impl<R> Tokenizer<R> for StringLiteral {
                                 buf.push(code_point);
                             }
                         }
-                        b'\'' | b'"' | b'\\' => buf.push(escape as u16),
-                        _ => {
-                            let details = format!(
-                                "invalid escape sequence at line {}, column {}",
-                                next_chr_start.line_number(),
-                                next_chr_start.column_number(),
-                            );
-                            return Err(Error::syntax(details, cursor.pos()));
-                        }
+                        n if char::is_digit(char::from(n), 8) => {
+                            if strict_mode {
+                                return Err(Error::syntax(
+                                    "octal escape sequences are deprecated",
+                                    cursor.pos(),
+                                ));
+                            }
+                            let mut o = char::from(n).to_digit(8).unwrap();
+                            match cursor.peek()? {
+                                Some(c) if char::is_digit(char::from(c), 8) => {
+                                    let _ = cursor.next_byte()?;
+                                    o = o * 8 + char::from(n).to_digit(8).unwrap();
+                                    if n <= b'3' {
+                                        match cursor.peek()? {
+                                            Some(c) if char::is_digit(char::from(c), 8) => {
+                                                let _ = cursor.next_byte();
+                                                o = o * 8 + char::from(n).to_digit(8).unwrap();
+                                            }
+                                            _ => (),
+                                        }
+                                    }
+                                }
+                                _ => (),
+                            }
+                            buf.push(o as u16);
+                        }
+                        _ => buf.push(escape as u16),
                     };
                 }
             }
-            next_ch => {
+            Some(next_ch) => {
                 if next_ch.len_utf16() == 1 {
                     buf.push(next_ch as u16);
                 } else {
@@ -179,12 +209,20 @@ impl<R> Tokenizer<R> for StringLiteral {
                     buf.extend(code_point_bytes.iter());
                 }
             }
+            None if terminator != StringTerminator::End => {
+                return Err(Error::from(io::Error::new(
+                    ErrorKind::UnexpectedEof,
+                    "unterminated string literal",
+                )));
+            }
+            None => {
+                break;
+            }
         }
     }
 
-        Ok(Token::new(
-            TokenKind::string_literal(String::from_utf16_lossy(buf.as_slice())),
-            Span::new(start_pos, cursor.pos()),
-        ))
-    }
-}
+    Ok((
+        String::from_utf16_lossy(buf.as_slice()),
+        Span::new(start_pos, cursor.pos()),
+    ))
+}

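The legacy octal escapes are the least obvious branch above: up to three octal digits are consumed, and a third digit is only taken when the first digit is 0 through 3, so the value stays within \377. A standalone sketch of that rule in plain Rust; this is not the lexer code itself, only an illustration of the digit-consumption logic, and it assumes the first byte passed in is already an octal digit:

    /// Decode a legacy octal escape from `digits`, returning the code unit and
    /// how many digits were consumed (1 to 3).
    fn decode_octal_escape(digits: &[u8]) -> (u16, usize) {
        let first = (digits[0] - b'0') as u16;
        let mut value = first;
        let mut used = 1;
        // A second octal digit is always allowed.
        if let Some(&d) = digits.get(1) {
            if d.is_ascii_digit() && d < b'8' {
                value = value * 8 + (d - b'0') as u16;
                used = 2;
                // A third digit only if the first digit was 0-3 (value <= 0o377).
                if first <= 3 {
                    if let Some(&d) = digits.get(2) {
                        if d.is_ascii_digit() && d < b'8' {
                            value = value * 8 + (d - b'0') as u16;
                            used = 3;
                        }
                    }
                }
            }
        }
        (value, used)
    }

    #[test]
    fn octal_escape_examples() {
        assert_eq!(decode_octal_escape(b"101"), (0o101, 3)); // "\101" == 'A'
        assert_eq!(decode_octal_escape(b"47"), (0o47, 2));   // "\47"  == '\''
        assert_eq!(decode_octal_escape(b"400"), (0o40, 2));  // only "\40" is taken
    }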
boa/src/syntax/lexer/template.rs (74)

@@ -3,13 +3,14 @@
 use super::{Cursor, Error, Tokenizer};
 use crate::{
     profiler::BoaProfiler,
+    syntax::lexer::string::{unescape_string, StringTerminator},
     syntax::{
         ast::{Position, Span},
         lexer::{Token, TokenKind},
     },
 };
+use std::convert::TryFrom;
 use std::io::{self, ErrorKind, Read};
-use std::str;
 
 /// Template literal lexing.
 ///
@@ -33,28 +34,65 @@ impl<R> Tokenizer<R> for TemplateLiteral {
         let mut buf = Vec::new();
         loop {
-            match cursor.next_byte()? {
-                None => {
-                    return Err(Error::from(io::Error::new(
-                        ErrorKind::UnexpectedEof,
-                        "Unterminated template literal",
-                    )));
-                }
-                Some(b'`') => break, // Template literal finished.
-                Some(next_byte) => buf.push(next_byte), // TODO when there is an expression inside the literal
-            }
-        }
+            let next_chr = char::try_from(cursor.next_char()?.ok_or_else(|| {
+                Error::from(io::Error::new(
+                    ErrorKind::UnexpectedEof,
+                    "unterminated template literal",
+                ))
+            })?)
+            .unwrap();
 
-        if let Ok(s) = str::from_utf8(buf.as_slice()) {
-            Ok(Token::new(
-                TokenKind::template_literal(s),
-                Span::new(start_pos, cursor.pos()),
-            ))
-        } else {
-            Err(Error::from(io::Error::new(
-                ErrorKind::InvalidData,
-                "Invalid UTF-8 character in template literal",
-            )))
-        }
+            match next_chr {
+                '`' => {
+                    let raw = String::from_utf16_lossy(buf.as_slice());
+                    let (cooked, _) = unescape_string(
+                        &mut Cursor::with_position(raw.as_bytes(), start_pos),
+                        start_pos,
+                        StringTerminator::End,
+                        true,
+                    )?;
+                    return Ok(Token::new(
+                        TokenKind::template_no_substitution(raw, cooked),
+                        Span::new(start_pos, cursor.pos()),
+                    ));
+                }
+                '$' if cursor.peek()? == Some(b'{') => {
+                    let _ = cursor.next_byte()?;
+                    let raw = String::from_utf16_lossy(buf.as_slice());
+                    let (cooked, _) = unescape_string(
+                        &mut Cursor::with_position(raw.as_bytes(), start_pos),
+                        start_pos,
+                        StringTerminator::End,
+                        true,
+                    )?;
+                    return Ok(Token::new(
+                        TokenKind::template_middle(raw, cooked),
+                        Span::new(start_pos, cursor.pos()),
+                    ));
+                }
+                '\\' => {
+                    let escape = cursor.peek()?.ok_or_else(|| {
+                        Error::from(io::Error::new(
+                            ErrorKind::UnexpectedEof,
+                            "unterminated escape sequence in literal",
+                        ))
+                    })?;
+                    buf.push('\\' as u16);
+                    match escape {
+                        b'`' | b'$' | b'\\' => buf.push(cursor.next_byte()?.unwrap() as u16),
+                        _ => continue,
+                    }
+                }
+                next_ch => {
+                    if next_ch.len_utf16() == 1 {
+                        buf.push(next_ch as u16);
+                    } else {
+                        let mut code_point_bytes_buf = [0u16; 2];
+                        let code_point_bytes = next_ch.encode_utf16(&mut code_point_bytes_buf);
+
+                        buf.extend(code_point_bytes.iter());
+                    }
+                }
+            }
+        }
     }
 }

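To make the tokenization above concrete: for the template `a${x}b`, the lexer first yields a TemplateMiddle part for the text before `${`, the parser then consumes the expression and the closing `}`, and a follow-up call to `lex_template` yields the final TemplateNoSubstitution part. A sketch in the style of the lexer tests, using the constructors added in token.rs below:

    // (in the style of boa/src/syntax/lexer/tests.rs, where TokenKind is in scope)
    #[test]
    fn template_token_kinds_sketch() {
        // Part before the first substitution in `a${x}b` -> TemplateMiddle.
        assert_eq!(
            TokenKind::template_middle("a", "a"),
            TokenKind::TemplateMiddle {
                raw: "a".into(),
                cooked: "a".into(),
            }
        );
        // Part after the last substitution, up to the closing backtick
        // -> TemplateNoSubstitution.
        assert_eq!(
            TokenKind::template_no_substitution("b", "b"),
            TokenKind::TemplateNoSubstitution {
                raw: "b".into(),
                cooked: "b".into(),
            }
        );
    }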
boa/src/syntax/lexer/tests.rs (35)

@@ -6,6 +6,7 @@ use super::token::Numeric;
 use super::*;
 use super::{Error, Position};
 use crate::syntax::ast::Keyword;
+use crate::syntax::lexer::string::{unescape_string, StringTerminator};
 use std::str;
 
 fn span(start: (u32, u32), end: (u32, u32)) -> Span {
@@ -136,7 +137,7 @@ fn check_template_literal_simple() {
     assert_eq!(
         lexer.next().unwrap().unwrap().kind(),
-        &TokenKind::template_literal("I'm a template literal")
+        &TokenKind::template_no_substitution("I'm a template literal", "I'm a template literal")
     );
 }
@@ -857,6 +858,38 @@ fn unicode_escape_with_braces() {
     }
 }
 
+#[test]
+fn unicode_escape_with_braces_() {
+    let s = r#"\u{20ac}\u{a0}\u{a0}"#.to_string();
+
+    let mut cursor = Cursor::new(s.as_bytes());
+
+    if let Ok((s, _)) = unescape_string(
+        &mut cursor,
+        Position::new(1, 1),
+        StringTerminator::End,
+        false,
+    ) {
+        assert_eq!(s, "\u{20ac}\u{a0}\u{a0}")
+    } else {
+        panic!();
+    }
+}
+
+#[test]
+fn unescape_string_with_single_escape() {
+    let s = r#"\Б"#.to_string();
+    let mut cursor = Cursor::new(s.as_bytes());
+    let (s, _) = unescape_string(
+        &mut cursor,
+        Position::new(1, 1),
+        StringTerminator::End,
+        false,
+    )
+    .unwrap();
+    assert_eq!(s, "Б");
+}
+
 mod carriage_return {
     use super::*;

boa/src/syntax/lexer/token.rs (41)

@@ -125,7 +125,21 @@ pub enum TokenKind {
     /// A string literal.
     StringLiteral(Box<str>),
 
-    TemplateLiteral(Box<str>),
+    /// A part of a template literal without substitution.
+    TemplateNoSubstitution {
+        /// The string as it has been entered, without processing escape sequences.
+        raw: Box<str>,
+        /// The raw string with escape sequences processed.
+        cooked: Box<str>,
+    },
+
+    /// The part of a template literal between substitutions
+    TemplateMiddle {
+        /// The string as it has been entered, without processing escape sequences.
+        raw: Box<str>,
+        /// The raw string with escape sequences processed.
+        cooked: Box<str>,
+    },
 
     /// A regular expression, consisting of body and flags.
     RegularExpressionLiteral(Box<str>, RegExpFlags),
@@ -206,12 +220,26 @@ impl TokenKind {
         Self::StringLiteral(lit.into())
     }
 
-    /// Creates a `TemplateLiteral` token type.
-    pub fn template_literal<S>(lit: S) -> Self
+    pub fn template_middle<R, C>(raw: R, cooked: C) -> Self
     where
-        S: Into<Box<str>>,
+        R: Into<Box<str>>,
+        C: Into<Box<str>>,
     {
-        Self::TemplateLiteral(lit.into())
+        Self::TemplateMiddle {
+            raw: raw.into(),
+            cooked: cooked.into(),
+        }
+    }
+
+    pub fn template_no_substitution<R, C>(raw: R, cooked: C) -> Self
+    where
+        R: Into<Box<str>>,
+        C: Into<Box<str>>,
+    {
+        Self::TemplateNoSubstitution {
+            raw: raw.into(),
+            cooked: cooked.into(),
+        }
     }
 
     /// Creates a `RegularExpressionLiteral` token kind.
@@ -247,7 +275,8 @@ impl Display for TokenKind {
             Self::NumericLiteral(Numeric::BigInt(ref num)) => write!(f, "{}n", num),
             Self::Punctuator(ref punc) => write!(f, "{}", punc),
             Self::StringLiteral(ref lit) => write!(f, "{}", lit),
-            Self::TemplateLiteral(ref lit) => write!(f, "{}", lit),
+            Self::TemplateNoSubstitution { ref cooked, .. } => write!(f, "{}", cooked),
+            Self::TemplateMiddle { ref cooked, .. } => write!(f, "{}", cooked),
             Self::RegularExpressionLiteral(ref body, ref flags) => write!(f, "/{}/{}", body, flags),
             Self::LineTerminator => write!(f, "line terminator"),
             Self::Comment => write!(f, "comment"),

boa/src/syntax/parser/cursor/buffered_lexer/mod.rs (6)

@@ -84,6 +84,12 @@ where
         self.lexer.lex_slash_token(start).map_err(|e| e.into())
     }
 
+    /// Lexes the next tokens as template middle or template tail assuming that the starting
+    /// '}' has already been consumed.
+    pub(super) fn lex_template(&mut self, start: Position) -> Result<Token, ParseError> {
+        self.lexer.lex_template(start).map_err(ParseError::from)
+    }
+
     #[inline]
     pub(super) fn strict_mode(&self) -> bool {
         self.lexer.strict_mode()

boa/src/syntax/parser/cursor/mod.rs (5)

@@ -46,6 +46,11 @@ where
         self.buffered_lexer.lex_regex(start)
     }
 
+    #[inline]
+    pub(super) fn lex_template(&mut self, start: Position) -> Result<Token, ParseError> {
+        self.buffered_lexer.lex_template(start)
+    }
+
     #[inline]
     pub(super) fn next(&mut self) -> Result<Option<Token>, ParseError> {
         self.buffered_lexer.next(true)

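Both new template parsers below drive this method the same way: after a substitution's expression they expect the closing `}` and then ask the lexer, rather than the buffered token stream, for the next template part, since a lone `}` cannot be re-interpreted as template text without parser context. A hypothetical helper, not part of this commit, showing that call shape; it assumes it lives inside the parser module so the `pub(super)` items used here are visible:

    use std::io::Read;

    use crate::syntax::{
        ast::{Position, Punctuator},
        lexer::TokenKind,
        parser::{cursor::Cursor, ParseError},
    };

    /// Hypothetical helper: fetch the next template part after a `${ ... }`
    /// substitution has been parsed. Returns (raw, cooked, is_last_part).
    fn next_template_part<R: Read>(
        cursor: &mut Cursor<R>,
        start: Position,
    ) -> Result<(Box<str>, Box<str>, bool), ParseError> {
        // The expression parser stops right before the substitution's closing brace.
        cursor.expect(
            TokenKind::Punctuator(Punctuator::CloseBlock),
            "template literal",
        )?;
        // Ask the lexer directly for the next template part.
        let token = cursor.lex_template(start)?;
        match token.kind() {
            // More text followed by another `${ ... }` substitution.
            TokenKind::TemplateMiddle { raw, cooked } => Ok((raw.clone(), cooked.clone(), false)),
            // The closing backtick was reached; this is the last part.
            TokenKind::TemplateNoSubstitution { raw, cooked } => {
                Ok((raw.clone(), cooked.clone(), true))
            }
            _ => Err(ParseError::general("cannot parse template literal", start)),
        }
    }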
boa/src/syntax/parser/expression/left_hand_side/call.rs (13)

@@ -19,8 +19,8 @@ use crate::{
         },
         lexer::TokenKind,
         parser::{
-            expression::Expression, AllowAwait, AllowYield, Cursor, ParseError, ParseResult,
-            TokenParser,
+            expression::{left_hand_side::template::TaggedTemplateLiteral, Expression},
+            AllowAwait, AllowYield, Cursor, ParseError, ParseResult, TokenParser,
         },
     },
     BoaProfiler,
@@ -112,6 +112,15 @@ where
                     cursor.expect(Punctuator::CloseBracket, "call expression")?;
                     lhs = GetField::new(lhs, idx).into();
                 }
+                TokenKind::TemplateNoSubstitution { .. } | TokenKind::TemplateMiddle { .. } => {
+                    lhs = TaggedTemplateLiteral::new(
+                        self.allow_yield,
+                        self.allow_await,
+                        tok.span().start(),
+                        lhs,
+                    )
+                    .parse(cursor)?;
+                }
                 _ => break,
             }
         }

boa/src/syntax/parser/expression/left_hand_side/member.rs (14)

@@ -17,7 +17,10 @@ use crate::{
         },
         lexer::TokenKind,
         parser::{
-            expression::{primary::PrimaryExpression, Expression},
+            expression::{
+                left_hand_side::template::TaggedTemplateLiteral, primary::PrimaryExpression,
+                Expression,
+            },
             AllowAwait, AllowYield, Cursor, ParseError, ParseResult, TokenParser,
         },
     },
@@ -105,6 +108,15 @@ where
                     cursor.expect(Punctuator::CloseBracket, "member expression")?;
                     lhs = GetField::new(lhs, idx).into();
                 }
+                TokenKind::TemplateNoSubstitution { .. } | TokenKind::TemplateMiddle { .. } => {
+                    lhs = TaggedTemplateLiteral::new(
+                        self.allow_yield,
+                        self.allow_await,
+                        tok.span().start(),
+                        lhs,
+                    )
+                    .parse(cursor)?;
+                }
                 _ => break,
             }
         }

boa/src/syntax/parser/expression/left_hand_side/mod.rs (1)

@@ -10,6 +10,7 @@
 mod arguments;
 mod call;
 mod member;
+mod template;
 
 use self::{call::CallExpression, member::MemberExpression};
 use crate::{

boa/src/syntax/parser/expression/left_hand_side/template.rs (new file, 90 lines)

@@ -0,0 +1,90 @@
use crate::{
    profiler::BoaProfiler,
    syntax::{
        ast::node::TaggedTemplate,
        ast::{Node, Position, Punctuator},
        lexer::TokenKind,
        parser::{
            cursor::Cursor, expression::Expression, AllowAwait, AllowYield, ParseError,
            ParseResult, TokenParser,
        },
    },
};
use std::io::Read;

/// Parses a tagged template.
///
/// More information:
/// - [ECMAScript specification][spec]
///
/// [spec]: https://tc39.es/ecma262/#prod-TemplateLiteral
#[derive(Debug, Clone)]
pub(super) struct TaggedTemplateLiteral {
    allow_yield: AllowYield,
    allow_await: AllowAwait,
    start: Position,
    tag: Node,
}

impl TaggedTemplateLiteral {
    /// Creates a new `TaggedTemplateLiteral` parser.
    pub(super) fn new<Y, A>(allow_yield: Y, allow_await: A, start: Position, tag: Node) -> Self
    where
        Y: Into<AllowYield>,
        A: Into<AllowAwait>,
    {
        Self {
            allow_yield: allow_yield.into(),
            allow_await: allow_await.into(),
            start,
            tag,
        }
    }
}

impl<R> TokenParser<R> for TaggedTemplateLiteral
where
    R: Read,
{
    type Output = Node;

    fn parse(self, cursor: &mut Cursor<R>) -> ParseResult {
        let _timer = BoaProfiler::global().start_event("TaggedTemplateLiteral", "Parsing");

        let mut raws = Vec::new();
        let mut cookeds = Vec::new();
        let mut exprs = Vec::new();

        let mut token = cursor.next()?.ok_or(ParseError::AbruptEnd)?;
        loop {
            match token.kind() {
                TokenKind::TemplateMiddle { raw, cooked } => {
                    raws.push(raw.clone());
                    cookeds.push(cooked.clone());
                    exprs.push(
                        Expression::new(true, self.allow_yield, self.allow_await).parse(cursor)?,
                    );
                    cursor.expect(
                        TokenKind::Punctuator(Punctuator::CloseBlock),
                        "template literal",
                    )?;
                }
                TokenKind::TemplateNoSubstitution { raw, cooked } => {
                    raws.push(raw.clone());
                    cookeds.push(cooked.clone());
                    return Ok(Node::from(TaggedTemplate::new(
                        self.tag, raws, cookeds, exprs,
                    )));
                }
                _ => {
                    return Err(ParseError::general(
                        "cannot parse tagged template literal",
                        self.start,
                    ))
                }
            }
            token = cursor.lex_template(self.start)?;
        }
    }
}

boa/src/syntax/parser/expression/primary/mod.rs (18)

@@ -11,6 +11,7 @@ mod array_initializer;
 mod async_function_expression;
 mod function_expression;
 mod object_initializer;
+mod template;
 
 #[cfg(test)]
 mod tests;
@@ -27,7 +28,10 @@ use crate::{
             Const, Keyword, Punctuator,
         },
         lexer::{token::Numeric, InputElement, TokenKind},
-        parser::{AllowAwait, AllowYield, Cursor, ParseError, ParseResult, TokenParser},
+        parser::{
+            expression::primary::template::TemplateLiteral, AllowAwait, AllowYield, Cursor,
+            ParseError, ParseResult, TokenParser,
+        },
     },
 };
 pub(in crate::syntax::parser) use object_initializer::Initializer;
@@ -103,7 +107,9 @@ where
             TokenKind::BooleanLiteral(boolean) => Ok(Const::from(*boolean).into()),
             TokenKind::NullLiteral => Ok(Const::Null.into()),
             TokenKind::Identifier(ident) => Ok(Identifier::from(ident.as_ref()).into()), // TODO: IdentifierReference
-            TokenKind::StringLiteral(s) => Ok(Const::from(s.as_ref()).into()),
+            TokenKind::StringLiteral(s) | TokenKind::TemplateNoSubstitution { cooked: s, .. } => {
+                Ok(Const::from(s.as_ref()).into())
+            }
             TokenKind::NumericLiteral(Numeric::Integer(num)) => Ok(Const::from(*num).into()),
             TokenKind::NumericLiteral(Numeric::Rational(num)) => Ok(Const::from(*num).into()),
             TokenKind::NumericLiteral(Numeric::BigInt(num)) => Ok(Const::from(num.clone()).into()),
@@ -132,6 +138,14 @@ where
                     Err(ParseError::unexpected(tok, "regular expression literal"))
                 }
            }
+            TokenKind::TemplateMiddle { cooked, .. } => TemplateLiteral::new(
+                self.allow_yield,
+                self.allow_await,
+                tok.span().start(),
+                cooked.as_ref(),
+            )
+            .parse(cursor)
+            .map(Node::TemplateLit),
             _ => Err(ParseError::unexpected(tok.clone(), "primary expression")),
         }
     }

boa/src/syntax/parser/expression/primary/template/mod.rs (new file, 104 lines)

@@ -0,0 +1,104 @@
//! Template literal parsing.
//!
//! More information:
//! - [MDN documentation][mdn]
//! - [ECMAScript specification][spec]
//!
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals
//! [spec]: https://tc39.es/ecma262/#sec-template-literals

use crate::{
    profiler::BoaProfiler,
    syntax::{
        ast::node::template::{TemplateElement, TemplateLit},
        ast::Position,
        ast::Punctuator,
        lexer::TokenKind,
        parser::cursor::Cursor,
        parser::expression::Expression,
        parser::{AllowAwait, AllowYield, ParseError, TokenParser},
    },
};
use std::io::Read;

/// Parses a template literal.
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals
/// [spec]: https://tc39.es/ecma262/#prod-TemplateLiteral
#[derive(Debug, Clone)]
pub(super) struct TemplateLiteral {
    allow_yield: AllowYield,
    allow_await: AllowAwait,
    start: Position,
    first: String,
}

impl TemplateLiteral {
    /// Creates a new `TemplateLiteral` parser.
    pub(super) fn new<Y, A>(allow_yield: Y, allow_await: A, start: Position, first: &str) -> Self
    where
        Y: Into<AllowYield>,
        A: Into<AllowAwait>,
    {
        Self {
            allow_yield: allow_yield.into(),
            allow_await: allow_await.into(),
            start,
            first: first.to_owned(),
        }
    }
}

impl<R> TokenParser<R> for TemplateLiteral
where
    R: Read,
{
    type Output = TemplateLit;

    fn parse(self, cursor: &mut Cursor<R>) -> Result<Self::Output, ParseError> {
        let _timer = BoaProfiler::global().start_event("TemplateLiteral", "Parsing");

        let mut elements = Vec::new();
        elements.push(TemplateElement::String(self.first.into_boxed_str()));
        elements.push(TemplateElement::Expr(
            Expression::new(true, self.allow_yield, self.allow_await).parse(cursor)?,
        ));
        cursor.expect(
            TokenKind::Punctuator(Punctuator::CloseBlock),
            "template literal",
        )?;

        loop {
            match cursor.lex_template(self.start)?.kind() {
                TokenKind::TemplateMiddle {
                    cooked: template, ..
                } => {
                    elements.push(TemplateElement::String(template.to_owned()));
                    elements.push(TemplateElement::Expr(
                        Expression::new(true, self.allow_yield, self.allow_await).parse(cursor)?,
                    ));
                    cursor.expect(
                        TokenKind::Punctuator(Punctuator::CloseBlock),
                        "template literal",
                    )?;
                }
                TokenKind::TemplateNoSubstitution {
                    cooked: template, ..
                } => {
                    elements.push(TemplateElement::String(template.to_owned()));
                    return Ok(TemplateLit::new(elements));
                }
                _ => {
                    return Err(ParseError::general(
                        "cannot parse template literal",
                        self.start,
                    ))
                }
            }
        }
    }
}

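The parser above always produces a strict alternation that starts and ends with a string part, inserting an empty string between adjacent substitutions. A sketch of the shape produced for the template `a${x}${y}c`, built by hand with the AST types from earlier in this commit:

    use crate::syntax::ast::node::{
        template::{TemplateElement, TemplateLit},
        Identifier, Node,
    };

    #[test]
    fn template_lit_element_alternation() {
        // `a${x}${y}c`  ->  String / Expr / String / Expr / String
        let expected = TemplateLit::new(vec![
            TemplateElement::String("a".into()),
            TemplateElement::Expr(Node::from(Identifier::from("x"))),
            TemplateElement::String("".into()), // empty part between the two substitutions
            TemplateElement::Expr(Node::from(Identifier::from("y"))),
            TemplateElement::String("c".into()),
        ]);

        assert_eq!(expected.to_string(), "`a${x}${y}c`");
    }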
boa/src/syntax/parser/function/mod.rs (3)

@@ -269,7 +269,8 @@ where
             TokenKind::Punctuator(Punctuator::CloseBlock) => {
                 return Ok(Vec::new().into());
             }
-            TokenKind::StringLiteral(string) | TokenKind::TemplateLiteral(string) => {
+            TokenKind::StringLiteral(string)
+            | TokenKind::TemplateNoSubstitution { cooked: string, .. } => {
                 if string == &"use strict".into() {
                     cursor.set_strict_mode(true);
                 }

boa/src/syntax/parser/mod.rs (3)

@@ -125,7 +125,8 @@ where
         match cursor.peek(0)? {
             Some(tok) => {
                 match tok.kind() {
-                    TokenKind::StringLiteral(string) | TokenKind::TemplateLiteral(string) => {
+                    TokenKind::StringLiteral(string)
+                    | TokenKind::TemplateNoSubstitution { cooked: string, .. } => {
                         if string.as_ref() == "use strict" {
                             cursor.set_strict_mode(true);
                         }

test_ignore.txt (4)

@@ -54,3 +54,7 @@ S15.1.3.4_A1.3_T1
 
 // This one seems to terminate the process somehow:
 arg-length-near-integer-limit
+
+// These generate a stack overflow
+tco-call
+tco-member
