js: Add error message to syntax error
simonwuelker committed May 18, 2024
1 parent a268768 commit 5cfad36
Showing 14 changed files with 88 additions and 64 deletions.
crates/js/src/parser/error.rs (21 additions, 11 deletions)

@@ -3,25 +3,32 @@ use sl_std::slice::SubsliceOffset;
 /// A pointer to the location of a syntax error in a script
 ///
 /// The offset is in bytes
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
-pub struct SyntaxError(usize);
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct SyntaxError {
+    position: usize,
+    message: String,
+}

 impl SyntaxError {
     #[must_use]
-    pub const fn from_position(position: usize) -> Self {
-        Self(position)
+    pub const fn new(position: usize, message: String) -> Self {
+        Self { position, message }
     }

-    pub fn get_context<'a>(&self, context: &'a str) -> ErrorContext<'a> {
+    pub fn get_context<'source_code, 'error>(
+        &'error self,
+        context: &'source_code str,
+    ) -> ErrorContext<'source_code, 'error> {
         for line in context.lines() {
             let byte_range = context
                 .subslice_range(line)
                 .expect("Line is not a reference to the source string");

-            if byte_range.contains(self.0) {
+            if byte_range.contains(self.position) {
                 return ErrorContext {
                     line,
-                    offset_in_line: self.0 - byte_range.start(),
+                    offset_in_line: self.position - byte_range.start(),
+                    message: &self.message,
                 };
             }
         }

@@ -31,20 +38,23 @@ impl SyntaxError {
         ErrorContext {
             line: last_line,
             offset_in_line: last_line.len(),
+            message: &self.message,
         }
     }
 }

 /// A [SyntaxError] with added source code annotations for more context
 #[derive(Clone, Copy, Debug)]
-pub struct ErrorContext<'a> {
-    pub line: &'a str,
+pub struct ErrorContext<'source_code, 'error> {
+    pub line: &'source_code str,
     pub offset_in_line: usize,
+    pub message: &'error str,
 }

-impl<'a> ErrorContext<'a> {
+impl<'source_code, 'error> ErrorContext<'source_code, 'error> {
     pub fn dump(&self) {
         println!("Error Context:");
         println!("* {}", self.line);
-        println!("* {}^", " ".repeat(self.offset_in_line))
+        println!("* {}^ {}", " ".repeat(self.offset_in_line), self.message)
     }
 }
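
Taken together, the new pieces work like this. A minimal usage sketch (not part of the commit, assuming access to the crate's SyntaxError and ErrorContext types):

    let source = "let x = ;";

    // Point at byte offset 8 (the stray `;`) and attach a message via the
    // new constructor; offsets are in bytes, per the doc comment above.
    let error = SyntaxError::new(8, String::from("failed to parse primary expression"));

    // get_context() resolves the byte offset to a line and an offset within
    // that line; dump() prints the annotated line, now with the message:
    //
    // Error Context:
    // * let x = ;
    // *         ^ failed to parse primary expression
    error.get_context(source).dump();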
crates/js/src/parser/expressions/assignment_expression.rs (4 additions, 3 deletions)

@@ -30,7 +30,7 @@ impl AssignmentExpression {
         tokenizer: &mut Tokenizer<'_>,
     ) -> Result<Expression, SyntaxError> {
         let Some(next_token) = tokenizer.peek(0, SkipLineTerminators::Yes)? else {
-            return Err(tokenizer.syntax_error());
+            return Err(tokenizer.syntax_error("expected more tokens"));
         };

         match next_token {

@@ -40,17 +40,18 @@
             _ => {},
         }

+        // This works because every LeftHandSideExpression is also a valid ConditionalExpression
         let conditional_expression = ConditionalExpression::parse::<IN, YIELD, AWAIT>(tokenizer)?;

         let next_token = tokenizer.peek(0, SkipLineTerminators::Yes)?;
+        log::info!("next token: {next_token:?}");

         if let Some(operator) = next_token.and_then(AssignmentOp::from_token) {
             tokenizer.advance(1);

             let Some(lhs) =
                 AssignmentTarget::from_expression(conditional_expression, tokenizer.is_strict())
             else {
-                return Err(tokenizer.syntax_error());
+                return Err(tokenizer.syntax_error("expression is not a valid assignment target"));
             };

             let rhs = AssignmentExpression::parse::<IN, YIELD, AWAIT>(tokenizer)?;
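
The added comment captures why this works: the parser always parses a ConditionalExpression first, and only reinterprets it as an assignment target (via AssignmentTarget::from_expression) once it actually sees an assignment operator. An input like `a + b = c` parses its left side fine but fails that reinterpretation, which is exactly the case the new "expression is not a valid assignment target" message reports.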
crates/js/src/parser/expressions/binary_expression.rs (1 addition, 1 deletion)

@@ -186,7 +186,7 @@ pub fn parse_exponentiation_expression<const YIELD: bool, const AWAIT: bool>(
     // NOTE: This function cannot be defined with the macro above since it can contain either UpdateExpressions
     // or UnaryExpressions
     let Some(next_token) = tokenizer.peek(0, SkipLineTerminators::Yes)? else {
-        return Err(tokenizer.syntax_error());
+        return Err(tokenizer.syntax_error("expected more tokens"));
     };

     let is_unary_expression = match next_token {
@@ -23,7 +23,7 @@ pub fn parse_lefthandside_expression<const YIELD: bool, const AWAIT: bool>(
     tokenizer: &mut Tokenizer<'_>,
 ) -> Result<Expression, SyntaxError> {
     let Some(next_token) = tokenizer.peek(0, SkipLineTerminators::Yes)? else {
-        return Err(tokenizer.syntax_error());
+        return Err(tokenizer.syntax_error("expected more tokens"));
     };

     let lhs_expression = match next_token {
crates/js/src/parser/expressions/mod.rs (2 additions, 2 deletions)

@@ -48,7 +48,7 @@ fn parse_primary_expression<const YIELD: bool, const AWAIT: bool>(
     tokenizer: &mut Tokenizer<'_>,
 ) -> Result<Expression, SyntaxError> {
     let Some(next_token) = tokenizer.peek(0, SkipLineTerminators::Yes)? else {
-        return Err(tokenizer.syntax_error());
+        return Err(tokenizer.syntax_error("expected more tokens"));
     };

     let primary_expression = match next_token {

@@ -87,7 +87,7 @@ fn parse_primary_expression<const YIELD: bool, const AWAIT: bool>(
             let object_literal = ObjectLiteral::parse::<YIELD, AWAIT>(tokenizer)?;
             object_literal.into()
         },
-        _ => return Err(tokenizer.syntax_error()),
+        _ => return Err(tokenizer.syntax_error("failed to parse primary expression")),
     };

     Ok(primary_expression)
crates/js/src/parser/expressions/object.rs (2 additions, 7 deletions)

@@ -27,13 +27,8 @@ impl PropertyDefinition {
     pub fn parse<const YIELD: bool, const AWAIT: bool>(
         tokenizer: &mut Tokenizer<'_>,
     ) -> Result<Self, SyntaxError> {
-        let property_definition = if let Ok(identifier_reference) =
-            parse_identifier_reference::<YIELD, AWAIT>(tokenizer)
-        {
-            Self::IdentifierRef(identifier_reference)
-        } else {
-            return Err(tokenizer.syntax_error());
-        };
+        let property_definition =
+            parse_identifier_reference::<YIELD, AWAIT>(tokenizer).map(Self::IdentifierRef)?;

         Ok(property_definition)
     }
crates/js/src/parser/expressions/unary_expression.rs (1 addition, 1 deletion)

@@ -28,7 +28,7 @@ impl UnaryExpression {
         tokenizer: &mut Tokenizer<'_>,
     ) -> Result<Expression, SyntaxError> {
         let Some(next_token) = tokenizer.peek(0, SkipLineTerminators::Yes)? else {
-            return Err(tokenizer.syntax_error());
+            return Err(tokenizer.syntax_error("expected more tokens"));
         };

         let unary_expression = match next_token {
crates/js/src/parser/expressions/update_expression.rs (1 addition, 1 deletion)

@@ -31,7 +31,7 @@ impl UpdateExpression {
         tokenizer: &mut Tokenizer<'_>,
     ) -> Result<Expression, SyntaxError> {
         let Some(next_token) = tokenizer.peek(0, SkipLineTerminators::Yes)? else {
-            return Err(tokenizer.syntax_error());
+            return Err(tokenizer.syntax_error("expected more tokens"));
         };

         let update_expression = match next_token {
crates/js/src/parser/identifiers.rs (18 additions, 9 deletions)

@@ -50,19 +50,22 @@ pub(crate) fn parse_binding_identifier<const YIELD: bool, const AWAIT: bool>(
     tokenizer: &mut Tokenizer<'_>,
 ) -> Result<String, SyntaxError> {
     let Some(Token::Identifier(identifier)) = tokenizer.next(SkipLineTerminators::Yes)? else {
-        return Err(tokenizer.syntax_error());
+        return Err(tokenizer.syntax_error("expected identifier"));
     };

     if !YIELD && identifier.as_str() == "yield" {
-        return Err(tokenizer.syntax_error());
+        return Err(tokenizer.syntax_error("\"yield\" is not a valid identifier here"));
     }

     if !AWAIT && identifier.as_str() == "await" {
-        return Err(tokenizer.syntax_error());
+        return Err(tokenizer.syntax_error("\"await\" is not a valid identifier here"));
     }

     if tokenizer.is_strict() && matches!(identifier.as_str(), "arguments" | "eval") {
-        return Err(tokenizer.syntax_error());
+        return Err(tokenizer.syntax_error(format!(
+            "{:?} is not a valid identifier here",
+            identifier.as_str()
+        )));
     }

     Ok(identifier)

@@ -89,21 +92,27 @@ impl Identifier {
     pub(crate) fn parse(tokenizer: &mut Tokenizer<'_>) -> Result<Self, SyntaxError> {
         let Some(Token::Identifier(identifier_name)) = tokenizer.next(SkipLineTerminators::Yes)?
         else {
-            return Err(tokenizer.syntax_error());
+            return Err(tokenizer.syntax_error("expected identifier"));
         };

         if RESERVED_WORDS.contains(&identifier_name.as_str()) {
-            return Err(tokenizer.syntax_error());
+            return Err(
+                tokenizer.syntax_error(format!("{:?} is a reserved identifier", identifier_name))
+            );
         }

         if tokenizer.is_strict()
             && DISALLOWED_IDENTIFIERS_IN_STRICT_MODE.contains(&identifier_name.as_str())
         {
-            return Err(tokenizer.syntax_error());
+            return Err(tokenizer.syntax_error(format!(
+                "{:?} is not allowed as an identifier in strict mode",
+                identifier_name
+            )));
         }

         if tokenizer.goal_symbol() == GoalSymbol::Module && identifier_name.as_str() == "await" {
-            return Err(tokenizer.syntax_error());
+            return Err(tokenizer
+                .syntax_error("\"await\" is a disallowed identifier when parsing a module"));
         }

         Ok(Self(identifier_name))

@@ -129,6 +138,6 @@ pub(crate) fn parse_identifier_reference<const YIELD: bool, const AWAIT: bool>(

         Identifier::parse(tokenizer).map(|i| i.0)
     } else {
-        Err(tokenizer.syntax_error())
+        Err(tokenizer.syntax_error("expected identifier"))
     }
 }
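
For reference, here is a distilled, self-contained sketch of the identifier checks that now carry distinct messages. The word lists are abbreviated stand-ins for the crate's RESERVED_WORDS and DISALLOWED_IDENTIFIERS_IN_STRICT_MODE tables, and the function is an illustration, not the commit's code:

    // Abbreviated stand-ins; the real tables live in identifiers.rs.
    const RESERVED: &[&str] = &["break", "if", "new", "return"];
    const DISALLOWED_IN_STRICT_MODE: &[&str] = &["arguments", "eval", "yield"];

    /// Mirrors the order of checks in Identifier::parse and returns the
    /// message that would reach SyntaxError, if any.
    fn identifier_error(name: &str, strict: bool, is_module: bool) -> Option<String> {
        if RESERVED.contains(&name) {
            return Some(format!("{name:?} is a reserved identifier"));
        }
        if strict && DISALLOWED_IN_STRICT_MODE.contains(&name) {
            return Some(format!("{name:?} is not allowed as an identifier in strict mode"));
        }
        if is_module && name == "await" {
            return Some("\"await\" is a disallowed identifier when parsing a module".into());
        }
        None
    }

    // For example, identifier_error("eval", true, false) yields
    // Some("\"eval\" is not allowed as an identifier in strict mode"),
    // while identifier_error("foo", true, true) yields None.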
crates/js/src/parser/literals.rs (1 addition, 1 deletion)

@@ -26,7 +26,7 @@ impl Literal {
             Some(Token::NumericLiteral(numeric_literal)) => {
                 Self::NumericLiteral(Number::new(f64::from(numeric_literal)))
             },
-            _ => return Err(tokenizer.syntax_error()),
+            _ => return Err(tokenizer.syntax_error("expected literal token")),
         };

         Ok(literal)
crates/js/src/parser/statements_and_declarations/declaration.rs (8 additions, 8 deletions)

@@ -24,7 +24,7 @@ impl Declaration {
         tokenizer: &mut Tokenizer<'_>,
     ) -> Result<Self, SyntaxError> {
         let Some(next_token) = tokenizer.peek(0, SkipLineTerminators::Yes)? else {
-            return Err(tokenizer.syntax_error());
+            return Err(tokenizer.syntax_error("expected more tokens"));
         };

         let declaration = match next_token {

@@ -34,7 +34,7 @@
             Token::Identifier(ident) if matches!(ident.as_str(), "let" | "const") => {
                 LexicalDeclaration::parse::<true, YIELD, AWAIT>(tokenizer)?.into()
             },
-            _ => return Err(tokenizer.syntax_error()),
+            _ => return Err(tokenizer.syntax_error("failed to parse declaration")),
         };

         Ok(declaration)

@@ -61,7 +61,7 @@ impl LetOrConst {
         let let_or_const = match tokenizer.next(SkipLineTerminators::Yes)? {
             Some(Token::Identifier(ident)) if ident == "let" => Self::Let,
             Some(Token::Identifier(ident)) if ident == "const" => Self::Const,
-            _ => return Err(tokenizer.syntax_error()),
+            _ => return Err(tokenizer.syntax_error("expected \"let\" or \"const\"")),
         };

         Ok(let_or_const)

@@ -100,7 +100,7 @@ impl LexicalDeclaration {
                 .iter()
                 .any(LexicalBinding::has_no_initializer)
         {
-            return Err(tokenizer.syntax_error());
+            return Err(tokenizer.syntax_error("const declaration without initializer"));
         }

         let lexical_declaration = Self {

@@ -127,7 +127,7 @@ impl LexicalBinding {
         tokenizer: &mut Tokenizer<'_>,
     ) -> Result<Self, SyntaxError> {
         let Some(next_token) = tokenizer.peek(0, SkipLineTerminators::Yes)? else {
-            return Err(tokenizer.syntax_error());
+            return Err(tokenizer.syntax_error("expected more tokens"));
         };

         let lexical_binding = match next_token {

@@ -151,13 +151,13 @@
             },
             Token::Punctuator(Punctuator::BracketOpen) => {
                 log::error!("Unimplemented: ArrayBindingPattern in LexicalBinding");
-                return Err(tokenizer.syntax_error());
+                return Err(tokenizer.syntax_error("TODO"));
             },
             Token::Punctuator(Punctuator::CurlyBraceOpen) => {
                 log::error!("Unimplemented: ObjectBindingPattern in LexicalBinding");
-                return Err(tokenizer.syntax_error());
+                return Err(tokenizer.syntax_error("TODO"));
             },
-            _ => return Err(tokenizer.syntax_error()),
+            _ => return Err(tokenizer.syntax_error("failed to parse lexical binding")),
         };

         Ok(lexical_binding)
crates/js/src/parser/statements_and_declarations/statement.rs (2 additions, 2 deletions)

@@ -25,7 +25,7 @@ impl StatementListItem {
         tokenizer: &mut Tokenizer<'_>,
     ) -> Result<Self, SyntaxError> {
         let Some(next_token) = tokenizer.peek(0, SkipLineTerminators::Yes)? else {
-            return Err(tokenizer.syntax_error());
+            return Err(tokenizer.syntax_error("expected more tokens"));
        };

         let statement_list_item = match next_token {

@@ -62,7 +62,7 @@ impl Statement {
         tokenizer: &mut Tokenizer<'_>,
     ) -> Result<Self, SyntaxError> {
         let Some(next_token) = tokenizer.peek(0, SkipLineTerminators::Yes)? else {
-            return Err(tokenizer.syntax_error());
+            return Err(tokenizer.syntax_error("expected more tokens"));
         };

         let statement = match next_token {