mirror of
https://github.com/LadybirdBrowser/ladybird.git
synced 2024-11-22 07:30:19 +00:00
LibJS: Fix lexing of the last character in a file
Before this commit, the last character in a file would be swallowed. This also fixes parsing of empty files, which would previously ASSERT.
This commit is contained in:
parent
8285102b6d
commit
ac524b632f
Notes:
sideshowbarker
2024-07-19 08:19:40 +09:00
Author: https://github.com/sunverwerth Commit: https://github.com/SerenityOS/serenity/commit/ac524b632f6 Pull-request: https://github.com/SerenityOS/serenity/pull/1443 Issue: https://github.com/SerenityOS/serenity/issues/1431
1 changed file with 6 additions and 5 deletions
|
@ -115,7 +115,8 @@ Lexer::Lexer(StringView source)
|
|||
|
||||
void Lexer::consume()
|
||||
{
|
||||
if (is_eof()) {
|
||||
if (m_position >= m_source.length()) {
|
||||
m_position = m_source.length() + 1;
|
||||
m_current_char = EOF;
|
||||
return;
|
||||
}
|
||||
|
@ -125,7 +126,7 @@ void Lexer::consume()
|
|||
|
||||
bool Lexer::is_eof() const
|
||||
{
|
||||
return m_position >= m_source.length();
|
||||
return m_current_char == EOF;
|
||||
}
|
||||
|
||||
bool Lexer::is_identifier_start() const
|
||||
|
@ -157,12 +158,12 @@ Token Lexer::next()
|
|||
{
|
||||
size_t trivia_start = m_position;
|
||||
|
||||
// consume up whitespace and comments
|
||||
// consume whitespace and comments
|
||||
while (true) {
|
||||
if (isspace(m_current_char)) {
|
||||
do {
|
||||
consume();
|
||||
} while (!is_eof() && isspace(m_current_char));
|
||||
} while (isspace(m_current_char));
|
||||
} else if (is_line_comment_start()) {
|
||||
consume();
|
||||
do {
|
||||
|
@ -228,7 +229,7 @@ Token Lexer::next()
|
|||
}
|
||||
|
||||
bool found_two_char_token = false;
|
||||
if (!found_three_char_token && !is_eof()) {
|
||||
if (!found_three_char_token && m_position < m_source.length()) {
|
||||
char second_char = m_source[m_position];
|
||||
char two_chars[] { (char)m_current_char, second_char, 0 };
|
||||
auto it = s_two_char_tokens.find(two_chars);
|
||||
|
|
Loading…
Reference in a new issue