Shell: Correctly track nested expansions in POSIX mode
Previously, any expansion closing sequence would have terminated the entire expansion chain. Fix this by keeping track of active expansions and running the parser in 'skip' mode. Fixes #19110.
commit b6d7c5fb0e (parent d1f98108a9)
Author: https://github.com/alimpfard
Commit: https://github.com/SerenityOS/serenity/commit/b6d7c5fb0e
Pull-request: https://github.com/SerenityOS/serenity/pull/20279
Issue: https://github.com/SerenityOS/serenity/issues/19110
Notes: sideshowbarker, 2024-07-17 01:11:48 +09:00

2 changed files with 62 additions and 10 deletions
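
For context: the POSIX lexer hits this path on nested expansions, for example a command substitution inside another one along the lines of `echo $(echo $(echo x))` (an illustrative input, not one taken from the issue). Before this change, the first closing `)` terminated every open expansion at once. The fix measures how far the nested expansion really extends by lexing it on a throwaway copy of the state ("skip" mode) and then splices the raw text back into the enclosing expansion. The sketch below is not the commit's code; it is a self-contained simplification that finds the end of the nested expansion by counting parentheses instead of re-running the reducer.

#include <cstddef>
#include <iostream>
#include <string_view>

// Simplified stand-in for the "skip mode" pass: find where a nested "$(...)"
// really ends by scanning ahead without committing any lexer state, then hand
// the whole raw span back to the caller. The real lexer re-runs its reduction
// loop under a state guard instead of counting parentheses.
static size_t find_closing_paren(std::string_view input, size_t open_paren_index)
{
    int depth = 0;
    for (size_t i = open_paren_index; i < input.size(); ++i) {
        if (input[i] == '(')
            ++depth;
        else if (input[i] == ')' && --depth == 0)
            return i;
    }
    return input.size(); // Unterminated: the real lexer asks for a continuation line.
}

int main()
{
    std::string_view input = "echo $(echo $(echo nested)) done";
    size_t start = input.find("$(");
    size_t end = find_closing_paren(input, start + 1);
    // Prints "$(echo $(echo nested))": the inner ')' no longer terminates
    // the outer expansion.
    std::cout << input.substr(start, end - start + 1) << '\n';
}
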
@@ -256,7 +256,7 @@ ErrorOr<Lexer::ReductionResult> Lexer::reduce_operator()
     if (expect_heredoc_entry && tokens.size() > 1) {
         auto [key, interpolation] = process_heredoc_key(tokens[1]);
-        m_state.heredoc_entries.enqueue(HeredocEntry {
+        m_state.heredoc_entries.append(HeredocEntry {
             .key = key,
             .allow_interpolation = interpolation,
             .dedent = tokens[0].type == Token::Type::DoubleLessDash,
@@ -544,7 +544,7 @@ ErrorOr<Lexer::ReductionResult> Lexer::reduce_start()
     }
 
     if (was_on_new_line && !m_state.heredoc_entries.is_empty()) {
-        auto const& entry = m_state.heredoc_entries.head();
+        auto const& entry = m_state.heredoc_entries.first();
 
         auto start_index = m_lexer.tell();
         Optional<size_t> end_index;
@@ -576,7 +576,7 @@ ErrorOr<Lexer::ReductionResult> Lexer::reduce_start()
         token.relevant_heredoc_key = entry.key;
         token.type = Token::Type::HeredocContents;
 
-        m_state.heredoc_entries.dequeue();
+        m_state.heredoc_entries.take_first();
 
         m_state.on_new_line = true;
 
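
The hunks above are the mechanical half of the change: heredoc entries move from a Queue to a Vector (see the header diff below), so enqueue()/head()/dequeue() become append()/first()/take_first() while keeping FIFO order. As a purely illustrative sketch (standard containers, not AK types), the same bookkeeping looks like:

#include <string>
#include <utility>
#include <vector>

// Hypothetical mirror of the heredoc bookkeeping: entries are appended as
// "<<KEY" operators are lexed and consumed front-to-back when their bodies
// are reached. AK::Vector offers first()/take_first() directly; std::vector
// needs a small helper.
struct HeredocEntry {
    std::string key;
    bool allow_interpolation { false };
    bool dedent { false };
};

static HeredocEntry take_first(std::vector<HeredocEntry>& entries)
{
    HeredocEntry entry = std::move(entries.front());
    entries.erase(entries.begin());
    return entry;
}

int main()
{
    std::vector<HeredocEntry> entries;
    entries.push_back({ "EOF", true, false });  // lexed "<<EOF"
    entries.push_back({ "END", false, true });  // lexed "<<-END"
    auto entry = take_first(entries);           // the first heredoc body comes next
    return entry.key == "EOF" ? 0 : 1;
}
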
@@ -705,6 +705,14 @@ ErrorOr<Lexer::ReductionResult> Lexer::reduce_start()
         };
     }
 
+    if (!m_state.escaping && is_any_of("})"sv)(m_lexer.peek())) {
+        // That's an eof for us.
+        return ReductionResult {
+            .tokens = {},
+            .next_reduction = Reduction::None,
+        };
+    }
+
     m_state.escaping = false;
     m_state.buffer.append(consume());
     return ReductionResult {
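
The early return added above is what lets a skip pass stop at the character that closes the enclosing expansion: an unescaped `}` or `)` in the start state is now treated as end-of-input rather than being consumed into the buffer. The `is_any_of("})"sv)(m_lexer.peek())` call builds a one-off character-set predicate and immediately applies it to the peeked character; a minimal stand-in for such a helper (an illustration, not AK's implementation) looks like this:

#include <iostream>
#include <string_view>

// Minimal sketch of an is_any_of-style helper: returns a predicate that
// reports whether a character occurs in the given set.
constexpr auto is_any_of(std::string_view characters)
{
    return [characters](char c) { return characters.find(c) != std::string_view::npos; };
}

int main()
{
    std::cout << is_any_of("})")('}') << '\n'; // 1
    std::cout << is_any_of("})")('x') << '\n'; // 0
}
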
@@ -793,6 +801,42 @@ ErrorOr<Lexer::ReductionResult> Lexer::reduce_parameter_expansion()
 
 ErrorOr<Lexer::ReductionResult> Lexer::reduce_command_or_arithmetic_substitution_expansion()
 {
+    auto ch = m_lexer.peek();
+    if (ch == '(' && m_state.buffer.string_view().ends_with("$("sv)) {
+        m_state.buffer.append(consume());
+        m_state.expansions.last() = ArithmeticExpansion {
+            .expression = {},
+            .value = StringBuilder {},
+            .range = range(-2)
+        };
+        return ReductionResult {
+            .tokens = {},
+            .next_reduction = Reduction::ArithmeticExpansion,
+        };
+    }
+
+    auto saved_position = m_state.position;
+    {
+        auto skip_mode = switch_to_skip_mode();
+
+        auto next_reduction = Reduction::Start;
+        do {
+            auto result = TRY(reduce(next_reduction));
+            next_reduction = result.next_reduction;
+        } while (next_reduction != Reduction::None);
+        saved_position = m_state.position;
+    }
+
+    auto const skipped_text = m_lexer.input().substring_view(m_state.position.end_offset, saved_position.end_offset - m_state.position.end_offset);
+    m_state.position.end_offset = saved_position.end_offset;
+    m_state.position.end_line = saved_position.end_line;
+
+    m_state.buffer.append(skipped_text);
+    m_state.expansions.last().get<CommandExpansion>().command.append(skipped_text);
+    m_state.expansions.last().visit([&](auto& expansion) {
+        expansion.range.length = m_state.position.end_offset - expansion.range.start - m_state.position.start_offset;
+    });
+
+    if (m_lexer.is_eof()) {
+        return ReductionResult {
+            .tokens = { Token::continuation("$("_short_string) },
@@ -800,12 +844,12 @@ ErrorOr<Lexer::ReductionResult> Lexer::reduce_command_or_arithmetic_substitution
         };
     }
 
-    auto ch = m_lexer.peek();
+    ch = m_lexer.peek();
     if (ch == '(' && m_state.buffer.string_view().ends_with("$("sv)) {
         m_state.buffer.append(consume());
         m_state.expansions.last() = ArithmeticExpansion {
             .expression = {},
-            .value = StringBuilder {},
+            .value = m_state.expansions.last().get<CommandExpansion>().command,
             .range = range(-2)
         };
         return ReductionResult {

@@ -9,6 +9,7 @@
 #include <AK/GenericLexer.h>
 #include <AK/Queue.h>
 #include <AK/String.h>
+#include <AK/TemporaryChange.h>
 #include <AK/Variant.h>
 #include <AK/Vector.h>
 #include <Shell/AST.h>
@@ -209,7 +210,7 @@ struct State {
         },
     };
     Vector<Expansion> expansions {};
-    Queue<HeredocEntry> heredoc_entries {};
+    Vector<HeredocEntry> heredoc_entries {};
     bool on_new_line { true };
 };
 
@@ -313,10 +314,6 @@ struct Token {
         return Token::Type::And;
     if (name == "|"sv)
         return Token::Type::Pipe;
-    if (name == "("sv)
-        return Token::Type::OpenParen;
-    if (name == ")"sv)
-        return Token::Type::CloseParen;
     if (name == ">"sv)
         return Token::Type::Great;
     if (name == "<"sv)
@@ -430,6 +427,17 @@ private:
     ErrorOr<ReductionResult> reduce_extended_parameter_expansion();
     ErrorOr<ReductionResult> reduce_heredoc_contents();
 
+    struct SkipTokens {
+        explicit SkipTokens(Lexer& lexer)
+            : m_state_change(lexer.m_state, lexer.m_state)
+        {
+        }
+
+        TemporaryChange<State> m_state_change;
+    };
+
+    SkipTokens switch_to_skip_mode() { return SkipTokens { *this }; }
+
     char consume();
     bool consume_specific(char);
     void reconsume(StringView);
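
The SkipTokens guard declared above is a pure save/restore mechanism: constructing TemporaryChange with the lexer state as both the variable and the new value snapshots the current state, leaves it unchanged on entry, and restores the snapshot when the guard goes out of scope, so anything the skip pass consumed or buffered is discarded. Below is a minimal stand-in for such a guard (modeled on, but not claimed to be, AK::TemporaryChange):

#include <cassert>
#include <utility>

// Minimal stand-in for a TemporaryChange-style guard: remember the old value,
// install the new one, restore the old value on destruction.
template<typename T>
class TemporaryChange {
public:
    TemporaryChange(T& variable, T new_value)
        : m_variable(variable)
        , m_old_value(variable)
    {
        m_variable = std::move(new_value);
    }
    ~TemporaryChange() { m_variable = std::move(m_old_value); }

private:
    T& m_variable;
    T m_old_value;
};

int main()
{
    int state = 1;
    {
        // Passing `state` for both arguments snapshots it, as SkipTokens does.
        TemporaryChange guard(state, state);
        state = 42; // mutations made while "skipping"...
    }
    assert(state == 1); // ...are discarded when the guard is destroyed.
}
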