/*
 * Copyright (c) 2020-2021, the SerenityOS developers.
 * Copyright (c) 2021-2023, Sam Atkins <atkinssj@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */
#pragma once

#include <AK/Badge.h>
#include <AK/Format.h>
#include <AK/Vector.h>
#include <LibWeb/CSS/Parser/ComponentValue.h>
#include <LibWeb/CSS/Parser/Tokenizer.h>
  12. namespace Web::CSS::Parser {
  13. template<typename T>
  14. class TokenStream {
  15. public:
  16. class StateTransaction {
  17. public:
  18. explicit StateTransaction(TokenStream<T>& token_stream)
  19. : m_token_stream(token_stream)
  20. , m_saved_iterator_offset(token_stream.m_iterator_offset)
  21. {
  22. }
  23. ~StateTransaction()
  24. {
  25. if (!m_commit)
  26. m_token_stream.m_iterator_offset = m_saved_iterator_offset;
  27. }
  28. StateTransaction create_child() { return StateTransaction(*this); }
  29. void commit()
  30. {
  31. m_commit = true;
  32. if (m_parent)
  33. m_parent->commit();
  34. }
  35. private:
  36. explicit StateTransaction(StateTransaction& parent)
  37. : m_parent(&parent)
  38. , m_token_stream(parent.m_token_stream)
  39. , m_saved_iterator_offset(parent.m_token_stream.m_iterator_offset)
  40. {
  41. }
  42. StateTransaction* m_parent { nullptr };
  43. TokenStream<T>& m_token_stream;
  44. int m_saved_iterator_offset { 0 };
  45. bool m_commit { false };
  46. };
  47. explicit TokenStream(Vector<T> const& tokens)
  48. : m_tokens(tokens)
  49. , m_eof(make_eof())
  50. {
  51. }
  52. TokenStream(TokenStream<T> const&) = delete;
  53. TokenStream(TokenStream<T>&&) = default;
  54. bool has_next_token()
  55. {
  56. return (size_t)(m_iterator_offset + 1) < m_tokens.size();
  57. }
  58. T const& next_token()
  59. {
  60. if (!has_next_token())
  61. return m_eof;
  62. ++m_iterator_offset;
  63. return m_tokens.at(m_iterator_offset);
  64. }
  65. T const& peek_token(int offset = 0)
  66. {
  67. if (!has_next_token())
  68. return m_eof;
  69. return m_tokens.at(m_iterator_offset + offset + 1);
  70. }
  71. T const& current_token()
  72. {
  73. if ((size_t)m_iterator_offset >= m_tokens.size())
  74. return m_eof;
  75. return m_tokens.at(m_iterator_offset);
  76. }
  77. void reconsume_current_input_token()
  78. {
  79. if (m_iterator_offset >= 0)
  80. --m_iterator_offset;
  81. }
  82. StateTransaction begin_transaction() { return StateTransaction(*this); }
  83. void skip_whitespace()
  84. {
  85. while (peek_token().is(Token::Type::Whitespace))
  86. next_token();
  87. }
  88. size_t token_count() const { return m_tokens.size(); }
  89. size_t remaining_token_count() const { return token_count() - m_iterator_offset - 1; }
  90. void dump_all_tokens()
  91. {
  92. dbgln("Dumping all tokens:");
  93. for (size_t i = 0; i < m_tokens.size(); ++i) {
  94. auto& token = m_tokens[i];
  95. if ((i - 1) == (size_t)m_iterator_offset)
  96. dbgln("-> {}", token.to_debug_string());
  97. else
  98. dbgln(" {}", token.to_debug_string());
  99. }
  100. }
  101. void copy_state(Badge<Parser>, TokenStream<T> const& other)
  102. {
  103. m_iterator_offset = other.m_iterator_offset;
  104. }
  105. private:
  106. Vector<T> const& m_tokens;
  107. int m_iterator_offset { -1 };
  108. T make_eof()
  109. {
  110. if constexpr (IsSame<T, Token>) {
  111. return Tokenizer::create_eof_token();
  112. }
  113. if constexpr (IsSame<T, ComponentValue>) {
  114. return ComponentValue(Tokenizer::create_eof_token());
  115. }
  116. }
  117. T m_eof;
  118. };
  119. }