LibWeb: Use GenericLexer in WrapperGenerator

This commit is contained in:
Nico Weber 2020-08-21 09:51:57 -04:00 committed by Andreas Kling
parent 0d52e7f6e3
commit 064159d215
Notes: sideshowbarker 2024-07-19 03:21:35 +09:00
3 changed files with 70 additions and 92 deletions

View file

@@ -56,6 +56,15 @@ bool GenericLexer::next_is(char expected) const
return peek() == expected; return peek() == expected;
} }
// Tests if the `expected` string comes next in the input
bool GenericLexer::next_is(StringView expected) const
{
for (size_t i = 0; i < expected.length(); ++i)
if (peek(i) != expected[i])
return false;
return true;
}
// Tests if the `expected` string comes next in the input // Tests if the `expected` string comes next in the input
bool GenericLexer::next_is(const char* expected) const bool GenericLexer::next_is(const char* expected) const
{ {
@@ -89,15 +98,21 @@ bool GenericLexer::consume_specific(char specific)
} }
// Consume the given string if it is next in the input // Consume the given string if it is next in the input
bool GenericLexer::consume_specific(const char* str) bool GenericLexer::consume_specific(StringView str)
{ {
if (!next_is(str)) if (!next_is(str))
return false; return false;
ignore(__builtin_strlen(str)); ignore(str.length());
return true; return true;
} }
// Consume the given string if it is next in the input
bool GenericLexer::consume_specific(const char* str)
{
return consume_specific(StringView(str));
}
// Consume a number of characters // Consume a number of characters
StringView GenericLexer::consume(size_t count) StringView GenericLexer::consume(size_t count)
{ {

View file

@@ -45,27 +45,29 @@ public:
char peek(size_t offset = 0) const; char peek(size_t offset = 0) const;
bool next_is(char expected) const; bool next_is(char) const;
bool next_is(const char* expected) const; bool next_is(StringView) const;
bool next_is(const char*) const;
bool next_is(Condition) const; bool next_is(Condition) const;
char consume(); char consume();
bool consume_specific(char specific); bool consume_specific(char);
bool consume_specific(const char* str); bool consume_specific(StringView);
bool consume_specific(const char*);
StringView consume(size_t count); StringView consume(size_t count);
StringView consume_all(); StringView consume_all();
StringView consume_line(); StringView consume_line();
StringView consume_while(Condition); StringView consume_while(Condition);
StringView consume_until(char stop); StringView consume_until(char);
StringView consume_until(const char* stop); StringView consume_until(const char*);
StringView consume_until(Condition); StringView consume_until(Condition);
// FIXME: provide an escape character // FIXME: provide an escape character
StringView consume_quoted_string(); StringView consume_quoted_string();
void ignore(size_t count = 1); void ignore(size_t count = 1);
void ignore_while(Condition); void ignore_while(Condition);
void ignore_until(char stop); void ignore_until(char);
void ignore_until(const char* stop); void ignore_until(const char*);
void ignore_until(Condition); void ignore_until(Condition);
protected: protected:

View file

@@ -25,6 +25,7 @@
*/ */
#include <AK/ByteBuffer.h> #include <AK/ByteBuffer.h>
#include <AK/GenericLexer.h>
#include <AK/HashMap.h> #include <AK/HashMap.h>
#include <AK/LexicalPath.h> #include <AK/LexicalPath.h>
#include <AK/StringBuilder.h> #include <AK/StringBuilder.h>
@@ -120,101 +121,59 @@ static OwnPtr<Interface> parse_interface(const StringView& input)
{ {
auto interface = make<Interface>(); auto interface = make<Interface>();
size_t index = 0; GenericLexer lexer(input);
auto peek = [&](size_t offset = 0) -> char { auto assert_specific = [&](char ch) {
if (index + offset > input.length()) auto consumed = lexer.consume();
return 0;
return input[index + offset];
};
auto consume = [&] {
return input[index++];
};
auto consume_if = [&](auto ch) {
if (peek() == ch) {
consume();
return true;
}
return false;
};
auto consume_specific = [&](char ch) {
auto consumed = consume();
if (consumed != ch) { if (consumed != ch) {
dbg() << "Expected '" << ch << "' at offset " << index << " but got '" << consumed << "'"; dbg() << "Expected '" << ch << "' at offset " << lexer.tell() << " but got '" << consumed << "'";
ASSERT_NOT_REACHED(); ASSERT_NOT_REACHED();
} }
}; };
auto consume_whitespace = [&] { auto consume_whitespace = [&] {
while (isspace(peek())) lexer.consume_while([](char ch) { return isspace(ch); });
consume();
}; };
auto consume_string = [&](const StringView& string) { auto assert_string = [&](const StringView& expected) {
for (size_t i = 0; i < string.length(); ++i) { bool saw_expected = lexer.consume_specific(expected);
ASSERT(consume() == string[i]); ASSERT(saw_expected);
}
}; };
auto next_is = [&](const StringView& string) { assert_string("interface");
for (size_t i = 0; i < string.length(); ++i) {
if (peek(i) != string[i])
return false;
}
return true;
};
auto consume_while = [&](auto condition) {
StringBuilder builder;
while (index < input.length() && condition(peek())) {
builder.append(consume());
}
return builder.to_string();
};
consume_string("interface");
consume_whitespace(); consume_whitespace();
interface->name = consume_while([](auto ch) { return !isspace(ch); }); interface->name = lexer.consume_until([](auto ch) { return isspace(ch); });
consume_whitespace(); consume_whitespace();
if (consume_if(':')) { if (lexer.consume_specific(':')) {
consume_whitespace(); consume_whitespace();
interface->parent_name = consume_while([](auto ch) { return !isspace(ch); }); interface->parent_name = lexer.consume_until([](auto ch) { return isspace(ch); });
consume_whitespace(); consume_whitespace();
} }
consume_specific('{'); assert_specific('{');
auto parse_type = [&] { auto parse_type = [&] {
auto name = consume_while([](auto ch) { return !isspace(ch) && ch != '?'; }); auto name = lexer.consume_until([](auto ch) { return isspace(ch) || ch == '?'; });
auto nullable = peek() == '?'; auto nullable = lexer.consume_specific('?');
if (nullable)
consume_specific('?');
return Type { name, nullable }; return Type { name, nullable };
}; };
auto parse_attribute = [&](HashMap<String, String>& extended_attributes) { auto parse_attribute = [&](HashMap<String, String>& extended_attributes) {
bool readonly = false; bool readonly = lexer.consume_specific("readonly");
bool unsigned_ = false; if (readonly)
if (next_is("readonly")) {
consume_string("readonly");
readonly = true;
consume_whitespace(); consume_whitespace();
}
if (next_is("attribute")) { if (lexer.consume_specific("attribute"))
consume_string("attribute");
consume_whitespace(); consume_whitespace();
}
if (next_is("unsigned")) { bool unsigned_ = lexer.consume_specific("unsigned");
consume_string("unsigned"); if (unsigned_)
unsigned_ = true;
consume_whitespace(); consume_whitespace();
}
auto type = parse_type(); auto type = parse_type();
consume_whitespace(); consume_whitespace();
auto name = consume_while([](auto ch) { return !isspace(ch) && ch != ';'; }); auto name = lexer.consume_until([](auto ch) { return isspace(ch) || ch == ';'; });
consume_specific(';'); consume_whitespace();
assert_specific(';');
Attribute attribute; Attribute attribute;
attribute.readonly = readonly; attribute.readonly = readonly;
attribute.unsigned_ = unsigned_; attribute.unsigned_ = unsigned_;
@@ -229,25 +188,27 @@ static OwnPtr<Interface> parse_interface(const StringView& input)
auto parse_function = [&](HashMap<String, String>& extended_attributes) { auto parse_function = [&](HashMap<String, String>& extended_attributes) {
auto return_type = parse_type(); auto return_type = parse_type();
consume_whitespace(); consume_whitespace();
auto name = consume_while([](auto ch) { return !isspace(ch) && ch != '('; }); auto name = lexer.consume_until([](auto ch) { return isspace(ch) || ch == '('; });
consume_specific('('); consume_whitespace();
assert_specific('(');
Vector<Parameter> parameters; Vector<Parameter> parameters;
for (;;) { for (;;) {
if (consume_if(')')) if (lexer.consume_specific(')'))
break; break;
auto type = parse_type(); auto type = parse_type();
consume_whitespace(); consume_whitespace();
auto name = consume_while([](auto ch) { return !isspace(ch) && ch != ',' && ch != ')'; }); auto name = lexer.consume_until([](auto ch) { return isspace(ch) || ch == ',' || ch == ')'; });
parameters.append({ move(type), move(name) }); parameters.append({ move(type), move(name) });
if (consume_if(')')) if (lexer.consume_specific(')'))
break; break;
consume_specific(','); assert_specific(',');
consume_whitespace(); consume_whitespace();
} }
consume_specific(';'); consume_whitespace();
assert_specific(';');
interface->functions.append(Function { return_type, name, move(parameters), move(extended_attributes) }); interface->functions.append(Function { return_type, name, move(parameters), move(extended_attributes) });
}; };
@@ -256,11 +217,11 @@ static OwnPtr<Interface> parse_interface(const StringView& input)
HashMap<String, String> extended_attributes; HashMap<String, String> extended_attributes;
for (;;) { for (;;) {
consume_whitespace(); consume_whitespace();
if (consume_if(']')) if (lexer.consume_specific(']'))
break; break;
auto name = consume_while([](auto ch) { return ch != ']' && ch != '=' && ch != ','; }); auto name = lexer.consume_until([](auto ch) { return ch == ']' || ch == '=' || ch == ','; });
if (consume_if('=')) { if (lexer.consume_specific('=')) {
auto value = consume_while([](auto ch) { return ch != ']' && ch != ','; }); auto value = lexer.consume_until([](auto ch) { return ch == ']' || ch == ','; });
extended_attributes.set(name, value); extended_attributes.set(name, value);
} else { } else {
extended_attributes.set(name, {}); extended_attributes.set(name, {});
@@ -275,14 +236,14 @@ static OwnPtr<Interface> parse_interface(const StringView& input)
consume_whitespace(); consume_whitespace();
if (consume_if('}')) if (lexer.consume_specific('}'))
break; break;
if (consume_if('[')) { if (lexer.consume_specific('[')) {
extended_attributes = parse_extended_attributes(); extended_attributes = parse_extended_attributes();
} }
if (next_is("readonly") || next_is("attribute")) { if (lexer.next_is("readonly") || lexer.next_is("attribute")) {
parse_attribute(extended_attributes); parse_attribute(extended_attributes);
continue; continue;
} }