Fixed the logic error in the tokenizer,
which skipped the first character in the buffer. Removed the workaround hack in the map, but left the debug code for now.
This commit is contained in:
parent
176b293197
commit
291bf90b24
2 changed files with 5 additions and 13 deletions
12
src/map.cpp
12
src/map.cpp
|
@ -350,16 +350,8 @@ void gamemap::read(const std::string& data, const tborder border_tiles, const tu
|
|||
config header;
|
||||
::read(header, header_str);
|
||||
|
||||
// FIXME ugly work-around hack for a bug in the parser, just to get trunk up and running again
|
||||
border_size_ = lexical_cast_default<int>(header["order_size"], -1);
|
||||
if(border_size_ == -1) {
|
||||
border_size_ = lexical_cast_default<int>(header["border_size"], 0);
|
||||
}
|
||||
|
||||
/*const*/ std::string usage = header["usage"];
|
||||
if(usage == "") {
|
||||
usage = header["sage"];
|
||||
}
|
||||
border_size_ = lexical_cast_default<int>(header["border_size"], 0);
|
||||
const std::string usage = header["usage"];
|
||||
|
||||
std::cerr << "header: " << header_str << "\n\n\n";
|
||||
std::cerr << "usage: " << usage << "\tborder: " << border_size_ << "\n";
|
||||
|
|
|
@ -208,8 +208,8 @@ void tokenizer_string::next_char()
|
|||
lineno_++;
|
||||
|
||||
do {
|
||||
if(offset_ + 1< in_.size()) {
|
||||
current_ = in_[++offset_];
|
||||
if(offset_ < in_.size()) {
|
||||
current_ = in_[offset_++];
|
||||
} else {
|
||||
current_ = EOF;
|
||||
}
|
||||
|
@ -219,6 +219,6 @@ void tokenizer_string::next_char()
|
|||
|
||||
int tokenizer_string::peek_char()
|
||||
{
|
||||
return in_[offset_ + 1];
|
||||
return in_[offset_];
|
||||
}
|
||||
|
||||
|
|
Loading…
Add table
Reference in a new issue