Fix ordering on token handling

commit 3eaf095625
parent 62df18f489
Author: Hackerpilot
Date:   2014-01-23 02:29:27 -08:00

2 changed files with 22 additions and 16 deletions

@@ -1424,7 +1424,7 @@ public struct DLexer
     bool isSeparating(size_t offset) pure nothrow @safe
     {
-        if (!range.canPeek(offset)) return false;
+        if (!range.canPeek(offset)) return true;
         auto c = range.peekAt(offset);
         if (c >= 'A' && c <= 'Z') return false;
         if (c >= 'a' && c <= 'z') return false;
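
Note on the hunk above: isSeparating decides whether the byte at the given lookahead offset terminates the token currently being matched. Returning true when nothing can be peeked treats end of input as a separator, so a keyword or identifier that ends exactly at the end of the source is still accepted instead of being rejected as "not separated". The following is a minimal self-contained sketch of that intent, not the library's actual implementation; the digit/underscore checks and the plain byte-slice parameter are assumptions added for illustration.

    // Illustrative sketch only: end of input counts as a separator.
    bool isSeparatingSketch(const(ubyte)[] input, size_t offset) pure nothrow @safe
    {
        if (offset >= input.length)
            return true;  // nothing follows, so the candidate token is complete
        immutable c = input[offset];
        if (c >= 'A' && c <= 'Z') return false;
        if (c >= 'a' && c <= 'z') return false;
        if (c >= '0' && c <= '9') return false;
        if (c == '_') return false;
        return true;      // any other byte (space, operator, ...) separates
    }

    unittest
    {
        auto src = cast(const(ubyte)[]) "return";
        assert(isSeparatingSketch(src, 6));                              // keyword ending at EOF is separated
        assert(!isSeparatingSketch(cast(const(ubyte)[]) "returns", 6));  // 's' continues the identifier
    }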

@@ -238,7 +238,13 @@ mixin template Lexer(IDType, Token, alias defaultTokenFunction,
         if (tokens.length == 1 && tokens[0].length == 1)
         {
-            if (staticTokens.countUntil(tokens[0]) >= 0)
+            if (pseudoTokens.countUntil(tokens[0]) >= 0)
             {
+                return " return "
+                    ~ pseudoTokenHandlers[pseudoTokenHandlers.countUntil(tokens[0]) + 1]
+                    ~ "();\n";
+            }
+            else if (staticTokens.countUntil(tokens[0]) >= 0)
+            {
                 return " range.popFront();\n"
                     ~ " return Token(tok!\"" ~ escape(tokens[0]) ~ "\", null, line, column, index);\n";
@@ -261,19 +267,7 @@ mixin template Lexer(IDType, Token, alias defaultTokenFunction,
             else
                 code ~= " if ((frontBytes & " ~ generateByteMask(token.length) ~ ") == " ~ mask ~ ")\n";
             code ~= " {\n";
-            if (staticTokens.countUntil(token) >= 0)
-            {
-                if (token.length <= 8)
-                {
-                    code ~= " range.popFrontN(" ~ text(token.length) ~ ");\n";
-                    code ~= " return Token(tok!\"" ~ escape(token) ~ "\", null, line, column, index);\n";
-                }
-                else
-                {
-                    code ~= " pragma(msg, \"long static tokens not supported\"); // " ~ escape(token) ~ "\n";
-                }
-            }
-            else if (pseudoTokens.countUntil(token) >= 0)
+            if (pseudoTokens.countUntil(token) >= 0)
             {
                 if (token.length <= 8)
                 {
@@ -289,6 +283,18 @@ mixin template Lexer(IDType, Token, alias defaultTokenFunction,
                         ~ "();\n";
                 }
             }
+            else if (staticTokens.countUntil(token) >= 0)
+            {
+                if (token.length <= 8)
+                {
+                    code ~= " range.popFrontN(" ~ text(token.length) ~ ");\n";
+                    code ~= " return Token(tok!\"" ~ escape(token) ~ "\", null, line, column, index);\n";
+                }
+                else
+                {
+                    code ~= " pragma(msg, \"long static tokens not supported\"); // " ~ escape(token) ~ "\n";
+                }
+            }
             else
             {
                 // possible default
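
Note on the two hunks above (and the single-character path changed at the start of this file's diff): the generated dispatch previously checked staticTokens before pseudoTokens. A pseudo token is one that needs a handler function because it may begin a longer token; if the same string is also registered as a static token, emitting the static handling first pops its characters and returns a token without ever consulting the handler. Checking pseudoTokens first lets the handler win. Below is a minimal sketch of the corrected precedence with made-up helper results and token lists; the example of "." appearing in both lists is an assumption, not taken from the library.

    import std.algorithm.searching : countUntil;

    // Illustrative sketch only: pseudo tokens are checked before static tokens,
    // so a string present in both lists is routed to its handler (which can look
    // ahead for longer matches) instead of being emitted immediately.
    string dispatchFor(string token, string[] pseudoTokens, string[] staticTokens)
    {
        if (pseudoTokens.countUntil(token) >= 0)
            return "call handler";      // e.g. "." might begin "..", "...", or a float literal
        else if (staticTokens.countUntil(token) >= 0)
            return "emit static token"; // fixed spelling, no lookahead needed
        else
            return "default handling";
    }

    unittest
    {
        // Hypothetical lists in which "." is registered both ways.
        auto pseudo  = [".", "/"];
        auto statics = [".", ",", ";"];
        assert(dispatchFor(".", pseudo, statics) == "call handler");
        assert(dispatchFor(",", pseudo, statics) == "emit static token");
    }
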
@@ -365,7 +371,7 @@ mixin template Lexer(IDType, Token, alias defaultTokenFunction,
         return retVal;
     }
-    enum tokenSearch = generateCaseStatements(stupidToArray(sort(staticTokens ~ pseudoTokens ~ possibleDefaultTokens)));
+    enum tokenSearch = generateCaseStatements(stupidToArray(uniq(sort(staticTokens ~ pseudoTokens ~ possibleDefaultTokens))));
     static ulong getFront(const ubyte[] arr) pure nothrow @trusted
     {
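
Note on the final hunk: tokenSearch is generated from the sorted concatenation of staticTokens, pseudoTokens, and possibleDefaultTokens, and a string may appear in more than one of those lists. Sorting alone keeps such duplicates, so the sorted range is now passed through uniq, presumably so that each distinct token string produces only one case in the generated code. A small self-contained illustration of that deduplication, using hypothetical token lists:

    import std.algorithm : sort, uniq;
    import std.array : array;

    unittest
    {
        // Hypothetical token lists in which "." occurs twice in the concatenation;
        // sort groups the duplicates and uniq drops them, so only one case would
        // be generated for ".".
        string[] staticTokens          = [",", ".", ";"];
        string[] pseudoTokens          = [".", "/"];
        string[] possibleDefaultTokens = [];

        auto all     = staticTokens ~ pseudoTokens ~ possibleDefaultTokens;
        auto deduped = all.sort().uniq().array;
        assert(deduped == [",", ".", "/", ";"]);
    }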