diff --git a/stdx/d/lexer.d b/stdx/d/lexer.d
index 09d0ee0..ff4c01c 100644
--- a/stdx/d/lexer.d
+++ b/stdx/d/lexer.d
@@ -1424,7 +1424,7 @@ public struct DLexer
     bool isSeparating(size_t offset) pure nothrow @safe
     {
-        if (!range.canPeek(offset)) return false;
+        if (!range.canPeek(offset)) return true;
         auto c = range.peekAt(offset);
         if (c >= 'A' && c <= 'Z') return false;
         if (c >= 'a' && c <= 'z') return false;
diff --git a/stdx/lexer.d b/stdx/lexer.d
index f14e788..c15c8c8 100644
--- a/stdx/lexer.d
+++ b/stdx/lexer.d
@@ -238,7 +238,13 @@ mixin template Lexer(IDType, Token, alias defaultTokenFunction,
         if (tokens.length == 1 && tokens[0].length == 1)
         {
-            if (staticTokens.countUntil(tokens[0]) >= 0)
+            if (pseudoTokens.countUntil(tokens[0]) >= 0)
+            {
+                return " return "
+                    ~ pseudoTokenHandlers[pseudoTokenHandlers.countUntil(tokens[0]) + 1]
+                    ~ "();\n";
+            }
+            else if (staticTokens.countUntil(tokens[0]) >= 0)
             {
                 return " range.popFront();\n"
                     ~ " return Token(tok!\"" ~ escape(tokens[0]) ~ "\", null, line, column, index);\n";
@@ -261,19 +267,7 @@ mixin template Lexer(IDType, Token, alias defaultTokenFunction,
             else
                 code ~= " if ((frontBytes & " ~ generateByteMask(token.length) ~ ") == " ~ mask ~ ")\n";
             code ~= " {\n";
-            if (staticTokens.countUntil(token) >= 0)
-            {
-                if (token.length <= 8)
-                {
-                    code ~= " range.popFrontN(" ~ text(token.length) ~ ");\n";
-                    code ~= " return Token(tok!\"" ~ escape(token) ~ "\", null, line, column, index);\n";
-                }
-                else
-                {
-                    code ~= " pragma(msg, \"long static tokens not supported\"); // " ~ escape(token) ~ "\n";
-                }
-            }
-            else if (pseudoTokens.countUntil(token) >= 0)
+            if (pseudoTokens.countUntil(token) >= 0)
             {
                 if (token.length <= 8)
                 {
@@ -289,6 +283,18 @@ mixin template Lexer(IDType, Token, alias defaultTokenFunction,
                     ~ "();\n";
                 }
             }
+            else if (staticTokens.countUntil(token) >= 0)
+            {
+                if (token.length <= 8)
+                {
+                    code ~= " range.popFrontN(" ~ text(token.length) ~ ");\n";
+                    code ~= " return Token(tok!\"" ~ escape(token) ~ "\", null, line, column, index);\n";
+                }
+                else
+                {
+                    code ~= " pragma(msg, \"long static tokens not supported\"); // " ~ escape(token) ~ "\n";
+                }
+            }
             else
             {
                 // possible default
@@ -365,7 +371,7 @@ mixin template Lexer(IDType, Token, alias defaultTokenFunction,
         return retVal;
     }

-    enum tokenSearch = generateCaseStatements(stupidToArray(sort(staticTokens ~ pseudoTokens ~ possibleDefaultTokens)));
+    enum tokenSearch = generateCaseStatements(stupidToArray(uniq(sort(staticTokens ~ pseudoTokens ~ possibleDefaultTokens))));

     static ulong getFront(const ubyte[] arr) pure nothrow @trusted
     {
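
A minimal sketch (not part of the patch) of why the first hunk changes isSeparating to return true when peeking past the end of input: end of input separates a token just as a non-identifier character does, so a keyword that ends the source should still lex as a keyword. The helper below is hypothetical and only mirrors the A-Z / a-z checks visible in the hunk, using a plain byte slice instead of the lexer's range.

// D sketch, assuming `source` is the remaining input and `offset` indexes into it.
unittest
{
    static bool isSeparating(const(ubyte)[] source, size_t offset)
    {
        if (offset >= source.length) return true; // end of input separates (the fix)
        auto c = source[offset];
        if (c >= 'A' && c <= 'Z') return false;
        if (c >= 'a' && c <= 'z') return false;
        return true;
    }

    assert(isSeparating(cast(const(ubyte)[]) "return;", 6));  // ';' separates
    assert(isSeparating(cast(const(ubyte)[]) "return", 6));   // EOF separates; old code said false
    assert(!isSeparating(cast(const(ubyte)[]) "returns", 6)); // 's' continues an identifier
}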