4 times as fast
parent 1055a47087
commit a5ed81b711
build.sh (4 changed lines)
@@ -1,2 +1,2 @@
-#dmd *.d std/d/*.d -release -noboundscheck -O -w -wi -m64 -property -ofdscanner -L-lsqlite3 #-inline
-dmd *.d std/d/*.d -g -m64 -w -wi -property -ofdscanner -L-lsqlite3 #-unittest
+dmd *.d std/d/*.d -release -inline -noboundscheck -O -w -wi -m64 -property -ofdscanner -L-lsqlite3 #-inline
+#dmd *.d std/d/*.d -g -m64 -w -wi -property -ofdscanner -L-lsqlite3 #-unittest
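
Note: the build change above swaps the default from a debug build (-g) to an optimized release build (-release -inline -noboundscheck -O), which plausibly accounts for part of the speedup in the commit title. A toy D illustration (not code from this repository) of the kind of work those flags remove:

// With the old debug flags the assert and the array bounds check below are live;
// -release drops the assert and -noboundscheck drops the bounds check, while
// -O and -inline optimize and inline what remains.
int sum(const int[] a)
{
    int total = 0;
    foreach (i; 0 .. a.length)
    {
        assert(a[i] >= 0);   // removed by -release
        total += a[i];       // bounds check removed by -noboundscheck
    }
    return total;
}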
highlighter.d
@@ -38,20 +38,18 @@ html { background-color: #fdf6e3; color: #002b36; }
     foreach (Token t; tokens)
     {
-        if (t.type > TokenType.TYPES_BEGIN && t.type < TokenType.TYPES_END)
+        if (isType(t.type))
             writeSpan("type", t.value);
-        else if (t.type > TokenType.KEYWORDS_BEGIN && t.type < TokenType.KEYWORDS_END)
+        else if (isKeyword(t.type))
             writeSpan("kwrd", t.value);
         else if (t.type == TokenType.Comment)
             writeSpan("com", t.value);
-        else if (t.type > TokenType.STRINGS_BEGIN && t.type < TokenType.STRINGS_END)
+        else if (isStringLiteral(t.type))
             writeSpan("str", t.value);
-        else if (t.type > TokenType.NUMBERS_BEGIN && t.type < TokenType.NUMBERS_END)
+        else if (isNumberLiteral(t.type))
             writeSpan("num", t.value);
-        else if (t.type > TokenType.OPERATORS_BEGIN && t.type < TokenType.OPERATORS_END)
+        else if (isOperator(t.type))
             writeSpan("op", t.value);
-        else if (t.type > TokenType.CONSTANTS_BEGIN && t.type < TokenType.CONSTANTS_END)
-            writeSpan("cons", t.value);
         else
             stdout.write(t.value.replace("<", "&lt;"));
     }
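
Note: the hunk above replaces raw sentinel-range comparisons on TokenType with named predicates (isType, isKeyword, isStringLiteral, and so on). A minimal self-contained sketch of that predicate style, using a toy enum rather than the real std.d.lexer TokenType:

// Toy enum standing in for the lexer's TokenType; the real module's members differ.
enum TokType { TYPES_BEGIN, Bool, Int, WString, TYPES_END, Identifier }

// Wrapping the range check behind a named, pure, nothrow predicate keeps the
// highlighter's if/else chain readable and keeps the sentinel names in one place.
pure nothrow bool isType(TokType t)
{
    return t > TokType.TYPES_BEGIN && t < TokType.TYPES_END;
}

unittest
{
    assert(isType(TokType.Int));
    assert(!isType(TokType.Identifier));
}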
langutils.d (38 changed lines)
@@ -9,33 +9,6 @@ import std.array;
 import std.algorithm;
 import std.d.lexer;
 
-/**
- * Returns: true if input is a access attribute
- */
-pure nothrow bool isAccessAttribute(TokenType input)
-{
-    return input > TokenType.PROTECTION_BEGIN && input < TokenType.PROTECTION_END;
-}
-
-/**
- * See_also: isAttribute(TokenType)
- */
-pure nothrow bool isAttribute(ref const Token token)
-{
-    return isAttribute(token.type);
-}
-
-/**
- * Returns: true if the given token type is an attribute, false otherwise
- */
-pure nothrow bool isAttribute(TokenType input)
-{
-    if (isAccessAttribute(input))
-        return true;
-    return input > TokenType.ATTRIBUTES_BEGIN && input < TokenType.ATTRIBUTES_END;
-}
-
 string combineTokens(ref const Token[] tokens)
 {
     auto app = appender!string();
@@ -74,14 +47,13 @@ pure nothrow string getTypeFromToken(const Token t)
     }
 }
 
-pure bool isIdentifierOrType(const Token t)
-{
-    return t.type == TokenType.Identifier || (t.type > TokenType.TYPES_BEGIN
-        && TokenType.TYPES_END);
-}
-
 pure bool isDocComment(ref const Token t)
 {
     return t.value.startsWith("///") || t.value.startsWith("/**")
         || t.value.startsWith("/++");
 }
+
+pure nothrow bool isIdentifierOrType(const TokenType t)
+{
+    return isType(t) || t == TokenType.Identifier;
+}
main.d (34 changed lines)
@@ -107,27 +107,29 @@ int main(string[] args)
     string[] importDirs;
     bool sloc;
     /+bool dotComplete;+/
-    bool json;
+    /+bool json;+/
     /+bool parenComplete;+/
     bool highlight;
     bool ctags;
     bool recursiveCtags;
     bool format;
     bool help;
+    bool tokenCount;
 
     try
     {
         getopt(args, "I", &importDirs,/+ "dotComplete", &dotComplete,+/ "sloc", &sloc,
-            "json", &json, /+"parenComplete", &parenComplete,+/ "highlight", &highlight,
-            "ctags", &ctags, "recursive|r|R", &recursiveCtags, "help|h", &help);
+            /+"json", &json,+/ /+"parenComplete", &parenComplete,+/ "highlight", &highlight,
+            "ctags", &ctags, "recursive|r|R", &recursiveCtags, "help|h", &help,
+            "tokenCount", &tokenCount);
     }
     catch (Exception e)
     {
         stderr.writeln(e.msg);
     }
 
-    if (help || (!sloc && /+!dotComplete &&+/ !json /+&& !parenComplete+/ && !highlight
-        && !ctags && !format))
+    if (help || (!sloc && /+!dotComplete &&+/ /+!json &&+/ /+!parenComplete &&+/ !highlight
+        && !ctags && !format && !tokenCount))
     {
         printHelp();
         return 0;
@@ -135,7 +137,19 @@ int main(string[] args)
     importDirs ~= loadConfig();
 
-    if (sloc)
+    if (tokenCount)
+    {
+        /+if (args.length == 1)
+        {
+            writeln((cast (ubyte[]) stdin.byLine(KeepTerminator.yes).join()).byToken().walkLength());
+        }
+        else
+        {+/
+            writeln(args[1..$].map!(a => byToken(cast(ubyte[]) File(a).byLine(KeepTerminator.yes).join(), a).walkLength())());
+        /+}+/
+    }
+
+    /+if (sloc)
     {
         if (args.length == 1)
         {
@@ -147,12 +161,12 @@ int main(string[] args)
                 .joiner().count!(a => isLineOfCode(a.type))());
         }
         return 0;
-    }
+    }+/
 
     if (highlight)
     {
         File f = args.length == 1 ? stdin : File(args[1]);
-        highlighter.highlight(f.byLine(KeepTerminator.yes).join().byToken(
+        highlighter.highlight((cast(ubyte[]) f.byLine(KeepTerminator.yes).join()).byToken(
             "", IterationStyle.Everything, TokenStyle.Source));
         return 0;
     }
@@ -196,7 +210,7 @@ int main(string[] args)
         return 0;
     }+/
 
-    if (json)
+    /+if (json)
     {
         CircularBuffer!(Token) tokens;
         File f = args.length == 1 ? stdin : File(args[1]);
@@ -205,7 +219,7 @@ int main(string[] args)
         auto mod = parseModule(tokens);
         mod.writeJSONTo(stdout);
         return 0;
-    }
+    }+/
 
     // if (ctags)
     // {
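
Note: the new --tokenCount path in main.d lexes each input file into a lazy token range and counts it with std.range.walkLength. A self-contained sketch of that pattern, with a stand-in tokenizer instead of std.d.lexer's byToken:

import std.algorithm : filter, map, splitter;
import std.range : walkLength;
import std.stdio : writeln;

// Stand-in for byToken: split on spaces and drop empties, yielding a lazy range.
auto byTokenToy(string source)
{
    return source.splitter(' ').filter!(t => t.length > 0);
}

void main()
{
    auto sources = ["int x = 42;", "foo ( bar )"];
    // walkLength walks each lazy range once and returns how many tokens it produced,
    // mirroring byToken(...).walkLength() in the diff above.
    writeln(sources.map!(s => byTokenToy(s).walkLength()));
}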
parser.d (10 changed lines)
@@ -385,7 +385,7 @@ Module parseModule(TokenBuffer tokens, string protection = "public", string[] at
             else
                 localAttributes ~= attribute;
             break;
-        case TokenType.PROTECTION_BEGIN: .. case TokenType.PROTECTION_END:
+        case TokenType.Export: .. case TokenType.Public:
             string p = tokens.front.value;
             tokens.popFront();
             if (tokens.front == TokenType.Colon)
@@ -448,7 +448,7 @@ Module parseModule(TokenBuffer tokens, string protection = "public", string[] at
             tokens.betweenBalancedBraces(); // body
             resetLocals();
             break;
-        case TokenType.TYPES_BEGIN: .. case TokenType.TYPES_END:
+        case TokenType.Bool: .. case TokenType.WString:
         case TokenType.Auto:
         case TokenType.Identifier:
             if (type.empty())
@@ -573,7 +573,7 @@ body
     if (tokens.front == TokenType.LBrace)
         goto enumBody;
 
-    if (isIdentifierOrType(tokens.front))
+    if (isIdentifierOrType(tokens.front.type))
     {
         if (tokens.canPeek() && tokens.peek() == TokenType.Identifier)
         {
@@ -601,7 +601,7 @@ body
         }
     }
 
-    if (isIdentifierOrType(tokens.front))
+    if (isIdentifierOrType(tokens.front.type))
     {
         e.name = tokens.front.value;
         tokens.popFront();
@@ -610,7 +610,7 @@ body
     if (tokens.front == TokenType.Colon)
     {
         tokens.popFront();
-        if (!isIdentifierOrType(tokens.front))
+        if (!isIdentifierOrType(tokens.front.type))
            tokens.skipBlockStatement();
         else
         {
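
Note: the parser.d changes swap the old *_BEGIN/*_END sentinel case ranges for D's case-range syntax over explicit first and last members (Export..Public, Bool..WString). A self-contained sketch of how `case A: .. case B:` behaves, using a toy enum rather than the real TokenType:

import std.stdio : writeln;

// Toy enum; member names chosen to echo the diff, ordering is what matters.
enum Tok { Export, Package, Private, Protected, Public, Bool, Int, WString, Identifier }

string classify(Tok t)
{
    switch (t)
    {
        // A case range matches every value from the first member through the
        // last, inclusive, so no BEGIN/END sentinels are needed in the enum.
        case Tok.Export: .. case Tok.Public:
            return "protection";
        case Tok.Bool: .. case Tok.WString:
            return "type";
        default:
            return "other";
    }
}

void main()
{
    writeln(classify(Tok.Private));    // protection
    writeln(classify(Tok.Int));        // type
    writeln(classify(Tok.Identifier)); // other
}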
std/d/lexer.d (789 changed lines)
File diff suppressed because it is too large.