Token dump now uses the parser behavior while lexing

This commit is contained in:
Hackerpilot 2016-01-11 17:41:39 -08:00
parent 0886f63033
commit 26500bf876
1 changed file with 4 additions and 2 deletions

View File

@ -155,15 +155,17 @@ int main(string[] args)
ubyte[] bytes = usingStdin ? readStdin() : readFile(args[1]);
LexerConfig config;
config.stringBehavior = StringBehavior.source;
auto tokens = byToken(bytes, config, &cache);
if (highlight)
{
auto tokens = byToken(bytes, config, &cache);
highlighter.highlight(tokens, args.length == 1 ? "stdin" : args[1]);
return 0;
}
else if (tokenDump)
{
writeln("text blank\tindex\tline\tcolumn\ttype\tcomment");
auto tokens = getTokensForParser(bytes, config, &cache);
writeln("text blank\tindex\tline\tcolumn\ttype\tcomment\ttrailingComment");
foreach (token; tokens)
{
writefln("<<%20s>>%b\t%d\t%d\t%d\t%d\t%s", token.text is null ? str(token.type) : token.text,