Merge branch 'range-based-lexer' of https://github.com/Hackerpilot/Dscanner into range-based-lexer

commit db978177d3
Author: Hackerpilot
Date: 2013-07-15 12:55:58 -07:00
10 changed files with 238 additions and 192 deletions

View File

@@ -1,5 +1,5 @@
-dmd *.d std/d/*.d -release -inline -noboundscheck -O -w -wi -m64 -property -ofdscanner-dmd
-#dmd *.d std/d/*.d -g -m64 -w -wi -ofdscanner -unittest
-ldc2 -O3 *.d std/d/*.d -of=dscanner-ldc -release -m64
+#dmd *.d std/d/*.d -release -inline -noboundscheck -O -w -wi -m64 -property -ofdscanner-dmd
+dmd *.d std/d/*.d -g -m64 -w -wi -ofdscanner
+#ldc2 -O3 *.d std/d/*.d -of=dscanner-ldc -release -m64
 #ldc2 *.d std/d/*.d -of=dscanner -unittest -m64 -g
-/opt/gdc/bin/gdc -O3 -odscanner-gdc -fno-bounds-check -frelease -m64 *.d std/d/*.d
+#/opt/gdc/bin/gdc -O3 -odscanner-gdc -fno-bounds-check -frelease -m64 *.d std/d/*.d

View File

@ -1,4 +0,0 @@
create table modules (path, mtime, id);
create table publicImports (importerId, importedId);
create table containers (name, protection, moduleId, id);
create table symbols (name, type, kind, containerId, id);

ctags.d (56 lines changed)

@@ -5,10 +5,64 @@
 module ctags;
 
-void printCtags(Tokens)(File output, ref Tokens tokens)
+import std.d.parser;
+import std.d.lexer;
+import std.d.ast;
+import std.algorithm;
+import std.stdio;
+import std.array;
+
+void doNothing(string, int, int, string) {}
+
+void printCtags(Tokens)(File output, ref Tokens tokens, string fileName)
 {
+    Module m = parseModule(tokens.array(), fileName, &doNothing);
+    auto printer = new CTagsPrinter;
+    printer.fileName = fileName;
+    printer.visit(m);
+    printer.print(output);
+}
+
+class CTagsPrinter : ASTVisitor
+{
+    alias ASTVisitor.visit visit;
+
+    override void visit(ClassDeclaration dec)
+    {
+        tagLines ~= "%s\t%s\t%d;\"\tc".format(dec.name.value, fileName, dec.name.line);
+        dec.structBody.accept(this);
+    }
+
+    override void visit(InterfaceDeclaration dec)
+    {
+        tagLines ~= "%s\t%s\t%d;\"\tc".format(dec.name.value, fileName, dec.name.line);
+        dec.structBody.accept(this);
+    }
+
+    override void visit(FunctionDeclaration dec)
+    {
+        tagLines ~= "%s\t%s\t%d;\"\tf\tarity:%d".format(dec.name.value, fileName,
+            dec.name.line, dec.parameters.parameters.length);
+    }
+
+    override void visit(EnumDeclaration dec)
+    {
+        tagLines ~= "%s\t%s\t%d;\"\tg".format(dec.name.value, fileName, dec.name.line);
+    }
+
+    void print(File output)
+    {
         output.write("!_TAG_FILE_FORMAT 2\n"
             ~ "!_TAG_FILE_SORTED 1\n"
+            ~ "!_TAG_FILE_AUTHOR Brian Schott\n"
             ~ "!_TAG_PROGRAM_URL https://github.com/Hackerpilot/Dscanner/\n");
+        foreach (str; sort(tagLines))
+        {
+            output.writeln(str);
+        }
+    }
+
+    string fileName;
+    string[] tagLines;
 }
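The new printCtags no longer formats tag lines while walking the token range; it parses the tokens into a Module and lets the CTagsPrinter visitor collect and sort the tags. A minimal sketch of how this entry point gets driven, pieced together from the printCtags call in main.d and the lexing code in the deleted tester program at the end of this commit (the sketch itself is not part of the diff):

import std.array;
import std.file;
import std.stdio;
import std.d.lexer;
import ctags;

void main(string[] args)
{
    // Read the file as raw bytes, the same way the deleted tester program did.
    ubyte[] rawSource = cast(ubyte[]) std.file.read(args[1]);

    // Tokenize with the range-based lexer using a default LexerConfig.
    LexerConfig config;
    auto tokens = byToken(rawSource, config).array();

    // Emit the ctags header and one sorted tag line per declaration,
    // mirroring the call made by main.d's --ctags branch.
    printCtags(stdout, tokens, args[1]);
}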

View File

@@ -317,7 +317,7 @@ local keywords = {
 -- For this module to work the dscanner program must be installed. Configure the
 -- path to the executable here
-M.PATH_TO_DSCANNER = "/home/alaran/src/dscanner-master/dscanner"
+M.PATH_TO_DSCANNER = "/home/alaran/src/dscanner/dscanner"
 
 _M.textadept.editing.comment_string.dmd = '//'
 _M.textadept.run.compile_command.dmd = 'dmd -c -o- %(filename)'
@@ -368,33 +368,57 @@ local function showCompletionList(r)
   buffer.auto_c_choose_single = setting
 end
 
-events.connect(events.CHAR_ADDED, function(ch)
-  if buffer:get_lexer() ~= "dmd" then return end
-  if ch > 255 then return end
-  local character = string.char(ch)
-  if character == "." or character == "(" then
-    local fileName = os.tmpname()
-    local tmpFile = io.open(fileName, "w")
-    tmpFile:write(buffer:get_text())
-    local command = M.PATH_TO_DSCANNER
-      .. (character == "." and " --dotComplete " or " --parenComplete ")
-      .. fileName .. " " .. buffer.current_pos .. " -I" .. buffer.filename:match(".+[\\/]")
-    local p = io.popen(command)
-    local r = p:read("*a")
-    if r ~= "\n" then
-      if character == "." then
-        showCompletionList(r)
-      elseif character == "(" then
-        if r:find("^completions\n") then
-          showCompletionList(r)
-        elseif r:find("^calltips\n.*") then
-          r = r:gsub("^calltips\n", "")
-          buffer:call_tip_show(buffer.current_pos, r:gsub("\\n", "\n"):gsub("\\t", "\t"):match("(.*)%s+$"))
-        end
-      end
-    end
-    os.remove(fileName)
-  end
-end)
+--events.connect(events.CHAR_ADDED, function(ch)
+--  if buffer:get_lexer() ~= "dmd" then return end
+--  if ch > 255 then return end
+--  local character = string.char(ch)
+--  if character == "." or character == "(" then
+--    local fileName = os.tmpname()
+--    local tmpFile = io.open(fileName, "w")
+--    tmpFile:write(buffer:get_text())
+--    local command = M.PATH_TO_DSCANNER
+--      .. (character == "." and " --dotComplete " or " --parenComplete ")
+--      .. fileName .. " " .. buffer.current_pos .. " -I" .. buffer.filename:match(".+[\\/]")
+--    local p = io.popen(command)
+--    local r = p:read("*a")
+--    if r ~= "\n" then
+--      if character == "." then
+--        showCompletionList(r)
+--      elseif character == "(" then
+--        if r:find("^completions\n") then
+--          showCompletionList(r)
+--        elseif r:find("^calltips\n.*") then
+--          r = r:gsub("^calltips\n", "")
+--          buffer:call_tip_show(buffer.current_pos, r:gsub("\\n", "\n"):gsub("\\t", "\t"):match("(.*)%s+$"))
+--        end
+--      end
+--    end
+--    os.remove(fileName)
+--  end
+--end)
+
+events.connect(events.FILE_AFTER_SAVE, function()
+  if buffer:get_lexer() ~= "dmd" then return end
+  buffer:annotation_clear_all()
+  --buffer.annotation_visible = _SCINTILLA.constants.ANNOTATION_STANDARD
+  local command = M.PATH_TO_DSCANNER .. " --syntaxCheck " .. buffer.filename
+  local p = io.popen(command)
+  for line in p:lines() do
+    lineNumber, column, level, message = string.match(line, "^.-%((%d+):(%d+)%)%[(%w+)%]: (.+)$")
+    local l = tonumber(lineNumber) - 1
+    local c = tonumber(column)
+    if level == "error" then
+      buffer.annotation_style[l] = 8
+    else
+      buffer.annotation_style[l] = 2
+    end
+    local t = buffer.annotation_text[l]
+    if #t > 0 then
+      buffer.annotation_text[l] = buffer.annotation_text[l] .. "\n" .. message
+    else
+      buffer.annotation_text[l] = message
+    end
+  end
+end)
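The FILE_AFTER_SAVE handler above assumes a specific diagnostic format from dscanner --syntaxCheck: one line per message, shaped as file(line:column)[level]: message, which is exactly what the writefln calls in the parser changes further down now produce. A small illustrative D snippet of the producer side of that contract, with made-up values:

import std.stdio;

void main()
{
    // Hypothetical values, for illustration only.
    string fileName = "example.d";
    int line = 42;
    int column = 7;
    string message = "Expected ; instead of foo";

    // The shape of line that the Lua pattern
    // "^.-%((%d+):(%d+)%)%[(%w+)%]: (.+)$" picks apart into
    // lineNumber, column, level, and message.
    writefln("%s(%d:%d)[error]: %s", fileName, line, column, message);
    // Prints: example.d(42:7)[error]: Expected ; instead of foo
}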

main.d (16 lines changed)

@@ -16,10 +16,12 @@ import std.regex;
 import std.stdio;
 import std.range;
 import std.d.lexer;
+import std.d.parser;
 import highlighter;
 import autocomplete;
 import stats;
+import ctags;
 
 /**
  * Loads any import directories specified in /etc/dmd.conf.
@@ -92,13 +94,14 @@ int main(string[] args)
     bool format;
     bool help;
     bool tokenCount;
+    bool syntaxCheck;
 
     try
     {
         getopt(args, "I", &importDirs, "dotComplete|d", &dotComplete, "sloc|l", &sloc,
             "json|j", &json, "parenComplete|p", &parenComplete, "highlight", &highlight,
             "ctags|c", &ctags, "recursive|r|R", &recursive, "help|h", &help,
-            "tokenCount", &tokenCount,
+            "tokenCount", &tokenCount, "syntaxCheck", &syntaxCheck,
             "declaration|e", &declaration, "symbolComplete|s", &symbolComplete);
     }
     catch (Exception e)
@@ -112,7 +115,8 @@ int main(string[] args)
         return 0;
     }
 
-    auto optionCount = count!"a"([sloc, highlight, ctags, json, tokenCount]);
+    auto optionCount = count!"a"([sloc, highlight, ctags, json, tokenCount,
+        syntaxCheck]);
     if (optionCount > 1)
     {
         stderr.writeln("Too many options specified");
@@ -172,6 +176,14 @@ int main(string[] args)
             {
             }
         }
+        else if (ctags)
+        {
+            printCtags(stdout, tokens, args[1]);
+        }
+        else if (syntaxCheck)
+        {
+            parseModule(tokens.array(), args[1]);
+        }
     }
     return 0;
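The new --syntaxCheck branch only shows the parseModule call; the tokens are produced earlier in main, outside this hunk. Borrowing the lexing steps from the deleted tester program at the end of this commit, the whole path amounts to roughly the sketch below (the syntaxCheck helper name is made up for illustration):

import std.array;
import std.file;
import std.d.lexer;
import std.d.parser;

// Lex a file and let parseModule report any problems through its default
// message handler (the parser's own writefln-based error/warning output).
void syntaxCheck(string fileName)
{
    ubyte[] rawSource = cast(ubyte[]) read(fileName);
    LexerConfig config;
    auto tokens = byToken(rawSource, config);
    parseModule(tokens.array(), fileName);
}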

std/d/ast.d

@@ -26,6 +26,7 @@ import std.d.lexer;
  */
 abstract class ASTVisitor
 {
+public:
     /** */ void visit(AddExpression addExpression) { addExpression.accept(this); }
     /** */ void visit(AliasDeclaration aliasDeclaration) { aliasDeclaration.accept(this); }
     /** */ void visit(AliasInitializer aliasInitializer) { aliasInitializer.accept(this); }
@@ -846,7 +847,7 @@ class Declarator : ASTNode
 {
 public:
     mixin(DEFAULT_ACCEPT);
-    /** */ Token identifier;
+    /** */ Token name;
     /** */ Initializer initializer;
 }
@@ -912,7 +913,7 @@ class EnumDeclaration : ASTNode
 {
 public:
     mixin(DEFAULT_ACCEPT);
-    /** */ Token identifier;
+    /** */ Token name;
     /** */ Type type;
     /** */ EnumBody enumBody;
 }
@@ -1231,7 +1232,7 @@ class InterfaceDeclaration : ASTNode
 {
 public:
     mixin(DEFAULT_ACCEPT);
-    /** */ Token identifier;
+    /** */ Token name;
     /** */ TemplateParameters templateParameters;
     /** */ Constraint constraint;
     /** */ BaseClassList baseClassList;
@@ -1290,6 +1291,7 @@ class LambdaExpression : ExpressionNode
 {
 public:
     mixin(DEFAULT_ACCEPT);
+    /** */ TokenType functionType;
     /** */ Token identifier;
     /** */ Parameters parameters;
     /** */ FunctionAttribute[] functionAttributes;

std/d/lexer.d

@@ -159,7 +159,7 @@ struct Token
      * Check to see if the token is of the same type and has the same string
      * representation as the given token.
      */
-    bool opEquals(ref const(Token) other) const
+    bool opEquals(ref const(Token) other) const nothrow pure
     {
         return other.type == type && other.value == value;
     }
@@ -168,17 +168,23 @@ struct Token
      * Checks to see if the token's string representation is equal to the given
      * string.
      */
-    bool opEquals(string value) const { return this.value == value; }
+    bool opEquals(string value) const nothrow pure
+    {
+        return this.value == value;
+    }
 
     /**
      * Checks to see if the token is of the given type.
      */
-    bool opEquals(TokenType type) const { return this.type == type; }
+    bool opEquals(TokenType type) const nothrow pure
+    {
+        return this.type == type;
+    }
 
     /**
      * Comparison operator orders tokens by start index.
     */
-    int opCmp(ref const(Token) other) const
+    int opCmp(ref const(Token) other) const nothrow pure
     {
         if (startIndex < other.startIndex) return -1;
         if (startIndex > other.startIndex) return 1;
@@ -449,7 +455,7 @@ L_advance:
         ">>>", "TokenType.unsignedShiftRight",
         ">>>=", "TokenType.unsignedShiftRightEqual",
         "^", "TokenType.xor",
-        "^=", "TokenType.xorEqual",
+        "^=", "TokenType.xorEqual"
     ));
     case '/':
         nextCharNonLF();
@@ -3109,7 +3115,7 @@ private:
         string value;
         Slot* next;
         uint hash;
-    };
+    }
 
     void printLoadFactor()
     {

std/d/parser.d

@@ -77,21 +77,23 @@ import std.string : format;
 // Uncomment this if you want ALL THE OUTPUT
 // Caution: generates 180 megabytes of logging for std.datetime
-// version = std_parser_verbose;
+//version = std_parser_verbose;
 
 /**
  * Params:
  *     tokens = the tokens parsed by std.d.lexer
  * Returns: the parsed module
  */
-Module parseModule(const(Token)[] tokens, string fileName)
+Module parseModule(const(Token)[] tokens, string fileName,
+    void function(string, int, int, string) messageFunction = null)
 {
     auto parser = new Parser();
     parser.fileName = fileName;
     parser.tokens = tokens;
+    parser.messageFunction = messageFunction;
     auto mod = parser.parseModule();
-    writefln("Parsing finished with %d errors and %d warnings.",
-        parser.errorCount, parser.warningCount);
+//    writefln("Parsing finished with %d errors and %d warnings.",
+//        parser.errorCount, parser.warningCount);
     return mod;
 }
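parseModule now takes an optional messageFunction: when it is null the parser prints its own diagnostics, otherwise every warning and error is handed to the callback (ctags.d passes &doNothing to silence them). A hedged sketch of a caller that collects diagnostics instead of printing them; the collect and checkFile names and the output format are invented for illustration:

import std.array;
import std.file;
import std.stdio;
import std.string : format;
import std.d.ast;
import std.d.lexer;
import std.d.parser;

// Collected messages go into a module-level array because the callback has to
// be a plain function pointer (void function(string, int, int, string)), not a
// delegate, to match the new parseModule signature.
string[] diagnostics;

void collect(string fileName, int line, int column, string message)
{
    diagnostics ~= format("%s(%d:%d): %s", fileName, line, column, message);
}

void checkFile(string fileName)
{
    ubyte[] rawSource = cast(ubyte[]) std.file.read(fileName);
    LexerConfig config;
    auto tokens = byToken(rawSource, config).array();
    Module m = parseModule(tokens, fileName, &collect);
    foreach (d; diagnostics)
        stderr.writeln(d);
}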
@@ -1757,6 +1759,7 @@ class ClassFour(A, B) if (someTest()) : Super {}}c;
             break;
         default:
             error("Declaration expected");
+            if (moreTokens())
                 advance();
             return null;
         }
@@ -1768,7 +1771,7 @@ class ClassFour(A, B) if (someTest()) : Super {}}c;
  *
  * $(GRAMMAR $(RULEDEF declarationsAndStatements):
  *     $(RULE declarationOrStatement)+
- *     ;
+ *     ;)
  */
 DeclarationsAndStatements parseDeclarationsAndStatements()
 {
@@ -1831,7 +1834,7 @@ class ClassFour(A, B) if (someTest()) : Super {}}c;
     auto node = new Declarator;
     auto id = expect(TokenType.identifier);
     if (id is null) return null;
-    node.identifier = *id;
+    node.name = *id;
     if (currentIsOneOf(TokenType.lBracket, TokenType.star))
     {
         error("C-style variable declarations are not supported.");
@@ -2004,7 +2007,7 @@ class ClassFour(A, B) if (someTest()) : Super {}}c;
     auto node = new EnumDeclaration;
     if (expect(TokenType.enum_) is null) return null;
     if (currentIs(TokenType.identifier))
-        node.identifier = advance();
+        node.name = advance();
     if (currentIs(TokenType.colon))
     {
         advance();
@@ -2667,7 +2670,8 @@ body {} // six
     {
         auto b = setBookmark();
         auto t = parseType();
-        if (t is null || !currentIs(TokenType.identifier))
+        if (t is null || !currentIs(TokenType.identifier)
+            || !peekIs(TokenType.assign))
         {
             goToBookmark(b);
             node.expression = parseExpression();
@@ -2958,7 +2962,7 @@ import core.stdc.stdio, std.string : KeepTerminator;
     if (expect(TokenType.interface_) is null) return null;
     auto ident = expect(TokenType.identifier);
     if (ident is null) return null;
-    node.identifier = *ident;
+    node.name = *ident;
     if (currentIs(TokenType.lParen))
     {
         node.templateParameters = parseTemplateParameters();
@@ -3159,17 +3163,26 @@ invariant() foo();
  * Parses a LambdaExpression
  *
  * $(GRAMMAR $(RULEDEF lambdaExpression):
- *     ($(LITERAL Identifier) | $(RULE parameters) $(RULE functionAttribute)* ) $(LITERAL '=>') $(RULE assignExpression)
+ *     $(LITERAL Identifier) $(LITERAL '=>') $(RULE assignExpression)
+ *     | $(LITERAL 'function') $(RULE parameters) $(RULE functionAttribute)* $(LITERAL '=>') $(RULE assignExpression)
+ *     | $(LITERAL 'delegate') $(RULE parameters) $(RULE functionAttribute)* $(LITERAL '=>') $(RULE assignExpression)
+ *     | $(RULE parameters) $(RULE functionAttribute)* $(LITERAL '=>') $(RULE assignExpression)
 *     ;)
 */
 LambdaExpression parseLambdaExpression()
 {
     mixin(traceEnterAndExit!(__FUNCTION__));
     auto node = new LambdaExpression;
-    if (currentIs(TokenType.identifier))
+    if (currentIsOneOf(TokenType.function_, TokenType.delegate_))
+    {
+        node.functionType = advance().type;
+        goto lParen;
+    }
+    else if (currentIs(TokenType.identifier))
         node.identifier = advance();
     else if (currentIs(TokenType.lParen))
     {
+    lParen:
         node.parameters = parseParameters();
         do
         {
@@ -3284,8 +3297,13 @@ invariant() foo();
     auto node = new MixinDeclaration;
     if (peekIs(TokenType.identifier))
         node.templateMixinExpression = parseTemplateMixinExpression();
-    else
+    else if (peekIs(TokenType.lParen))
         node.mixinExpression = parseMixinExpression();
+    else
+    {
+        error(`"(" or identifier expected`);
+        return null;
+    }
     expect(TokenType.semicolon);
     return node;
 }
@@ -4017,6 +4035,20 @@ q{(int a, ...)
         break;
     case function_:
     case delegate_:
+        if (peekIs(lParen))
+        {
+            auto b = setBookmark();
+            advance(); // function | delegate
+            skipParens();
+            if (currentIs(goesTo))
+            {
+                goToBookmark(b);
+                goto lambda;
+            }
+            else
+                goToBookmark(b);
+        }
+        goto case;
     case lBrace:
     case in_:
     case out_:
@@ -4046,6 +4078,7 @@ q{(int a, ...)
     if (currentIs(goesTo))
     {
         goToBookmark(b);
+    lambda:
         node.lambdaExpression = parseLambdaExpression();
     }
     else if (currentIs(lBrace))
@@ -6160,7 +6193,7 @@ private:
     auto column = index < tokens.length ? tokens[index].column : 0;
     auto line = index < tokens.length ? tokens[index].line : 0;
     if (messageFunction is null)
-        writefln("^^ %s(%d:%d): %s", fileName, line, column, message);
+        writefln("%s(%d:%d)[warn]: %s", fileName, line, column, message);
     else
         messageFunction(fileName, line, column, message);
 }
@@ -6175,7 +6208,7 @@ private:
     column++;
     auto line = index < tokens.length ? tokens[index].line : 0;
     if (messageFunction is null)
-        stderr.writefln("!! %s(%d:%d): %s", fileName, line, column, message);
+        writefln("%s(%d:%d)[error]: %s", fileName, line, column, message);
     else
         messageFunction(fileName, line, column, message);
 }
@@ -6295,7 +6328,7 @@ private:
      * Returns a token of the specified type if it was the next token, otherwise
      * calls the error function and returns null.
      */
-    const(Token)* expect(TokenType type, string loc = __PRETTY_FUNCTION__)
+    const(Token)* expect(TokenType type)
     {
         if (index < tokens.length && tokens[index].type == type)
             return &tokens[index++];
@@ -6303,10 +6336,10 @@ private:
         {
             if (tokenValues[type] is null)
                 error("Expected " ~ to!string(type) ~ " instead of "
-                    ~ (index < tokens.length ? tokens[index].value : "EOF") ~ " at " ~ loc);
+                    ~ (index < tokens.length ? tokens[index].value : "EOF"));
             else
                 error("Expected " ~ tokenValues[type] ~ " instead of "
-                    ~ (index < tokens.length ? tokens[index].value : "EOF") ~ " at " ~ loc);
+                    ~ (index < tokens.length ? tokens[index].value : "EOF"));
             return null;
         }
     }
@@ -6407,9 +6440,9 @@ private:
     if (suppressMessages > 0)
         return;
     if (index < tokens.length)
-        stderr.writeln(message, "(", current.line, ":", current.column + 1, ")");
+        writeln(message, "(", current.line, ":", current.column + 1, ")");
     else
-        stderr.writeln(message, "(EOF:0)");
+        writeln(message, "(EOF:0)");
 }
 }
 else

View File

@ -1,12 +0,0 @@
if [ ! -d runs ]; then
mkdir runs
fi
for file in /usr/include/d/std/*.d; do
shortFile=$(basename $file)
echo "Parsing" $shortFile "..."
outFile=runs/$shortFile.txt
./tester $file > $outFile
done
echo
grep -l "Parsing finished with 0 errors" runs/*.txt | sed -e "s/runs\//Pass /" -e "s/.txt//"
grep -L "Parsing finished with 0 errors" runs/*.txt | sed -e "s/runs\//Fail /" -e "s/.txt//"

View File

@@ -1,69 +0,0 @@
-import std.d.lexer;
-import std.d.ast;
-import std.d.parser;
-import std.stdio;
-import std.file;
-import std.array;
-
-class TestVisitor : ASTVisitor
-{
-    override void visit(ClassDeclaration classDeclaration)
-    {
-        writeln("class ", classDeclaration.name.value, " on line ", classDeclaration.name.line);
-    }
-
-    override void visit(StructDeclaration structDeclaration)
-    {
-        writeln("struct ", structDeclaration.name.value, " on line ", structDeclaration.name.line);
-    }
-
-    override void visit(ModuleDeclaration md)
-    {
-        writeln("module declaration found");
-    }
-
-    override void visit(FunctionDeclaration funDec)
-    {
-        writeln("function ", funDec.name.value, " on line ", funDec.name.line);
-    }
-
-    override void visit(VariableDeclaration varDec)
-    {
-        foreach (decl; varDec.declarators)
-        {
-            writeln("variable ", decl.identifier.value,
-                " on line ", decl.identifier.line);
-        }
-    }
-
-    override void visit(ImportDeclaration impDec)
-    {
-        writeln("import declaration found");
-    }
-
-    override void visit(InterfaceDeclaration intDec)
-    {
-        writeln("Interface ", intDec.identifier.value,
-            " on line ", intDec.identifier.line);
-    }
-
-    override void visit(VersionSpecification verSpec)
-    {
-        writeln("Version specification");
-    }
-
-    alias ASTVisitor.visit visit;
-}
-
-void main(string[] args)
-{
-    auto de = dirEntry(args[1]);
-    ubyte[] sourceBuffer = new ubyte[de.size];
-    auto f = File(args[1]);
-    ubyte[] rawSource = f.rawRead(sourceBuffer);
-    LexerConfig config;
-    auto tokens = byToken(rawSource, config).array();
-    Module m = parseModule(tokens, args[1]);
-    //ASTVisitor visitor = new TestVisitor;
-    //visitor.visit(m);
-}