fix compile errors by COMMENTING OUT EVERYTHING

Hackerpilot 2013-03-04 02:01:27 +00:00
parent c867b86598
commit 3c8b5c4bc4
3 changed files with 334 additions and 300 deletions

main.d

@@ -158,12 +158,12 @@ int main(string[] args)
 	}
 	else if (dotComplete || parenComplete || symbolComplete)
 	{
-		auto app = appender!Token();
+		auto app = appender!(Token[])();
 		app.reserve(byteCount / 13);
 		while (!tokens.empty)
-			app.put(tokensn.moveFront());
+			app.put(tokens.moveFront());
 		Token[] tokenArr = app.data;
-		else if (dotComplete)
+		if (dotComplete)
 		{
 		}
 		else if (parenComplete)
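
The compile fix in this hunk is the appender instantiation: std.array's Appender is parameterized on the array type being built, not the element type, so appender!Token() does not compile while appender!(Token[])() does. A minimal sketch of the pattern, using a stand-in Token struct rather than the real std.d.lexer one:

import std.array : appender;

struct Token { string value; }   // stand-in; the real Token comes from std.d.lexer

Token[] collect(Token[] source)
{
    auto app = appender!(Token[])();   // note: Token[] (the array type), not Token
    app.reserve(source.length);        // optional: pre-allocate a size estimate
    foreach (t; source)
        app.put(t);                    // append elements one at a time
    return app.data;                   // the accumulated Token[]
}

unittest
{
    assert(collect([Token("module"), Token("a")]).length == 2);
}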


@@ -10,8 +10,27 @@ import std.container;
 import std.d.lexer;
 interface ASTNode {}
-class DeclDef : ASTNode {}
+interface DeclDef : ASTNode {}
+interface AttributeSpecifier : DeclDef {}
+interface EnumDeclaration : DeclDef {}
+interface ClassDeclaration : DeclDef {}
+interface InterfaceDeclaration : DeclDef {}
+interface AggregateDeclaration : DeclDef {}
+interface Declaration : DeclDef {}
+interface Constructor : DeclDef {}
+interface Destructor : DeclDef {}
+interface UnitTest : DeclDef {}
+interface StaticConstructor : DeclDef {}
+interface StaticDestructor : DeclDef {}
+interface SharedStaticConstructor : DeclDef {}
+interface SharedStaticDestructor : DeclDef {}
+interface ConditionalDeclaration : DeclDef {}
+interface DebugSpecification : DeclDef {}
+interface VersionSpecification : DeclDef {}
+interface TemplateDeclaration : DeclDef {}
+interface TemplateMixinDeclaration : DeclDef {}
+interface MixinDeclaration : DeclDef {}
 class Module : ASTNode
 {
@@ -113,9 +132,9 @@ class PragmaStatement : NonEmptyStatementNoCaseNoDefault {}
 class MixinStatement : NonEmptyStatementNoCaseNoDefault {}
 class ForeachRangeStatement : NonEmptyStatementNoCaseNoDefault {}
 class ConditionalStatement : NonEmptyStatementNoCaseNoDefault {}
-class StaticAssert : NonEmptyStatementNoCaseNoDefault {}
-class TemplateMixin : NonEmptyStatementNoCaseNoDefault {}
-class ImportDeclaration : NonEmptyStatementNoCaseNoDefault
+class StaticAssert : NonEmptyStatementNoCaseNoDefault, DeclDef {}
+class TemplateMixin : NonEmptyStatementNoCaseNoDefault, DeclDef {}
+class ImportDeclaration : NonEmptyStatementNoCaseNoDefault, DeclDef
 {
 	bool isStatic;
 	Import[] importList;
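
The reason DeclDef and its children became interfaces in the first hunk of this file shows up here: a D class gets only one base class but may implement any number of interfaces, so a statement node can keep its statement base class and still be usable as a declaration-level node. A hedged sketch of that constraint; NonEmptyStatementNoCaseNoDefault is assumed to be a class purely for the sake of the example:

interface ASTNode {}
interface DeclDef : ASTNode {}                 // declaration-level marker, as added in this commit

class NonEmptyStatementNoCaseNoDefault {}      // assumed class; stands in for the real base type

// One base class plus interfaces compiles; a second base class would not.
class StaticAssert : NonEmptyStatementNoCaseNoDefault, DeclDef {}

void addDeclDef(DeclDef d) { /* e.g. Module.declDefs.insert(d) */ }

unittest
{
    addDeclDef(new StaticAssert());            // a statement node used where a DeclDef is expected
}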


@@ -14,18 +14,18 @@ struct Parser
 {
 public:
-	Module parseModule(Token[] tokens)
+	Module parseModule()
 	{
 		Module m = new Module;
-		while (!tokens.empty)
+		while (index < tokens.length)
 		{
-			switch (tokens[i].type)
+			switch (tokens[index].type)
 			{
 			case TokenType.module_:
 				if (m.declaration !is null)
-					m.declaration = parseModuleDeclaration(tokens);
+					m.declaration = parseModuleDeclaration();
 				else
-					error(tokens, "Only one module declaration is allowed per module");
+					error("Only one module declaration is allowed per module");
 				break;
 			default:
 				m.declDefs.insert(parseDeclDef());
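
The pattern repeated throughout this file is the switch from a consumed token range (tokens.empty, tokens.moveFront(), the leftover i variable) to a single cursor stored on the struct: tokens[index] peeks, index++ consumes, and a helper wraps the bounds check. A minimal sketch of that cursor idea, with hypothetical stand-in types rather than the real std.d.lexer ones:

enum TokenType { identifier, dot, semicolon }

struct Token { TokenType type; string value; }

struct TokenCursor
{
    Token[] tokens;   // the full token array, produced once by the lexer
    size_t index;     // current position, mirroring Parser's tokens/index fields

    bool moreTokens() { return index < tokens.length; }

    Token current() { return tokens[index]; }      // peek without consuming
    Token advance() { return tokens[index++]; }    // consume and move on
}

unittest
{
    auto c = TokenCursor([Token(TokenType.identifier, "a"), Token(TokenType.semicolon, ";")]);
    assert(c.advance().value == "a");
    assert(c.current().type == TokenType.semicolon);
    assert(c.moreTokens());
}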
@@ -34,6 +34,8 @@ public:
 		return m;
 	}
+private:
 	ModuleDeclaration parseModuleDeclaration()
 	in
 	{
@@ -43,37 +45,92 @@ public:
 	{
 		ModuleDeclaration declaration = new ModuleDeclaration;
 		string recent;
-		loop: while (!tokens.empty)
+		loop: while (index < tokens.length)
 		{
 			if (tokens[index].type == TokenType.identifier)
 			{
-				recent = tokens.moveFront().value;
-				switch (tokens[i].type)
+				recent = tokens[index++].value;
+				switch (tokens[index].type)
 				{
 				case TokenType.dot:
 					declaration.packageName ~= recent;
-					tokens.popFront();
+					index++;
 					break;
 				case TokenType.semicolon:
 					declaration.moduleName = recent;
-					tokens.popFront();
+					index++;
 					break loop;
 				default:
 					break;
 				}
 			}
 			else
-				error(tokens, "Identifier expected");
+				error("Identifier expected");
 		}
 		return declaration;
 	}
-private:
+	DeclDef parseDeclDef()
+	{
+		switch (tokens[index].type)
+		{
+//		case TokenType.identifier:
+//			if (nextIs(TokenType.colon))
+//				return parseLabeledStatement();
+//			break;
+//		case TokenType.this_:
+//			return parseConstructor();
+//		case TokenType.tilde:
+//			if (nextIs(TokenType.this_))
+//				return parseDestructor();
+//			break;
+		default:
+			return null;
+		}
+	}
+
+//	LabeledStatement parseLabeledStatement()
+//	in
+//	{
+//		assert (tokens[index].type == TokenType.identifier);
+//	}
+//	body
+//	{
+//		auto ls = new LabeledStatement;
+//		ls.label = tokens[index++].value;
+//		ls.statement = parseNoScopeStatement();
+//		return ls;
+//	}
+
+//	NoScopeStatement parseNoScopeStatement()
+//	{
+//		switch (tokens[index].type)
+//		{
+//		case TokenType.semicolon:
+//			return new EmptyStatement;
+//		case TokenType.lBrace:
+//			return parseBlockStatement();
+//		default:
+//			return parseNonEmptyStatement();
+//		}
+//	}
+
+	void error(string message)
+	{
+		import std.stdio;
+		stderr.writefln("%s(%d:%d): %s", fileName, tokens[index].line,
+			tokens[index].column, message);
+		while (index < tokens.length)
+		{
+			if (tokens[++index].type == TokenType.semicolon)
+				break;
+		}
+	}
+
 	Token* peekPast(alias O, alias C)()
 	in
 	{
-		assert (tokens[index].type == T);
+		assert (tokens[index].type == O);
 	}
 	body
 	{
@@ -84,7 +141,7 @@ private:
 		{
 			if (tokens[i] == O)
 				++depth;
-			else (tokens[i] == C)
+			else if (tokens[i] == C)
 			{
 				--depth;
 				++i;
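
peekPast (continued in this hunk's context lines) walks forward from an opening bracket token and keeps a nesting depth so it can find the matching closer; the change above restores the if that the old code dropped. A hedged sketch of the same depth-counting idea on plain characters instead of Tokens:

// Illustrative only: the real peekPast works on the Parser's Token[] and
// takes the opening/closing token types as template arguments.
size_t skipPast(string s, size_t start, char open, char close)
{
    assert(s[start] == open);
    int depth = 0;
    for (size_t i = start; i < s.length; ++i)
    {
        if (s[i] == open)
            ++depth;
        else if (s[i] == close)   // the 'if' the old code was missing
        {
            --depth;
            if (depth == 0)
                return i + 1;     // index just past the matching closer
        }
    }
    return s.length;              // unbalanced input
}

unittest
{
    assert(skipPast("(a(b)c)d", 0, '(', ')') == 7);
}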
@@ -114,297 +171,255 @@ private:
 	Token* expect(TokenType type)
 	{
 		if (tokens[index].type == type)
-			return tokens[index++];
+			return &tokens[index++];
 		else
 			return null;
 	}
-	bool peekIs(TokenType t)
+	Token* peek()
+	{
+		return index + 1 < tokens.length ? &tokens[index + 1] : null;
+	}
+	bool nextIs(TokenType t)
 	{
 		return peek() && peek().type == t;
 	}
+	bool moreTokens()
+	{
+		return index < tokens.length;
+	}
 	Token[] tokens;
 	size_t index;
+	string fileName;
 }
-
-unittest
-{
-	auto a = cast(ubyte[]) q{/** */ module a.b.c;};
-	LexerConfig config;
-	auto ta = byToken(a, config);
-	auto moda = parseModuleDeclaration(ta);
-	assert (moda.packageName == ["a", "b"]);
-	assert (moda.moduleName == "c");
-
-	auto b = cast(ubyte[]) q{module a;};
-	auto tb = byToken(b, config);
-	auto modb = parseModuleDeclaration(tb);
-	assert (modb.packageName.length == 0);
-	assert (modb.moduleName == "a");
-}
-
-DeclDef parseDeclDef(Token[] tokens)
-{
-	switch (tokens[i].type)
-	{
-	case TokenType.identifier:
-		if (tokens.canPeek && tokens.peek.type == TokenType.colon)
-			return parseLabeledStatement(tokens);
-		break;
-	default:
-		break;
-	}
-}
-
-LabeledStatement parseLabeledStatement(Token[] tokens)
-in
-{
-	assert (tokens[i].type == TokenType.identifier);
-}
-body
-{
-	auto ls = new LabeledStatement;
-	ls.label = tokens.moveFront().value;
-	ls.statement = parseNoScopeStatement(tokens);
-	return ls;
-}
-
-NoScopeStatement parseNoScopeStatement(Token[] tokens)
-{
-	switch (tokens[i].type)
-	{
-	case TokenType.semicolon:
-		return new EmptyStatement;
-	case TokenType.lBrace:
-		return parseBlockStatement(tokens);
-	default:
-		return parseNonEmptyStatement(tokens);
-	}
-}
-
-NonEmptyStatement parseNonEmptyStatement(Token[] tokens)
-{
-	switch (tokens[i].type)
-	{
-	case TokenType.case_:
-		return null;
-	case TokenType.default_:
-		return parseDefaultStatement(tokens);
-	default:
-		return parseNonEmptyStatementNoCaseNoDefault(tokens);
-	}
-}
-
-NonEmptyStatementNoCaseNoDefault parseNonEmptyStatementNoCaseNoDefault(Token[] tokens)
-{
-	switch (tokens[i].type)
-	{
-	case TokenType.identifier:
-	case TokenType.if_:
-		return parseIfStatement(tokens);
-	case TokenType.while_:
-		return parseWhileStatement(tokens);
-	case TokenType.do_:
-		return parseDoStatement(tokens);
-	case TokenType.for_:
-		return parseForStatement(tokens);
-	case TokenType.foreach_:
-		return parseForeachStatement(tokens);
-	case TokenType.switch_:
-		return parseSwitchStatement(tokens);
-	case TokenType.final_:
-		if (tokens.peek(1).type == TokenType.switch_)
-			return parseFinalSwitchStatement(tokens);
-		else
-			goto default;
-	case TokenType.continue_:
-		return parseContinueStatement(tokens);
-	case TokenType.break_:
-		return parseBreakStatement(tokens);
-	case TokenType.return_:
-		return parseReturnStatement(tokens);
-	case TokenType.goto_:
-		return parseGotoStatement(tokens);
-	case TokenType.with_:
-		return parseWithStatement(tokens);
-	case TokenType.synchronized_:
-		return parseSynchronizedStatement(tokens);
-	case TokenType.try_:
-		return parseTryStatement(tokens);
-	case TokenType.scope_:
-		return parseScopeGuardStatement(tokens);
-	case TokenType.throw_:
-		return parseThrowStatement(tokens);
-	case TokenType.asm_:
-		return parseAsmStatement(tokens);
-	case TokenType.pragma_:
-		return parsePragmaStatement(tokens);
-	case TokenType.mixin_:
-		if (tokens.peek(1).type == TokenType.lParen)
-			return parseMixinStatement(tokens);
-		else if (tokens.peek(1).type == TokenType.identifier)
-			return parseTemplateMixinStatement(tokens);
-		else
-		{
-			error(tokens, "Expected identifier or ( following \"mixin\"");
-			return null;
-		}
-	case TokenType.version_:
-		if (tokens.peek(1).type == TokenType.lParen)
-			return parseConditionalStatement(tokens);
-		else
-		{
-			error(tokens, "Expected ( following \"version\"");
-			return null;
-		}
-	case TokenType.debug_:
-		return parseConditionalStatement(tokens);
-	case TokenType.static_:
-		if (tokens.peek(1).type == TokenType.if_)
-			return parseConditionalStatement(tokens);
-		else if (tokens.peek(1).type == TokenType.assert_)
-			return parseStaticAssert(tokens);
-		else
-		{
-			error(tokens, "Expected \"if\" or \"assert\" following \"static\"");
-			return null;
-		}
-	case TokenType.import_:
-		return parseImportDeclaration(tokens);
-	default:
-		auto d = parseDeclarationStatement(tokens);
-		if (d is null)
-		{
-			auto e = parseExpressionStatement(tokens);
-			if (e is null)
-			{
-				error(tokens, "OMGWTF");
-				return null;
-			}
-			else
-				return e;
-		}
-		else
-			return d;
-	}
-}
-
-GotoStatement parseGotoStatement(Token[] tokens)
-in
-{
-	assert (tokens[i] == TokenType.goto_);
-}
-body
-{
-	tokens.popFront();
-	auto g = new GotoExpression;
-	switch (tokens[i].type)
-	{
-	case TokenType.identifier:
-		g.type = GotoStatement.GotoType.identifier;
-		g.identifier = tokens.moveFront().value;
-		break;
-	case TokenType.default_:
-		tokens.popFront();
-		g.type = GotoStatement.GotoType.break_;
-	case TokenType.case_:
-		g.type = GotoStatement.GotoType.case_;
-		tokens.popFront();
-	default:
-		error(tokens, "Expected an identifier, \"default\", or \"case\" following \"goto\"");
-		return null;
-	}
-}
-
-ContinueStatement parseContinueStatement(Token[] tokens)
-in
-{
-	assert (tokens[i] == TokenType.continue_);
-}
-body
-{
-	return parseContinueBreakStatement!(R, ContinueStatement)(tokens);
-}
-
-BreakStatement parseBreakStatement(Token[] tokens)
-in
-{
-	assert (tokens[i] == TokenType.break_);
-}
-body
-{
-	return parseBreakStatement!(R, BreakStatement)(tokens);
-}
-
-statementType parseContinueBreakStatement(R, alias statementType)(ref R tokens)
-{
-	tokens.popFront();
-	auto c = new statementType;
-	switch (tokens[i].type)
-	{
-	case TokenType.identifier:
-		c.identifier = tokens.moveFront().value;
-		goto case;
-	case TokenType.semicolon:
-		return c;
-	default:
-		error(tokens, "Identifier or semicolon expected");
-		return null;
-	}
-}
-void error(R)(ref TokenRange!R range, string message)
-{
-	import std.stdio;
-	stderr.writefln("%s(%d:%d): %s", range.fileName, range[i].line,
-		range[i].column, message);
-	while (!range.empty)
-	{
-		if (range.moveFront().type == TokenType.semicolon)
-			break;
-	}
-}
-T parseSingleTokenExpression(TokType, AstType, R)(ref R range)
-{
-	auto node = new AstType;
-	node.token = range.moveFront();
-	return node;
-}
-AssignExpression parseAssignExpression(Tokens)(ref Tokens tokens)
-{
-	auto expr = new AssignExpression;
-	expr.left = parseConditionalExpression(tokens);
-	switch (tokens[i].type)
-	{
-	case TokenType.assign:
-	case TokenType.plusEqual:
-	case TokenType.minusEqual:
-	case TokenType.mulEqual:
-	case TokenType.divEqual:
-	case TokenType.modEqual:
-	case TokenType.bitAndEqual:
-	case TokenType.bitOrEqual:
-	case TokenType.xorEqual:
-	case TokenType.catEqual:
-	case TokenType.shiftLeftEqual:
-	case TokenType.shiftRightEqual:
-	case TokenType.unsignedShiftRightEqual:
-	case TokenType.powEqual:
-		expr.operator = tokens.moveFront().type;
-		expr.right = parseAssignExpression();
-	default:
-		break;
-	}
-	return expr;
-}
+//
+//unittest
+//{
+//	auto a = cast(ubyte[]) q{/** */ module a.b.c;};
+//	LexerConfig config;
+//	auto ta = byToken(a, config);
+//	auto moda = parseModuleDeclaration(ta);
+//	assert (moda.packageName == ["a", "b"]);
+//	assert (moda.moduleName == "c");
+//
+//	auto b = cast(ubyte[]) q{module a;};
+//	auto tb = byToken(b, config);
+//	auto modb = parseModuleDeclaration(tb);
+//	assert (modb.packageName.length == 0);
+//	assert (modb.moduleName == "a");
+//}
+//
+//NonEmptyStatement parseNonEmptyStatement(Token[] tokens)
+//{
+//	switch (tokens[i].type)
+//	{
+//	case TokenType.case_:
+//		return null;
+//	case TokenType.default_:
+//		return parseDefaultStatement(tokens);
+//	default:
+//		return parseNonEmptyStatementNoCaseNoDefault(tokens);
+//	}
+//}
+//
+//NonEmptyStatementNoCaseNoDefault parseNonEmptyStatementNoCaseNoDefault(Token[] tokens)
+//{
+//	switch (tokens[i].type)
+//	{
+//	case TokenType.identifier:
+//	case TokenType.if_:
+//		return parseIfStatement(tokens);
+//	case TokenType.while_:
+//		return parseWhileStatement(tokens);
+//	case TokenType.do_:
+//		return parseDoStatement(tokens);
+//	case TokenType.for_:
+//		return parseForStatement(tokens);
+//	case TokenType.foreach_:
+//		return parseForeachStatement(tokens);
+//	case TokenType.switch_:
+//		return parseSwitchStatement(tokens);
+//	case TokenType.final_:
+//		if (tokens.peek(1).type == TokenType.switch_)
+//			return parseFinalSwitchStatement(tokens);
+//		else
+//			goto default;
+//	case TokenType.continue_:
+//		return parseContinueStatement(tokens);
+//	case TokenType.break_:
+//		return parseBreakStatement(tokens);
+//	case TokenType.return_:
+//		return parseReturnStatement(tokens);
+//	case TokenType.goto_:
+//		return parseGotoStatement(tokens);
+//	case TokenType.with_:
+//		return parseWithStatement(tokens);
+//	case TokenType.synchronized_:
+//		return parseSynchronizedStatement(tokens);
+//	case TokenType.try_:
+//		return parseTryStatement(tokens);
+//	case TokenType.scope_:
+//		return parseScopeGuardStatement(tokens);
+//	case TokenType.throw_:
+//		return parseThrowStatement(tokens);
+//	case TokenType.asm_:
+//		return parseAsmStatement(tokens);
+//	case TokenType.pragma_:
+//		return parsePragmaStatement(tokens);
+//	case TokenType.mixin_:
+//		if (tokens.peek(1).type == TokenType.lParen)
+//			return parseMixinStatement(tokens);
+//		else if (tokens.peek(1).type == TokenType.identifier)
+//			return parseTemplateMixinStatement(tokens);
+//		else
+//		{
+//			error(tokens, "Expected identifier or ( following \"mixin\"");
+//			return null;
+//		}
+//	case TokenType.version_:
+//		if (tokens.peek(1).type == TokenType.lParen)
+//			return parseConditionalStatement(tokens);
+//		else
+//		{
+//			error(tokens, "Expected ( following \"version\"");
+//			return null;
+//		}
+//	case TokenType.debug_:
+//		return parseConditionalStatement(tokens);
+//	case TokenType.static_:
+//		if (tokens.peek(1).type == TokenType.if_)
+//			return parseConditionalStatement(tokens);
+//		else if (tokens.peek(1).type == TokenType.assert_)
+//			return parseStaticAssert(tokens);
+//		else
+//		{
+//			error(tokens, "Expected \"if\" or \"assert\" following \"static\"");
+//			return null;
+//		}
+//	case TokenType.import_:
+//		return parseImportDeclaration(tokens);
+//	default:
+//		auto d = parseDeclarationStatement(tokens);
+//		if (d is null)
+//		{
+//			auto e = parseExpressionStatement(tokens);
+//			if (e is null)
+//			{
+//				error(tokens, "OMGWTF");
+//				return null;
+//			}
+//			else
+//				return e;
+//		}
+//		else
+//			return d;
+//	}
+//}
+//
+//GotoStatement parseGotoStatement(Token[] tokens)
+//in
+//{
+//	assert (tokens[i] == TokenType.goto_);
+//}
+//body
+//{
+//	tokens.popFront();
+//	auto g = new GotoExpression;
+//	switch (tokens[i].type)
+//	{
+//	case TokenType.identifier:
+//		g.type = GotoStatement.GotoType.identifier;
+//		g.identifier = tokens.moveFront().value;
+//		break;
+//	case TokenType.default_:
+//		tokens.popFront();
+//		g.type = GotoStatement.GotoType.break_;
+//	case TokenType.case_:
+//		g.type = GotoStatement.GotoType.case_;
+//		tokens.popFront();
+//	default:
+//		error(tokens, "Expected an identifier, \"default\", or \"case\" following \"goto\"");
+//		return null;
+//	}
+//}
+//
+//ContinueStatement parseContinueStatement(Token[] tokens)
+//in
+//{
+//	assert (tokens[i] == TokenType.continue_);
+//}
+//body
+//{
+//	return parseContinueBreakStatement!(R, ContinueStatement)(tokens);
+//}
+//
+//BreakStatement parseBreakStatement(Token[] tokens)
+//in
+//{
+//	assert (tokens[i] == TokenType.break_);
+//}
+//body
+//{
+//	return parseBreakStatement!(R, BreakStatement)(tokens);
+//}
+//
+//statementType parseContinueBreakStatement(R, alias statementType)(ref R tokens)
+//{
+//	tokens.popFront();
+//	auto c = new statementType;
+//	switch (tokens[i].type)
+//	{
+//	case TokenType.identifier:
+//		c.identifier = tokens.moveFront().value;
+//		goto case;
+//	case TokenType.semicolon:
+//		return c;
+//	default:
+//		error(tokens, "Identifier or semicolon expected");
+//		return null;
+//	}
+//
+//}
+//
+//
+//T parseSingleTokenExpression(TokType, AstType, R)(ref R range)
+//{
+//	auto node = new AstType;
+//	node.token = range.moveFront();
+//	return node;
+//}
+//
+//AssignExpression parseAssignExpression(Tokens)(ref Tokens tokens)
+//{
+//	auto expr = new AssignExpression;
+//	expr.left = parseConditionalExpression(tokens);
+//	switch (tokens[i].type)
+//	{
+//	case TokenType.assign:
+//	case TokenType.plusEqual:
+//	case TokenType.minusEqual:
+//	case TokenType.mulEqual:
+//	case TokenType.divEqual:
+//	case TokenType.modEqual:
+//	case TokenType.bitAndEqual:
+//	case TokenType.bitOrEqual:
+//	case TokenType.xorEqual:
+//	case TokenType.catEqual:
+//	case TokenType.shiftLeftEqual:
+//	case TokenType.shiftRightEqual:
+//	case TokenType.unsignedShiftRightEqual:
+//	case TokenType.powEqual:
+//		expr.operator = tokens.moveFront().type;
+//		expr.right = parseAssignExpression();
+//	default:
+//		break;
+//	}
+//	return expr;
+//}
 //void main(string[] args) {}
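
After this commit the only live entry points are parseModule, parseModuleDeclaration, parseDeclDef (which currently returns null for everything) and the small helpers; the statement-level parsers sit behind comments. A hedged sketch of how the pared-down Parser might be driven, assuming byToken and LexerConfig behave as in the commented-out unittest, that the caller lives in the same module (tokens, index and fileName sit below the private: label added in this commit), and that a hypothetical parseFile wrapper is acceptable; the array-building step mirrors the main.d hunk above:

import std.array : appender;
import std.d.lexer;   // byToken, LexerConfig, Token, as used elsewhere in this commit

Module parseFile(ubyte[] source, string fileName)
{
    LexerConfig config;
    auto app = appender!(Token[])();
    foreach (t; byToken(source, config))   // lex everything up front
        app.put(t);

    Parser p;
    p.tokens = app.data;    // the index-based parser walks this array
    p.fileName = fileName;  // used by error() for diagnostics
    return p.parseModule();
}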