Some random messing about, as well as some changes to make the code compile with git HEAD dmd

Hackerpilot 2013-12-16 01:23:23 +00:00
parent 629c93eca7
commit 84005dc200
3 changed files with 161 additions and 135 deletions


@@ -421,7 +421,7 @@ public struct DLexer(R)
Token lexWhitespace(LR)(ref LR range)
{
range.mark();
loop: while (!range.empty)
loop: do
{
switch (range.front)
{
@@ -456,7 +456,7 @@ public struct DLexer(R)
default:
break loop;
}
}
} while (!range.empty);
return Token(tok!"whitespace", cast(string) range.getMarked(), range.line,
range.column, range.index);
}
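
The whitespace loop above changes from a while to a do-while, so the emptiness test runs after each pass instead of before the first one. That is safe because lexWhitespace is only entered when the current character is already known to be whitespace, so the range cannot be empty on entry. A minimal, self-contained sketch of the same labeled do-while control flow (illustrative input, not the library's code):

import std.stdio;

void main()
{
    string input = "  \t hello";
    size_t i;
    // Consume leading spaces/tabs; the label lets the default case exit
    // the whole loop from inside the switch, as the lexer does.
    loop: do
    {
        switch (input[i])
        {
        case ' ':
        case '\t':
            ++i;
            break;
        default:
            break loop;
        }
    } while (i < input.length);
    writeln("skipped ", i, " characters; rest: ", input[i .. $]);
}
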
@@ -861,7 +861,10 @@ public struct DLexer(R)
while (true)
{
if (range.empty)
goto error;
{
writeln("Error: unterminated string literal");
return Token(tok!"");
}
else if (range.front == '`')
{
range.popFront();
@@ -875,12 +878,18 @@ public struct DLexer(R)
{
range.popFront();
if (range.empty)
goto error;
{
writeln("Error: unterminated string literal");
return Token(tok!"");
}
range.popFront();
while (true)
{
if (range.empty)
goto error;
{
writeln("Error: unterminated string literal");
return Token(tok!"");
}
else if (range.front == '"')
{
range.popFront();
@@ -893,9 +902,6 @@ public struct DLexer(R)
lexStringSuffix(range, type);
return Token(type, cast(string) range.getMarked(), range.line, range.column,
range.index);
error:
writeln("Error: unterminated string literal");
return Token();
}
static void lexStringSuffix(R)(ref R range, ref IdType type)
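
The three hunks above replace goto error jumps with inline reporting: each failure point prints the message and returns Token(tok!"") directly, and the shared error label at the bottom of the function goes away. (The type-only return relies on the Token constructor added later in this commit.) A self-contained sketch of the same restructuring, using a hypothetical Tok type rather than the library's Token:

import std.stdio;

// Hypothetical stand-in so the sketch compiles on its own.
struct Tok { string text; bool ok; }

Tok lexBacktickString(string input)
{
    size_t i = 1; // caller guarantees input[0] == '`'
    while (true)
    {
        if (i >= input.length)
        {
            // was: goto error; the handling is now inlined here
            writeln("Error: unterminated string literal");
            return Tok(null, false);
        }
        if (input[i] == '`')
            return Tok(input[0 .. i + 1], true);
        ++i;
    }
}

void main()
{
    writeln(lexBacktickString("`abc`")); // Tok("`abc`", true)
    writeln(lexBacktickString("`abc"));  // prints the error, then Tok("", false)
}
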
@@ -1232,7 +1238,7 @@ public struct DLexer(R)
range.column, range.index);
}
static bool isSeparating(C)(C c)
static bool isSeparating(C)(C c) nothrow pure
{
if (c <= 0x2f) return true;
if (c >= ':' && c <= '@') return true;
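
isSeparating picks up nothrow pure, which lets pure and nothrow callers (and compile-time evaluation) use it. A sketch of such a predicate; only the first two ranges appear in the hunk above, the remaining ones are illustrative guesses:

bool isSeparatingExample(dchar c) nothrow pure
{
    if (c <= 0x2f) return true;             // control characters, space, most punctuation
    if (c >= ':' && c <= '@') return true;  // : ; < = > ? @
    if (c >= '[' && c <= '^') return true;  // [ \ ] ^
    if (c >= '{' && c <= '~') return true;  // { | } ~
    return false;
}

unittest
{
    static assert(isSeparatingExample(';')); // usable in static contexts
    assert(!isSeparatingExample('a'));
    assert(!isSeparatingExample('_'));       // identifier character, not a separator
}
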


@@ -80,7 +80,7 @@ import std.string : format;
* Returns: the parsed module
*/
Module parseModule(const(Token)[] tokens, string fileName,
void function(string, int, int, string) messageFunction = null)
void function(string, size_t, size_t, string) messageFunction = null)
{
auto parser = new Parser();
parser.fileName = fileName;
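
Because the token position fields are size_t, the diagnostic callback now takes size_t line and column parameters as well; a callback written against the old int signature would no longer match the function-pointer type. A sketch of a compatible callback (parseModule and the token types come from the modules in this commit; the file name is made up):

import std.stdio;

void report(string fileName, size_t line, size_t column, string message)
{
    stderr.writefln("%s(%s,%s): %s", fileName, line, column, message);
}

// Usage, assuming tokens came from the lexer:
//     auto mod = parseModule(tokens, "example.d", &report);
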
@@ -4117,7 +4117,8 @@ q{(int a, ...)
if (currentIs(tok!"=>"))
{
goToBookmark(b);
goto lambda;
node.lambdaExpression = parseLambdaExpression();
break;
}
else
goToBookmark(b);
@@ -4152,7 +4153,6 @@ q{(int a, ...)
if (currentIs(tok!"=>"))
{
goToBookmark(b);
lambda:
node.lambdaExpression = parseLambdaExpression();
}
else if (currentIs(tok!"{"))
@@ -5549,7 +5549,10 @@ q{(int a, ...)
node.array = true;
advance();
if (currentIs(tok!"]"))
goto end;
{
if (expect(tok!"]") is null) return null;
return node;
}
auto bookmark = setBookmark();
auto type = parseType();
if (type !is null && currentIs(tok!"]"))
@@ -5570,7 +5573,6 @@ q{(int a, ...)
if (node.high is null) return null;
}
}
end:
if (expect(tok!"]") is null) return null;
return node;
case tok!"delegate":
@@ -6023,7 +6025,7 @@ q{doStuff(5)}c;
* The parameters are the file name, line number, column number,
* and the error or warning message.
*/
void function(string, int, int, string) messageFunction;
void function(string, size_t, size_t, string) messageFunction;
bool isSliceExpression()
{


@@ -90,11 +90,25 @@ struct TokenStructure(IDType)
return this.type == type;
}
IDType type;
this(IDType type)
{
this.type = type;
}
this(IDType type, string text, size_t line, size_t column, size_t index)
{
this.text = text;
this.line = line;
this.column = column;
this.type = type;
this.index = index;
}
string text;
size_t line;
size_t column;
size_t index;
IDType type;
}
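
The two constructors added to TokenStructure allow building a token from just an ID (which the lexer's new error returns use via Token(tok!"")) or with the full text and position payload. A usage sketch, assuming this module's TokenStructure is in scope; ushort as the ID type and all field values are arbitrary:

unittest
{
    alias Token = TokenStructure!ushort;
    auto a = Token(42);                        // type only
    auto b = Token(42, "whitespace", 1, 5, 4); // type, text, line, column, index
    assert(a.type == b.type);
    assert(b.line == 1 && b.column == 5 && b.index == 4);
}
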
mixin template Lexer(R, IDType, Token, alias isSeparating, alias defaultTokenFunction,
@@ -159,7 +173,10 @@ mixin template Lexer(R, IDType, Token, alias isSeparating, alias defaultTokenFun
string code;
if (staticTokens.countUntil(token) >= 0)
{
code ~= indent ~ "range.popFrontN(" ~ text(token.length) ~ ");\n";
if (token.length == 1)
code ~= indent ~ "range.popFront();\n";
else
code ~= indent ~ "range.popFrontN(" ~ text(token.length) ~ ");\n";
code ~= indent ~ "return Token(tok!\"" ~ escape(token) ~"\", null, range.line, range.column, range.index);\n";
}
else if (pseudoTokens.countUntil(token) >= 0)
@@ -168,7 +185,10 @@ mixin template Lexer(R, IDType, Token, alias isSeparating, alias defaultTokenFun
{
code ~= indent ~ "if (!range.canPeek(" ~ text(token.length) ~ ") || isSeparating(range.peek(" ~ text(token.length) ~ ")))\n";
code ~= indent ~ "{\n";
code ~= indent ~ " range.popFrontN(" ~ text(token.length) ~ ");\n";
if (token.length == 1)
code ~= indent ~ " range.popFront();\n";
else
code ~= indent ~ " range.popFrontN(" ~ text(token.length) ~ ");\n";
code ~= indent ~ " return Token(tok!\"" ~ escape(token) ~"\", null, range.line, range.column, range.index);\n";
code ~= indent ~ "}\n";
code ~= indent ~ "else\n";
@@ -233,7 +253,7 @@ mixin template Lexer(R, IDType, Token, alias isSeparating, alias defaultTokenFun
lexerLoop: switch (range.front)
{
mixin(generateCaseStatements(stupidToArray(sort(staticTokens ~ pseudoTokens ~ possibleDefaultTokens))));
//pragma(msg, generateCaseStatements(stupidToArray(sort(staticTokens ~ pseudoTokens ~ possibleDefaultTokens))));
pragma(msg, generateCaseStatements(stupidToArray(sort(staticTokens ~ pseudoTokens ~ possibleDefaultTokens))));
outer_default:
default:
range = r;
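
Uncommenting the pragma(msg, ...) makes the compiler print the generated case statements while the Lexer mixin is being compiled, which is the usual way to inspect string-mixin output (and is noisy, so it is normally left commented out). A tiny self-contained illustration of the trick:

enum generated = q{int answer() { return 42; }};
pragma(msg, generated); // printed by the compiler, not at run time
mixin(generated);
static assert(answer() == 42);
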
@@ -309,9 +329,9 @@ public:
return range[_index + offset];
}
bool canPeek(int offset = 1) pure nothrow const
bool canPeek(size_t offset = 1) pure nothrow const
{
return _index + offset >= 0 && _index + offset < range.length;
return _index + offset < range.length;
}
typeof(this) save() @property
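
canPeek now takes a size_t offset, so the old lower-bound test (_index + offset >= 0) is always true and only the upper bound remains; note this also drops support for negative, backward offsets. The same check expressed on a plain slice:

bool canPeekAt(const(ubyte)[] data, size_t index, size_t offset = 1) pure nothrow
{
    return index + offset < data.length; // no lower bound needed for an unsigned offset
}

unittest
{
    ubyte[] data = [1, 2, 3];
    assert(canPeekAt(data, 0, 2));  // element at index 2 exists
    assert(!canPeekAt(data, 2, 1)); // would run past the end
}
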
@@ -352,117 +372,115 @@ private:
R range;
}
struct PeekRange(R, size_t peekSupported = 1)
if (!isRandomAccessRange!R && isForwardRange!R)
{
public:
this(R range)
{
this.range = range;
for (size_t i = 0; !this.range.empty && i < peekSupported; i++)
{
rangeSizeCount++;
buffer[i] = this.range.front;
range.popFront();
}
}
ElementType!R front() const @property
in
{
assert (!empty);
}
body
{
return buffer[bufferIndex];
}
void popFront()
in
{
assert (!empty);
}
body
{
index++;
column++;
count++;
bufferIndex = bufferIndex + 1 > buffer.length ? 0 : bufferIndex + 1;
if (marking)
markBuffer.put(buffer[bufferIndex]);
if (!range.empty)
{
buffer[bufferIndex + peekSupported % buffer.length] = range.front();
range.popFront();
rangeSizeCount++;
}
}
bool empty() const nothrow pure @property
{
return rangeSizeCount == count;
}
ElementType!R peek(int offset = 1) pure nothrow const
in
{
assert (canPeek(offset));
}
body
{
return buffer[(bufferIndex + offset) % buffer.length];
}
bool canPeek(int offset = 1) pure nothrow const
{
return offset >= 0
? offset <= peekSupported && count + offset <= rangeSizeCount
: abs(offset) <= peekSupported && (count - abs(offset)) >= 0;
}
typeof(this) save() @property
{
typeof(this) newRange;
newRange.count = count;
newRange.rangeSizeCount = count;
newRange.buffer = buffer.dup;
newRange.bufferIndex = bufferIndex;
newRange.range = range.save;
return newRange;
}
void mark()
{
marking = true;
markBuffer.clear();
}
ElementEncodingType!R[] getMarked()
{
marking = false;
return markBuffer.data;
}
void incrementLine() pure nothrow
{
_column = 1;
_line++;
}
size_t line() pure nothrow const @property { return _line; }
size_t column() pure nothrow const @property { return _column; }
size_t index() pure nothrow const @property { return _index; }
private:
auto markBuffer = appender!(ElementType!R[])();
bool marking;
size_t count;
size_t rangeSizeCount;
ElementType!(R)[(peekSupported * 2) + 1] buffer;
size_t bufferIndex;
size_t _column = 1;
size_t _line = 1;
size_t _index = 0;
R range;
}
//struct PeekRange(R, size_t peekSupported = 1)
// if (!isRandomAccessRange!R && isForwardRange!R)
//{
//public:
//
// this(R range)
// {
// this.range = range;
// for (size_t i = 0; !this.range.empty && i < peekSupported; i++)
// {
// rangeSizeCount++;
// buffer[i] = this.range.front;
// range.popFront();
// }
// }
//
// ElementType!R front() const @property
// in
// {
// assert (!empty);
// }
// body
// {
// return buffer[bufferIndex];
// }
//
// void popFront()
// in
// {
// assert (!empty);
// }
// body
// {
// index++;
// column++;
// count++;
// bufferIndex = bufferIndex + 1 > buffer.length ? 0 : bufferIndex + 1;
// if (marking)
// markBuffer.put(buffer[bufferIndex]);
// if (!range.empty)
// {
// buffer[bufferIndex + peekSupported % buffer.length] = range.front();
// range.popFront();
// rangeSizeCount++;
// }
// }
//
// bool empty() const nothrow pure @property
// {
// return rangeSizeCount == count;
// }
//
// ElementType!R peek(int offset = 1) pure nothrow const
// in
// {
// assert (canPeek(offset));
// }
// body
// {
// return buffer[(bufferIndex + offset) % buffer.length];
// }
//
// bool canPeek(size_t offset = 1) pure nothrow const
// {
// return offset <= peekSupported && count + offset <= rangeSizeCount;
// }
//
// typeof(this) save() @property
// {
// typeof(this) newRange;
// newRange.count = count;
// newRange.rangeSizeCount = count;
// newRange.buffer = buffer.dup;
// newRange.bufferIndex = bufferIndex;
// newRange.range = range.save;
// return newRange;
// }
//
// void mark()
// {
// marking = true;
// markBuffer.clear();
// }
//
// ElementEncodingType!R[] getMarked()
// {
// marking = false;
// return markBuffer.data;
// }
//
// void incrementLine() pure nothrow
// {
// _column = 1;
// _line++;
// }
//
// size_t line() pure nothrow const @property { return _line; }
// size_t column() pure nothrow const @property { return _column; }
// size_t index() pure nothrow const @property { return _index; }
//
//private:
// auto markBuffer = appender!(ElementType!R[])();
// bool marking;
// size_t count;
// size_t rangeSizeCount;
// ElementType!(R)[peekSupported + 1] buffer;
// size_t bufferIndex;
// size_t _column = 1;
// size_t _line = 1;
// size_t _index = 0;
// R range;
//}
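
Judging by the old-then-new ordering of the other hunks, this last hunk takes the forward-range PeekRange (the version for inputs that are not random access) out of service: the live struct appears first and a commented-out copy of it, with a few further edits, replaces it. A toy sketch of the buffering idea that struct implements, far smaller than the real thing (peek depth of one, no mark/getMarked, no line or column tracking); all names here are invented:

import std.range.primitives : ElementType, isForwardRange;
import std.stdio;

struct OnePeek(R) if (isForwardRange!R)
{
    private R source;
    private ElementType!(R)[2] buf; // current element plus one element of lookahead
    private size_t filled;

    this(R r)
    {
        source = r;
        foreach (i; 0 .. buf.length) // prime the buffer, like PeekRange's constructor
        {
            if (source.empty) break;
            buf[i] = source.front;
            source.popFront();
            ++filled;
        }
    }

    bool empty() const { return filled == 0; }
    ElementType!R front() { return buf[0]; }
    bool canPeek() const { return filled > 1; }
    ElementType!R peek() { return buf[1]; }

    void popFront()
    {
        buf[0] = buf[1]; // slide the lookahead into the current slot
        --filled;
        if (!source.empty)
        {
            buf[1] = source.front;
            source.popFront();
            ++filled;
        }
    }
}

void main()
{
    import std.algorithm.iteration : filter;

    auto source = [1, 2, 3, 4].filter!(x => x > 1); // forward range, not random access
    auto r = OnePeek!(typeof(source))(source);
    while (!r.empty)
    {
        if (r.canPeek)
            writeln(r.front, " (next: ", r.peek, ")");
        else
            writeln(r.front, " (last)");
        r.popFront();
    }
}
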