Moved lexer over to std.d

This commit is contained in:
Hackerpilot 2013-01-21 17:39:59 +00:00
parent fb66baa36a
commit fbfdc37cf5
11 changed files with 2032 additions and 1759 deletions

View File

@ -13,11 +13,11 @@ import std.stdio;
import std.typecons;
import std.path;
import std.file;
import std.d.lexer;
import parser;
import langutils;
import types;
import tokenizer;
immutable string[] versions = ["AIX", "all", "Alpha", "ARM", "BigEndian", "BSD",
"Cygwin", "D_Coverage", "D_Ddoc", "DigitalMars", "D_InlineAsm_X86",
@ -31,348 +31,348 @@ immutable string[] versions = ["AIX", "all", "Alpha", "ARM", "BigEndian", "BSD",
immutable string[] scopes = ["exit", "failure", "success"];
/**
* Returns: indices into the token array
*/
size_t findEndOfExpression(const Token[] tokens, const size_t index)
out (result)
{
// NOTE(review): if the forward scan runs past the last token, result equals
// tokens.length and this first assert fires; presumably callers guarantee a
// terminating token (';', ')', '}', ...) exists -- confirm.
assert (result < tokens.length);
assert (result >= index);
}
body
{
// Scan forward from `index` until a token that cannot continue the current
// expression is reached. Matched (), {}, and [] groups are skipped as
// opaque units via the skip* helpers, which advance i past the group.
size_t i = index;
loop: while (i < tokens.length)
{
switch (tokens[i].type)
{
// Tokens at which the scan stops: the expression ends just before them.
case TokenType.Return:
case TokenType.New:
case TokenType.Delete:
case TokenType.Comma:
case TokenType.RBrace:
case TokenType.RParen:
case TokenType.RBracket:
case TokenType.Semicolon:
break loop;
case TokenType.LParen:
skipParens(tokens, i);
break;
case TokenType.LBrace:
skipBraces(tokens, i);
break;
case TokenType.LBracket:
skipBrackets(tokens, i);
break;
default:
// Ordinary token: still inside the expression, keep scanning.
++i;
break;
}
}
return i;
}
size_t findBeginningOfExpression(const Token[] tokens, const size_t index)
in
{
assert (index < tokens.length);
assert (tokens.length > 0);
}
out (result)
{
import std.string;
assert (result < tokens.length);
assert (result <= index, format("findBeginningOfExpression %d, %d", result, index));
}
body
{
// Scan backwards from `index` until an operator or keyword that cannot be
// part of the expression is found; the expression begins one token after it.
size_t i = index;
loop: while (i < tokens.length)
{
switch (tokens[i].type)
{
// Any operator or keyword terminates the backward scan.
case TokenType.Assign: case TokenType.BitAnd: case TokenType.BitAndEquals:
case TokenType.BitOr: case TokenType.BitOrEquals: case TokenType.CatEquals:
case TokenType.Colon: case TokenType.Comma: case TokenType.Decrement:
case TokenType.Div: case TokenType.DivEquals: case TokenType.Dollar:
case TokenType.Equals: case TokenType.GoesTo:
case TokenType.Greater: case TokenType.GreaterEqual: case TokenType.Hash:
case TokenType.Increment: case TokenType.LBrace: case TokenType.LBracket:
case TokenType.Less: case TokenType.LessEqual: case TokenType.LessEqualGreater:
case TokenType.LessOrGreater: case TokenType.LogicAnd: case TokenType.LogicOr:
case TokenType.LParen: case TokenType.Minus: case TokenType.MinusEquals:
case TokenType.Mod: case TokenType.ModEquals: case TokenType.MulEquals:
case TokenType.Not: case TokenType.NotEquals: case TokenType.NotGreater:
case TokenType.NotGreaterEqual: case TokenType.NotLess: case TokenType.NotLessEqual:
case TokenType.NotLessEqualGreater: case TokenType.Plus: case TokenType.PlusEquals:
case TokenType.Pow: case TokenType.PowEquals: case TokenType.RBrace:
case TokenType.Semicolon: case TokenType.ShiftLeft: case TokenType.ShiftLeftEqual:
case TokenType.ShiftRight: case TokenType.ShiftRightEqual: case TokenType.Slice:
case TokenType.Star: case TokenType.Ternary: case TokenType.Tilde:
case TokenType.Unordered: case TokenType.UnsignedShiftRight: case TokenType.UnsignedShiftRightEqual:
case TokenType.Vararg: case TokenType.Xor: case TokenType.XorEquals:
case TokenType.KEYWORDS_BEGIN: .. case TokenType.KEYWORDS_END:
return i + 1;
// A closing paren/bracket is part of the expression: skip over the
// matched group. NOTE(review): this presumes skipParens/skipBrackets
// scan backwards when positioned on a closing token -- confirm.
case TokenType.RParen:
if (i == 0)
break loop;
skipParens(tokens, i);
break;
case TokenType.RBracket:
if (i == 0)
break loop;
skipBrackets(tokens, i);
break;
default:
if (i == 0)
break loop;
i--;
break;
}
}
// NOTE(review): reached only via `break loop` with i == 0, so this returns 1
// even though the expression appears to start at token 0; the out-contract
// would fail for index == 0 -- confirm callers never pass index 0 here.
return i + 1;
}
/**
 * Reduces a member-access chain such as `a.b[10].c("x").y` to just its
 * identifier links, dropping call arguments, index expressions, and dots.
 * Returns: one token per link of the chain.
 */
const(Token)[] splitCallChain(const(Token)[] tokens)
{
	auto links = appender!(Token[])();
	size_t pos = 0;
	while (pos < tokens.length)
	{
		// Keep the identifier link itself.
		links.put(tokens[pos]);
		++pos;
		// Discard any trailing call arguments, index expressions, and dots.
		while (pos < tokens.length && tokens[pos] == TokenType.LParen)
			skipParens(tokens, pos);
		while (pos < tokens.length && tokens[pos] == TokenType.LBracket)
			skipBrackets(tokens, pos);
		while (pos < tokens.length && tokens[pos] == TokenType.Dot)
			++pos;
	}
	return links.data;
}
// Verify that splitCallChain keeps only the identifier links of a chain,
// dropping index expressions and call arguments.
unittest
{
auto code = `a.b[10].c("grcl").x`;
auto tokens = tokenize(code);
assert (splitCallChain(tokens) == ["a", "b", "c", "x"]);
}
/**
 * Implements editor autocompletion: expression type resolution,
 * dot-completion member lists, paren-completion call tips, and import path
 * completion, driven by a token stream and a CompletionContext.
 */
struct AutoComplete
{
/**
 * Params:
 *     tokens = token stream of the module being edited
 *     context = semantic lookup helper (members, structs, call tips, imports)
 */
this(const (Token)[] tokens, CompletionContext context)
{
this.tokens = tokens;
this.context = context;
}
/**
 * Resolves the type of a dotted member-access chain.
 * Params:
 *     expression = one token per link of the chain (see splitCallChain)
 *     tokens = full token stream used to locate declarations
 *     cursor = byte offset of the cursor in the source text
 * Returns: the resolved type name, or "void" when resolution fails
 */
string getTypeOfExpression(const(Token)[] expression, const Token[] tokens, size_t cursor)
{
stderr.writeln("getting type of ", expression);
if (expression.length == 0)
return "void";
// Resolve the head of the chain, then look up each subsequent link as a
// member of the previous link's type.
auto type = typeOfVariable(expression[0], cursor);
if (type is null)
return "void";
size_t index = 1;
while (index < expression.length)
{
// Member name -> (type, completion kind) for the current type.
const Tuple!(string, string)[string] typeMap = context.getMembersOfType(
type);
const Tuple!(string, string)* memberType = expression[index].value in typeMap;
if (memberType is null)
return "void";
else
type = (*memberType)[0];
index++;
}
return type;
}
/**
 * Determines the type of `symbol` by scanning backwards from the cursor
 * for its declaration, then by searching enclosing aggregate bodies.
 * Returns: the type name, or "void" if it cannot be determined
 */
string typeOfVariable(Token symbol, size_t cursor)
{
// int is of type int, double of type double, and so on
if (symbol.value in typeProperties)
return symbol.value;
string tokenType = getTypeFromToken(symbol);
if (tokenType !is null)
return tokenType;
// A name that has known members is itself a type name.
if (context.getMembersOfType(symbol.value))
return symbol.value;
// Arbitrarily define the depth of the cursor position as zero
// iterate backwards through the code to try to find the variable
int depth = 0;
// NOTE(review): lowerBound(cursor) presumes tokens are ordered by source
// position and comparable against a byte offset -- confirm Token's opCmp.
auto preceedingTokens = assumeSorted(tokens).lowerBound(cursor);
auto index = preceedingTokens.length - 1;
while (true)
{
if (preceedingTokens[index] == TokenType.LBrace)
--depth;
else if (preceedingTokens[index] == TokenType.RBrace)
++depth;
else if (depth <= 0 && preceedingTokens[index].value == symbol)
{
// Found the symbol, now determine if it was declared here.
auto p = preceedingTokens[index - 1];
if ((p == TokenType.Auto || p == TokenType.Immutable
|| p == TokenType.Const)
&& preceedingTokens[index + 1] == TokenType.Assign)
{
// Try to determine the type of a variable declared as "auto"
return getTypeOfExpression(
tokens[index + 2 .. findEndOfExpression(tokens, index + 2)],
tokens, cursor);
}
else if (p == TokenType.Identifier
|| (p.type > TokenType.TYPES_BEGIN
&& p.type < TokenType.TYPES_END))
{
// Handle simple cases like "int a;" or "Someclass instance;"
return p.value;
}
else if (p == TokenType.RBracket || p == TokenType.RParen)
{
// The declared type is itself an expression (array, template
// instance, ...): recover its source text from preceding tokens.
return combineTokens(tokens[findBeginningOfExpression(tokens, index) .. index]);
}
}
if (index == 0)
break;
else
--index;
}
// Find all struct or class bodies that we're in.
// Check for the symbol in those class/struct/interface bodies
// if match is found, return it
auto structs = context.getStructsContaining(cursor);
if (symbol == "this" && structs.length > 0)
{
// "this" resolves to the innermost enclosing aggregate.
return minCount!("a.bodyStart > b.bodyStart")(structs)[0].name;
}
foreach (s; structs)
{
auto t = s.getMemberType(symbol.value);
if (t !is null)
return t;
}
return "void";
}
/**
 * Returns: the value of the token under the cursor, or null when the
 * cursor is past the end of the nearest preceding token.
 */
string symbolAt(size_t cursor) const
{
auto r = assumeSorted(tokens).lowerBound(cursor)[$ - 1];
if (r.value.length + r.startIndex > cursor)
return r.value;
else
return null;
}
/**
 * Completion triggered by '('. Produces a "completions" list for
 * version()/scope() or a "calltips" response for a function call.
 * NOTE(review): `.length - 2` underflows when fewer than two tokens
 * precede the cursor -- confirm callers guarantee otherwise.
 */
string parenComplete(size_t cursor)
{
auto index = assumeSorted(tokens).lowerBound(cursor).length - 2;
Token t = tokens[index];
switch (tokens[index].type)
{
case TokenType.Version:
return "completions\n" ~ to!string(join(map!`a ~ " k"`(versions), "\n").array());
case TokenType.Scope:
return "completions\n" ~ to!string(join(map!`a ~ " k"`(scopes), "\n").array());
case TokenType.If:
case TokenType.Cast:
case TokenType.While:
case TokenType.For:
case TokenType.Foreach:
case TokenType.Switch:
return "";
default:
// Resolve the type of everything before the called member, then ask
// the context for that member's call tips.
size_t startIndex = findBeginningOfExpression(tokens, index);
auto callChain = splitCallChain(tokens[startIndex .. index + 1]);
auto expressionType = getTypeOfExpression(
callChain[0 .. $ - 1], tokens, cursor);
return "calltips\n" ~ to!string(context.getCallTipsFor(expressionType,
callChain[$ - 1].value, cursor).join("\n").array());
}
}
/**
 * Completion triggered by '.'. Returns a newline-separated,
 * case-insensitively sorted member list for the expression's type, or an
 * import path completion when the expression follows "import".
 */
string dotComplete(size_t cursor)
{
stderr.writeln("dotComplete");
auto index = assumeSorted(tokens).lowerBound(cursor).length - 1;
Token t = tokens[index];
// If the last character entered before the cursor isn't a dot, give up.
// The user was probably in the middle of typing the slice or vararg
// operators
if (t != TokenType.Dot)
return null;
size_t startIndex = findBeginningOfExpression(tokens, index);
// NOTE(review): when startIndex == 0, `startIndex - 1` wraps to a huge
// value and the bounds check fails, skipping the import branch; this
// relies on unsigned wraparound -- confirm it is intentional.
if (startIndex - 1 < tokens.length && tokens[startIndex - 1] == TokenType.Import)
{
return importComplete(splitCallChain(tokens[startIndex .. index]));
}
auto expressionType = getTypeOfExpression(
splitCallChain(tokens[startIndex .. index]), tokens, cursor);
stderr.writeln("expression type is ", expressionType);
// Complete pointers and references the same way
if (expressionType[$ - 1] == '*')
expressionType = expressionType[0 .. $ - 1];
const Tuple!(string, string)[string] typeMap = context.getMembersOfType(
expressionType);
if (typeMap is null)
return "";
auto app = appender!(string[])();
foreach (k, t; typeMap)
app.put(k ~ " " ~ t[1]);
return to!string(array(join(sort!("a.toLower() < b.toLower()")(app.data), "\n")));
}
/**
 * Completes module names after "import" by listing packages (suffix " P")
 * and modules (suffix " M") found in the configured import directories.
 */
string importComplete(const(Token)[] tokens)
{
stderr.writeln("importComplete");
auto app = appender!(string[])();
// Convert the dotted module chain into a relative directory path.
string part = to!string(map!"a.value.dup"(tokens).join("/").array());
foreach (path; context.importDirectories)
{
stderr.writeln("Searching for ", path, "/", part);
if (!exists(buildPath(path, part)))
continue;
stderr.writeln("found it");
foreach (DirEntry dirEntry; dirEntries(buildPath(path, part),
SpanMode.shallow))
{
if (dirEntry.isDir)
app.put(baseName(dirEntry.name) ~ " P");
else if (dirEntry.name.endsWith(".d", ".di"))
app.put(stripExtension(baseName(dirEntry.name)) ~ " M");
}
}
return to!string(sort!("a.toLower() < b.toLower()")(app.data).join("\n").array());
}
// Token stream of the module being edited
const(Token)[] tokens;
// Semantic lookup helper supplied by the caller
CompletionContext context;
}
// End-to-end check: parse a small module and verify that the type of the
// member access "ts.a" resolves to "int".
unittest
{
auto code = q{
struct TestStruct { int a; int b; }
TestStruct ts;
ts.a.
};
auto tokens = tokenize(code);
auto mod = parseModule(tokens);
auto context = new CompletionContext(mod);
auto completion = AutoComplete(tokens, context);
assert (completion.getTypeOfExpression(splitCallChain(tokens[13 .. 16]),
tokens, 56) == "int");
}
///**
// * Returns: indicies into the token array
// */
//size_t findEndOfExpression(const Token[] tokens, const size_t index)
//out (result)
//{
// assert (result < tokens.length);
// assert (result >= index);
//}
//body
//{
// size_t i = index;
// loop: while (i < tokens.length)
// {
// switch (tokens[i].type)
// {
// case TokenType.Return:
// case TokenType.New:
// case TokenType.Delete:
// case TokenType.Comma:
// case TokenType.RBrace:
// case TokenType.RParen:
// case TokenType.RBracket:
// case TokenType.Semicolon:
// break loop;
// case TokenType.LParen:
// skipParens(tokens, i);
// break;
// case TokenType.LBrace:
// skipBraces(tokens, i);
// break;
// case TokenType.LBracket:
// skipBrackets(tokens, i);
// break;
// default:
// ++i;
// break;
// }
// }
// return i;
//}
//
//size_t findBeginningOfExpression(const Token[] tokens, const size_t index)
//in
//{
// assert (index < tokens.length);
// assert (tokens.length > 0);
//}
//out (result)
//{
// import std.string;
// assert (result < tokens.length);
// assert (result <= index, format("findBeginningOfExpression %d, %d", result, index));
//}
//body
//{
// size_t i = index;
// loop: while (i < tokens.length)
// {
// switch (tokens[i].type)
// {
// case TokenType.Assign: case TokenType.BitAnd: case TokenType.BitAndEquals:
// case TokenType.BitOr: case TokenType.BitOrEquals: case TokenType.CatEquals:
// case TokenType.Colon: case TokenType.Comma: case TokenType.Decrement:
// case TokenType.Div: case TokenType.DivEquals: case TokenType.Dollar:
// case TokenType.Equals: case TokenType.GoesTo:
// case TokenType.Greater: case TokenType.GreaterEqual: case TokenType.Hash:
// case TokenType.Increment: case TokenType.LBrace: case TokenType.LBracket:
// case TokenType.Less: case TokenType.LessEqual: case TokenType.LessEqualGreater:
// case TokenType.LessOrGreater: case TokenType.LogicAnd: case TokenType.LogicOr:
// case TokenType.LParen: case TokenType.Minus: case TokenType.MinusEquals:
// case TokenType.Mod: case TokenType.ModEquals: case TokenType.MulEquals:
// case TokenType.Not: case TokenType.NotEquals: case TokenType.NotGreater:
// case TokenType.NotGreaterEqual: case TokenType.NotLess: case TokenType.NotLessEqual:
// case TokenType.NotLessEqualGreater: case TokenType.Plus: case TokenType.PlusEquals:
// case TokenType.Pow: case TokenType.PowEquals: case TokenType.RBrace:
// case TokenType.Semicolon: case TokenType.ShiftLeft: case TokenType.ShiftLeftEqual:
// case TokenType.ShiftRight: case TokenType.ShiftRightEqual: case TokenType.Slice:
// case TokenType.Star: case TokenType.Ternary: case TokenType.Tilde:
// case TokenType.Unordered: case TokenType.UnsignedShiftRight: case TokenType.UnsignedShiftRightEqual:
// case TokenType.Vararg: case TokenType.Xor: case TokenType.XorEquals:
// case TokenType.KEYWORDS_BEGIN: .. case TokenType.KEYWORDS_END:
// return i + 1;
// case TokenType.RParen:
// if (i == 0)
// break loop;
// skipParens(tokens, i);
// break;
// case TokenType.RBracket:
// if (i == 0)
// break loop;
// skipBrackets(tokens, i);
// break;
// default:
// if (i == 0)
// break loop;
// i--;
// break;
// }
// }
// return i + 1;
//}
//
//const(Token)[] splitCallChain(const(Token)[] tokens)
//{
// auto app = appender!(Token[])();
// size_t i = 0;
// while (i < tokens.length)
// {
// app.put(tokens[i++]);
// while (i < tokens.length && tokens[i] == TokenType.LParen) skipParens(tokens, i);
// while (i < tokens.length && tokens[i] == TokenType.LBracket) skipBrackets(tokens, i);
// while (i < tokens.length && tokens[i] == TokenType.Dot) ++i;
// }
// return app.data;
//}
//
//unittest
//{
// auto code = `a.b[10].c("grcl").x`;
// auto tokens = tokenize(code);
// assert (splitCallChain(tokens) == ["a", "b", "c", "x"]);
//}
//
//struct AutoComplete
//{
// this(const (Token)[] tokens, CompletionContext context)
// {
// this.tokens = tokens;
// this.context = context;
// }
//
// string getTypeOfExpression(const(Token)[] expression, const Token[] tokens, size_t cursor)
// {
// stderr.writeln("getting type of ", expression);
// if (expression.length == 0)
// return "void";
// auto type = typeOfVariable(expression[0], cursor);
// if (type is null)
// return "void";
// size_t index = 1;
// while (index < expression.length)
// {
// const Tuple!(string, string)[string] typeMap = context.getMembersOfType(
// type);
// const Tuple!(string, string)* memberType = expression[index].value in typeMap;
// if (memberType is null)
// return "void";
// else
// type = (*memberType)[0];
// index++;
// }
// return type;
// }
//
// string typeOfVariable(Token symbol, size_t cursor)
// {
// // int is of type int, double of type double, and so on
// if (symbol.value in typeProperties)
// return symbol.value;
//
// string tokenType = getTypeFromToken(symbol);
// if (tokenType !is null)
// return tokenType;
//
// if (context.getMembersOfType(symbol.value))
// return symbol.value;
//
// // Arbitrarily define the depth of the cursor position as zero
// // iterate backwards through the code to try to find the variable
// int depth = 0;
// auto preceedingTokens = assumeSorted(tokens).lowerBound(cursor);
// auto index = preceedingTokens.length - 1;
// while (true)
// {
// if (preceedingTokens[index] == TokenType.LBrace)
// --depth;
// else if (preceedingTokens[index] == TokenType.RBrace)
// ++depth;
// else if (depth <= 0 && preceedingTokens[index].value == symbol)
// {
// // Found the symbol, now determine if it was declared here.
// auto p = preceedingTokens[index - 1];
//
//
// if ((p == TokenType.Auto || p == TokenType.Immutable
// || p == TokenType.Const)
// && preceedingTokens[index + 1] == TokenType.Assign)
// {
// // Try to determine the type of a variable declared as "auto"
// return getTypeOfExpression(
// tokens[index + 2 .. findEndOfExpression(tokens, index + 2)],
// tokens, cursor);
// }
// else if (p == TokenType.Identifier
// || (p.type > TokenType.TYPES_BEGIN
// && p.type < TokenType.TYPES_END))
// {
// // Handle simple cases like "int a;" or "Someclass instance;"
// return p.value;
// }
// else if (p == TokenType.RBracket || p == TokenType.RParen)
// {
// return combineTokens(tokens[findBeginningOfExpression(tokens, index) .. index]);
// }
// }
// if (index == 0)
// break;
// else
// --index;
// }
//
// // Find all struct or class bodies that we're in.
// // Check for the symbol in those class/struct/interface bodies
// // if match is found, return it
// auto structs = context.getStructsContaining(cursor);
// if (symbol == "this" && structs.length > 0)
// {
// return minCount!("a.bodyStart > b.bodyStart")(structs)[0].name;
// }
//
// foreach (s; structs)
// {
// auto t = s.getMemberType(symbol.value);
// if (t !is null)
// return t;
// }
// return "void";
// }
//
// string symbolAt(size_t cursor) const
// {
// auto r = assumeSorted(tokens).lowerBound(cursor)[$ - 1];
// if (r.value.length + r.startIndex > cursor)
// return r.value;
// else
// return null;
// }
//
// string parenComplete(size_t cursor)
// {
// auto index = assumeSorted(tokens).lowerBound(cursor).length - 2;
// Token t = tokens[index];
// switch (tokens[index].type)
// {
// case TokenType.Version:
// return "completions\n" ~ to!string(join(map!`a ~ " k"`(versions), "\n").array());
// case TokenType.Scope:
// return "completions\n" ~ to!string(join(map!`a ~ " k"`(scopes), "\n").array());
// case TokenType.If:
// case TokenType.Cast:
// case TokenType.While:
// case TokenType.For:
// case TokenType.Foreach:
// case TokenType.Switch:
// return "";
// default:
// size_t startIndex = findBeginningOfExpression(tokens, index);
// auto callChain = splitCallChain(tokens[startIndex .. index + 1]);
// auto expressionType = getTypeOfExpression(
// callChain[0 .. $ - 1], tokens, cursor);
// return "calltips\n" ~ to!string(context.getCallTipsFor(expressionType,
// callChain[$ - 1].value, cursor).join("\n").array());
// }
// }
//
// string dotComplete(size_t cursor)
// {
// stderr.writeln("dotComplete");
// auto index = assumeSorted(tokens).lowerBound(cursor).length - 1;
// Token t = tokens[index];
//
// // If the last character entered before the cursor isn't a dot, give up.
// // The user was probably in the middle of typing the slice or vararg
// // operators
// if (t != TokenType.Dot)
// return null;
//
// size_t startIndex = findBeginningOfExpression(tokens, index);
// if (startIndex - 1 < tokens.length && tokens[startIndex - 1] == TokenType.Import)
// {
// return importComplete(splitCallChain(tokens[startIndex .. index]));
// }
//
// auto expressionType = getTypeOfExpression(
// splitCallChain(tokens[startIndex .. index]), tokens, cursor);
//
// stderr.writeln("expression type is ", expressionType);
//
// // Complete pointers and references the same way
// if (expressionType[$ - 1] == '*')
// expressionType = expressionType[0 .. $ - 1];
//
// const Tuple!(string, string)[string] typeMap = context.getMembersOfType(
// expressionType);
// if (typeMap is null)
// return "";
// auto app = appender!(string[])();
// foreach (k, t; typeMap)
// app.put(k ~ " " ~ t[1]);
// return to!string(array(join(sort!("a.toLower() < b.toLower()")(app.data), "\n")));
// }
//
// string importComplete(const(Token)[] tokens)
// {
// stderr.writeln("importComplete");
// auto app = appender!(string[])();
// string part = to!string(map!"a.value.dup"(tokens).join("/").array());
// foreach (path; context.importDirectories)
// {
// stderr.writeln("Searching for ", path, "/", part);
// if (!exists(buildPath(path, part)))
// continue;
// stderr.writeln("found it");
// foreach (DirEntry dirEntry; dirEntries(buildPath(path, part),
// SpanMode.shallow))
// {
// if (dirEntry.isDir)
// app.put(baseName(dirEntry.name) ~ " P");
// else if (dirEntry.name.endsWith(".d", ".di"))
// app.put(stripExtension(baseName(dirEntry.name)) ~ " M");
// }
// }
// return to!string(sort!("a.toLower() < b.toLower()")(app.data).join("\n").array());
// }
//
// const(Token)[] tokens;
// CompletionContext context;
//}
//
//unittest
//{
// auto code = q{
//struct TestStruct { int a; int b; }
//TestStruct ts;
//ts.a.
// };
//
// auto tokens = tokenize(code);
// auto mod = parseModule(tokens);
// auto context = new CompletionContext(mod);
// auto completion = AutoComplete(tokens, context);
// assert (completion.getTypeOfExpression(splitCallChain(tokens[13 .. 16]),
// tokens, 56) == "int");
//}

View File

@ -1,2 +1,2 @@
dmd *.d -release -noboundscheck -O -w -wi -m64 -property -ofdscanner -L-lsqlite3 #-inline
#dmd *.d -g -m64 -w -wi -property -ofdscanner -L-lsqlite3 #-unittest
dmd *.d std/d/*.d -release -noboundscheck -O -w -wi -m64 -property -ofdscanner -L-lsqlite3 #-inline
#dmd *.d std/d/*.d -g -m64 -w -wi -property -ofdscanner -L-lsqlite3 #-unittest

10
cache.d
View File

@ -13,11 +13,11 @@ import std.uuid;
import std.array;
import std.string;
import std.conv;
import std.d.lexer;
import location;
import parser;
import types;
import tokenizer;
private sqlite3* database;
@ -104,10 +104,10 @@ void updateCache(string dirs[], string moduleNames[])
if (timeLastModified.stdTime == mtime)
continue;
// re-parse the module
Module m = parseModule(byToken(readText(filePath)).array());
updateCache(m);
// // re-parse the module
// Module m = parseModule(byToken(readText(filePath)).array());
//
// updateCache(m);
sqlite3_reset(statement);
}

View File

@ -9,11 +9,12 @@ import std.math;
import std.array;
import std.range;
struct CircularBuffer(T, R) if (isInputRange!(R) && is (ElementType!(R) == T))
class CircularBuffer(T) : InputRange!(T)
{
public:
this (size_t size, R range)
this (size_t size, InputRange!(T) range)
{
this.range = range;
this.margin = size;
@ -31,41 +32,33 @@ public:
}
}
T opIndex(size_t index) const
in
{
assert (index <= sourceIndex + margin);
assert (index >= sourceIndex - margin);
}
body
{
return data[index % data.length];
}
T front() const @property
override T front() const @property
{
return data[index];
}
T peek(int offset)
T peek(int offset = 1)
in
{
assert(abs(offset) <= margin);
assert(sourceIndex + offset >= 0);
assert(canPeek(offset));
}
body
{
return data[(index + offset) % data.length];
}
T popFront()
bool canPeek(int offset = 1)
{
return abs(offset) <= margin && sourceIndex + offset >= 0;
}
override void popFront()
in
{
assert (!_empty);
}
body
{
T v = data[index];
index = (index + 1) % data.length;
++sourceIndex;
if (range.empty())
@ -79,7 +72,6 @@ public:
end = (end + 1) % data.length;
range.popFront();
}
return v;
}
bool empty() const @property
@ -87,8 +79,40 @@ public:
return _empty;
}
override T moveFront()
{
auto r = front();
popFront();
return r;
}
override int opApply(int delegate(T) dg)
{
int result = 0;
while (!empty)
{
result = dg(front);
if (result)
break;
}
return result;
}
override int opApply(int delegate(size_t, T) dg)
{
int result = 0;
int i = 0;
while (!empty)
{
result = dg(i, front);
if (result)
break;
}
return result;
}
private:
R range;
InputRange!(T) range;
immutable size_t margin;
T[] data;
size_t sourceIndex;
@ -123,8 +147,6 @@ unittest
buf.popFront();
buf.popFront();
assert (buf.front == 4);
assert (buf[2] == 2);
assert (buf[6] == 6);
}
unittest

111
codegen.d
View File

@ -1,111 +0,0 @@
// Copyright Brian Schott (Sir Alaran) 2012.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
// This module triggers DMD bug 7900 if compiled with -inline
module codegen;
import std.range;
/**
 * A trie keyed by the elements of an input range K, storing values of type V.
 * The root doubles as a TrieNode, so lookup logic can treat it uniformly.
 */
class Trie(K, V) if (isInputRange!K): TrieNode!(K, V)
{
	/**
	 * Adds the given value to the trie with the given key, creating any
	 * intermediate nodes along the way.
	 */
	void add(K key, V value) pure
	{
		TrieNode!(K,V) node = this;
		foreach (part; key)
		{
			auto child = part in node.children;
			if (child is null)
			{
				// No edge for this key element yet: create it.
				auto fresh = new TrieNode!(K, V);
				node.children[part] = fresh;
				node = fresh;
			}
			else
				node = node.children[part];
		}
		// The node reached by the full key holds the value.
		node.value = value;
	}
}
/**
 * A single trie node: the value stored for a key ending at this node, plus
 * child nodes keyed by the next element of the key.
 */
class TrieNode(K, V) if (isInputRange!K)
{
V value; // value for the key that ends at this node, if any
TrieNode!(K,V)[ElementType!K] children; // edges to longer keys
}
/**
 * Recursively renders the trie rooted at `node` as nested D switch/case
 * source text; used at compile time to generate the lexer's longest-match
 * operator/token recognition code.
 * Params:
 *     node = trie node whose children become case statements
 *     indentString = indentation prefix for the generated lines
 * Returns: the generated case-statement source code
 */
string printCaseStatements(K, V)(TrieNode!(K,V) node, string indentString)
{
string caseStatement = "";
foreach(dchar k, TrieNode!(K,V) v; node.children)
{
// Emit the case label and the code that consumes the matched character.
caseStatement ~= indentString;
caseStatement ~= "case '";
caseStatement ~= k;
caseStatement ~= "':\n";
caseStatement ~= indentString;
caseStatement ~= "\tcurrent.value ~= '";
caseStatement ~= k;
caseStatement ~= "';\n";
caseStatement ~= indentString;
caseStatement ~= "\t++index;\n";
caseStatement ~= indentString;
caseStatement ~= "\trange.popFront();\n";
if (v.children.length > 0)
{
// Longer matches exist: emit an EoF fallback to this node's token
// type, then recurse into a nested switch on the next character.
caseStatement ~= indentString;
caseStatement ~= "\tif (range.isEoF())\n";
caseStatement ~= indentString;
caseStatement ~= "\t{\n";
caseStatement ~= indentString;
// NOTE: node.children[k] refers to the same node as v here.
caseStatement ~= "\t\tcurrent.type = " ~ node.children[k].value;
caseStatement ~= ";\n";
caseStatement ~= indentString;
caseStatement ~= "\t\tbreak;\n";
caseStatement ~= indentString;
caseStatement ~= "\t}\n";
caseStatement ~= indentString;
caseStatement ~= "\tswitch (range.front)\n";
caseStatement ~= indentString;
caseStatement ~= "\t{\n";
caseStatement ~= printCaseStatements(v, indentString ~ "\t");
caseStatement ~= indentString;
caseStatement ~= "\tdefault:\n";
caseStatement ~= indentString;
caseStatement ~= "\t\tcurrent.type = ";
caseStatement ~= v.value;
caseStatement ~= ";\n";
caseStatement ~= indentString;
caseStatement ~= "\t\tbreak;\n";
caseStatement ~= indentString;
caseStatement ~= "\t}\n";
caseStatement ~= indentString;
caseStatement ~= "\tbreak;\n";
}
else
{
// Leaf: the match is complete; assign the token type directly.
caseStatement ~= indentString;
caseStatement ~= "\tcurrent.type = ";
caseStatement ~= v.value;
caseStatement ~= ";\n";
caseStatement ~= indentString;
caseStatement ~= "\tbreak;\n";
}
}
return caseStatement;
}
/**
 * Builds a Trie from alternating pattern/token-type argument pairs and
 * renders it as switch/case source text via printCaseStatements.
 */
string generateCaseTrie(string[] args ...)
{
	auto trie = new Trie!(string, string);
	// Arguments come in (pattern, token type) pairs.
	size_t pairIndex = 0;
	while (pairIndex < args.length)
	{
		trie.add(args[pairIndex], args[pairIndex + 1]);
		pairIndex += 2;
	}
	return printCaseStatements(trie, "");
}

View File

@ -7,8 +7,10 @@
module highlighter;
import std.stdio;
import langutils;
import std.array;
import std.d.lexer;
import langutils;
void writeSpan(string cssClass, string value)
{
@ -23,13 +25,13 @@ void highlight(R)(R tokens)
<meta http-equiv="content-type" content="text/html; charset=UTF-8"/>
<body>
<style type="text/css">
html { background-color: #111; color: #ccc; }
.kwrd { font-weight: bold; color: DeepSkyBlue; }
.com { color: lightgreen; font-style: italic;}
.num { color: red; font-weigth: bold; }
.str { color: Tomato; font-style: italic; }
.op { color: tan; font-weight: bold; }
.type { color: cyan; font-weight: bold; }
html { background-color: #fff; color: #222; }
.kwrd { font-weight: bold; color: blue; }
.com { color: green; font-style: italic;}
.num { color: orangered; font-weigth: bold; }
.str { color: red; font-style: italic; }
.op { color: 333; font-weight: bold; }
.type { color: magenta; font-weight: bold; }
</style>
<pre>]");

View File

@ -4,7 +4,9 @@
// http://www.boost.org/LICENSE_1_0.txt)
module langutils;
import std.array;
import std.d.lexer;
/**
@ -33,18 +35,6 @@ pure nothrow bool isAttribute(TokenType input)
return input > TokenType.ATTRIBUTES_BEGIN && input < TokenType.ATTRIBUTES_END;
}
/**
 * Returns: the token type for the given string. Defaults to "identifier"
 */
pure nothrow TokenType lookupTokenType(const string input)
{
	auto match = input in tokenLookup;
	return match is null ? TokenType.Identifier : *match;
}
string combineTokens(ref const Token[] tokens)
{
auto app = appender!string();
@ -53,533 +43,7 @@ string combineTokens(ref const Token[] tokens)
return app.data;
}
/**
 * Looks up the TokenType for the given string by dispatching on the string's
 * length first, so only keywords of that exact length are compared.
 * Returns: the keyword's token type, or TokenType.Identifier if the input is
 * not a keyword.
 */
pure nothrow TokenType lookupTokenTypeOptimized(const string input)
{
	switch(input.length)
	{
	case 2:
		switch (input)
		{
		case "do": return TokenType.Do;
		case "if": return TokenType.If;
		case "in": return TokenType.In;
		case "is": return TokenType.Is;
		default: break;
		}
		break;
	case 3:
		switch (input)
		{
		case "asm": return TokenType.Asm;
		case "for": return TokenType.For;
		case "int": return TokenType.Int;
		case "new": return TokenType.New;
		case "out": return TokenType.Out;
		case "ref": return TokenType.Ref;
		case "try": return TokenType.Try;
		default: break;
		}
		break;
	case 4:
		switch (input)
		{
		case "auto": return TokenType.Auto;
		case "body": return TokenType.Body;
		case "bool": return TokenType.Bool;
		case "byte": return TokenType.Byte;
		case "case": return TokenType.Case;
		case "cast": return TokenType.Cast;
		case "cent": return TokenType.Cent;
		case "char": return TokenType.Char;
		case "else": return TokenType.Else;
		case "enum": return TokenType.Enum;
		case "goto": return TokenType.Goto;
		case "lazy": return TokenType.Lazy;
		case "long": return TokenType.Long;
		case "null": return TokenType.Null;
		case "pure": return TokenType.Pure;
		case "real": return TokenType.Real;
		case "this": return TokenType.This;
		case "true": return TokenType.True;
		case "uint": return TokenType.Uint;
		case "void": return TokenType.Void;
		case "with": return TokenType.With;
		default: break;
		}
		break;
	case 5:
		switch (input)
		{
		case "alias": return TokenType.Alias;
		case "align": return TokenType.Align;
		case "break": return TokenType.Break;
		case "catch": return TokenType.Catch;
		case "class": return TokenType.Class;
		case "const": return TokenType.Const;
		case "creal": return TokenType.Creal;
		case "dchar": return TokenType.Dchar;
		case "debug": return TokenType.Debug;
		case "false": return TokenType.False;
		case "final": return TokenType.Final;
		case "float": return TokenType.Float;
		case "inout": return TokenType.Inout;
		case "ireal": return TokenType.Ireal;
		case "macro": return TokenType.Macro;
		case "mixin": return TokenType.Mixin;
		case "scope": return TokenType.Scope;
		case "short": return TokenType.Short;
		case "super": return TokenType.Super;
		case "throw": return TokenType.Throw;
		case "ubyte": return TokenType.Ubyte;
		case "ucent": return TokenType.Ucent;
		case "ulong": return TokenType.Ulong;
		case "union": return TokenType.Union;
		case "wchar": return TokenType.Wchar;
		case "while": return TokenType.While;
		default: break;
		}
		break;
	case 6:
		switch (input)
		{
		case "assert": return TokenType.Assert;
		case "cfloat": return TokenType.Cfloat;
		case "delete": return TokenType.Delete;
		case "double": return TokenType.Double;
		case "export": return TokenType.Export;
		case "extern": return TokenType.Extern;
		case "ifloat": return TokenType.Ifloat;
		case "import": return TokenType.Import;
		case "module": return TokenType.Module;
		case "pragma": return TokenType.Pragma;
		case "public": return TokenType.Public;
		case "return": return TokenType.Return;
		case "shared": return TokenType.Shared;
		case "static": return TokenType.Static;
		case "string": return TokenType.String;
		case "struct": return TokenType.Struct;
		case "switch": return TokenType.Switch;
		case "typeid": return TokenType.Typeid;
		case "typeof": return TokenType.Typeof;
		case "ushort": return TokenType.Ushort;
		default: break;
		}
		break;
	case 7:
		switch (input)
		{
		case "cdouble": return TokenType.Cdouble;
		case "default": return TokenType.Default;
		case "dstring": return TokenType.DString;
		case "finally": return TokenType.Finally;
		case "foreach": return TokenType.Foreach;
		case "idouble": return TokenType.Idouble;
		case "nothrow": return TokenType.Nothrow;
		case "package": return TokenType.Package;
		case "private": return TokenType.Private;
		case "typedef": return TokenType.Typedef;
		case "version": return TokenType.Version;
		case "wstring": return TokenType.WString;
		default: break;
		}
		break;
	case 8:
		switch (input)
		{
		case "override": return TokenType.Override;
		case "continue": return TokenType.Continue;
		case "__LINE__": return TokenType.Line;
		case "template": return TokenType.Template;
		case "abstract": return TokenType.Abstract;
		case "__thread": return TokenType.Thread;
		case "__traits": return TokenType.Traits;
		case "volatile": return TokenType.Volatile;
		case "delegate": return TokenType.Delegate;
		case "function": return TokenType.Function;
		case "unittest": return TokenType.Unittest;
		case "__FILE__": return TokenType.File;
		default: break;
		}
		break;
	case 9:
		switch (input)
		{
		case "__gshared": return TokenType.Gshared;
		case "immutable": return TokenType.Immutable;
		case "interface": return TokenType.Interface;
		case "invariant": return TokenType.Invariant;
		case "protected": return TokenType.Protected;
		default: break;
		}
		break;
	case 10:
		if (input == "deprecated")
			return TokenType.Deprecated;
		break;
	case 12:
		// BUG FIX: this was `case 11`, but "synchronized" is 12 characters
		// long, so the keyword could never match and was lexed as an
		// identifier.
		if (input == "synchronized")
			return TokenType.Synchronized;
		break;
	case 15:
		// BUG FIX: this was `case 13`, but "foreach_reverse" is 15
		// characters long, so the keyword could never match.
		if (input == "foreach_reverse")
			return TokenType.Foreach_reverse;
		break;
	default: break;
	}
	return TokenType.Identifier;
}
/**
* Listing of all the tokens in the D language
*/
enum TokenType: uint
{
	// Operators
	OPERATORS_BEGIN, /// Sentinel: start of the operator tokens
	Assign, /// =
	At, /// @
	BitAnd, /// &
	BitAndEquals, /// &=
	BitOr, /// |
	BitOrEquals, /// |=
	CatEquals, /// ~=
	Colon, /// :
	Comma, /// ,
	Decrement, /// --
	Div, /// /
	DivEquals, /// /=
	Dollar, /// $
	Dot, /// .
	Equals, /// ==
	GoesTo, /// =>
	Greater, /// >
	GreaterEqual, /// >=
	Hash, /// #
	Increment, /// ++
	LBrace, /// {
	LBracket, /// [
	Less, /// <
	LessEqual, /// <=
	LessEqualGreater, /// <>=
	LessOrGreater, /// <>
	LogicAnd, /// &&
	LogicOr, /// ||
	LParen, /// $(LPAREN)
	Minus, /// -
	MinusEquals, /// -=
	Mod, /// %
	ModEquals, /// %=
	MulEquals, /// *=
	Not, /// !
	NotEquals, /// !=
	NotGreater, /// !>
	NotGreaterEqual, /// !>=
	NotLess, /// !<
	NotLessEqual, /// !<=
	NotLessEqualGreater, /// !<>
	Plus, /// +
	PlusEquals, /// +=
	Pow, /// ^^
	PowEquals, /// ^^=
	RBrace, /// }
	RBracket, /// ]
	RParen, /// $(RPAREN)
	Semicolon, /// ;
	ShiftLeft, /// <<
	ShiftLeftEqual, /// <<=
	ShiftRight, /// >>
	ShiftRightEqual, /// >>=
	Slice, /// ..
	Star, /// *
	Ternary, /// ?
	Tilde, /// ~
	Unordered, /// !<>=
	UnsignedShiftRight, /// >>>
	UnsignedShiftRightEqual, /// >>>=
	Vararg, /// ...
	Xor, /// ^
	XorEquals, /// ^=
	OPERATORS_END, /// Sentinel: end of the operator tokens

	// Types
	TYPES_BEGIN, /// Sentinel: start of the built-in type tokens
	Bool, /// bool
	Byte, /// byte
	Cdouble, /// cdouble
	Cent, /// cent
	Cfloat, /// cfloat
	Char, /// char
	Creal, /// creal
	Dchar, /// dchar
	Double, /// double
	DString, /// dstring
	Float, /// float
	Function, /// function
	Idouble, /// idouble
	Ifloat, /// ifloat
	Int, /// int
	Ireal, /// ireal
	Long, /// long
	Real, /// real
	Short, /// short
	String, /// string
	Ubyte, /// ubyte
	Ucent, /// ucent
	Uint, /// uint
	Ulong, /// ulong
	Ushort, /// ushort
	Void, /// void
	Wchar, /// wchar
	WString, /// wstring
	TYPES_END, /// Sentinel: end of the built-in type tokens

	Template, /// template

	// Keywords
	KEYWORDS_BEGIN, /// Sentinel: start of the keyword tokens
	ATTRIBUTES_BEGIN, /// Sentinel: start of the attribute keywords
	Align, /// align
	Deprecated, /// deprecated
	Extern, /// extern
	Pragma, /// pragma
	PROTECTION_BEGIN, /// Sentinel: start of the protection keywords
	Export, /// export
	Package, /// package
	Private, /// private
	Protected, /// protected
	Public, /// public
	PROTECTION_END, /// Sentinel: end of the protection keywords
	Abstract, /// abstract
	AtDisable, /// @disable
	Auto, /// auto
	Const, /// const
	Final, /// final
	Gshared, /// __gshared
	Immutable, /// immutable
	Inout, /// inout
	Scope, /// scope
	Shared, /// shared
	Static, /// static
	Synchronized, /// synchronized
	ATTRIBUTES_END, /// Sentinel: end of the attribute keywords
	Alias, /// alias
	Asm, /// asm
	Assert, /// assert
	Body, /// body
	Break, /// break
	Case, /// case
	Cast, /// cast
	Catch, /// catch
	Class, /// class
	Continue, /// continue
	Debug, /// debug
	Default, /// default
	Delegate, /// delegate
	Delete, /// delete
	Do, /// do
	Else, /// else
	Enum, /// enum
	False, /// false
	Finally, /// finally
	Foreach, /// foreach
	Foreach_reverse, /// foreach_reverse
	For, /// for
	Goto, /// goto
	If, /// if
	Import, /// import
	In, /// in
	Interface, /// interface
	Invariant, /// invariant
	Is, /// is
	Lazy, /// lazy
	Macro, /// macro
	Mixin, /// mixin
	Module, /// module
	New, /// new
	Nothrow, /// nothrow
	Null, /// null
	Out, /// out
	Override, /// override
	Pure, /// pure
	Ref, /// ref
	Return, /// return
	Struct, /// struct
	Super, /// super
	Switch, /// switch
	This, /// this
	Throw, /// throw
	True, /// true
	Try, /// try
	Typedef, /// typedef
	Typeid, /// typeid
	Typeof, /// typeof
	Union, /// union
	Unittest, /// unittest
	Version, /// version
	Volatile, /// volatile
	While, /// while
	With, /// with
	KEYWORDS_END, /// Sentinel: end of the keyword tokens

	// Constants
	CONSTANTS_BEGIN, /// Sentinel: start of the special-constant tokens
	File, /// __FILE__
	Line, /// __LINE__
	Thread, /// __thread
	Traits, /// __traits
	CONSTANTS_END, /// Sentinel: end of the special-constant tokens

	// Misc
	MISC_BEGIN, /// Sentinel: start of the miscellaneous tokens
	Blank, /// unknown token type
	Comment, /// /** comment */ or // comment or ///comment
	Identifier, /// anything else
	ScriptLine, /// Line at the beginning of source file that starts from #!
	Whitespace, /// whitespace
	NUMBERS_BEGIN, /// Sentinel: start of the numeric-literal tokens
	DoubleLiteral, /// 123.456
	FloatLiteral, /// 123.456f or 0x123_45p-af
	IDoubleLiteral, /// 123.456i
	IFloatLiteral, /// 123.456fi
	IntLiteral, /// 123 or 0b1101010101
	LongLiteral, /// 123L
	RealLiteral, /// 123.456L
	IRealLiteral, /// 123.456Li
	UnsignedIntLiteral, /// 123u
	UnsignedLongLiteral, /// 123uL
	NUMBERS_END, /// Sentinel: end of the numeric-literal tokens
	STRINGS_BEGIN, /// Sentinel: start of the string-literal tokens
	DStringLiteral, /// "32-bit character string"d
	StringLiteral, /// "a string"
	WStringLiteral, /// "16-bit character string"w
	STRINGS_END, /// Sentinel: end of the string-literal tokens
	MISC_END, /// Sentinel: end of the miscellaneous tokens
}
/**
* lookup table for converting strings to tokens
*/
immutable TokenType[string] tokenLookup;
/// Populates $(D tokenLookup) at module load time.
/// NOTE(review): this table duplicates the switch in lookupTokenType();
/// keep the two in sync when adding or removing keywords.
static this()
{
	tokenLookup = [
		"abstract" : TokenType.Abstract,
		"alias" : TokenType.Alias,
		"align" : TokenType.Align,
		"asm" : TokenType.Asm,
		"assert" : TokenType.Assert,
		"auto" : TokenType.Auto,
		"body" : TokenType.Body,
		"bool" : TokenType.Bool,
		"break" : TokenType.Break,
		"byte" : TokenType.Byte,
		"case" : TokenType.Case,
		"cast" : TokenType.Cast,
		"catch" : TokenType.Catch,
		"cdouble" : TokenType.Cdouble,
		"cent" : TokenType.Cent,
		"cfloat" : TokenType.Cfloat,
		"char" : TokenType.Char,
		"class" : TokenType.Class,
		"const" : TokenType.Const,
		"continue" : TokenType.Continue,
		"creal" : TokenType.Creal,
		"dchar" : TokenType.Dchar,
		"debug" : TokenType.Debug,
		"default" : TokenType.Default,
		"delegate" : TokenType.Delegate,
		"delete" : TokenType.Delete,
		"deprecated" : TokenType.Deprecated,
		"do" : TokenType.Do,
		"double" : TokenType.Double,
		"dstring" : TokenType.DString,
		"else" : TokenType.Else,
		"enum" : TokenType.Enum,
		"export" : TokenType.Export,
		"extern" : TokenType.Extern,
		"false" : TokenType.False,
		"__FILE__" : TokenType.File,
		"finally" : TokenType.Finally,
		"final" : TokenType.Final,
		"float" : TokenType.Float,
		"foreach_reverse" : TokenType.Foreach_reverse,
		"foreach" : TokenType.Foreach,
		"for" : TokenType.For,
		"function" : TokenType.Function,
		"goto" : TokenType.Goto,
		"__gshared" : TokenType.Gshared,
		"idouble" : TokenType.Idouble,
		"ifloat" : TokenType.Ifloat,
		"if" : TokenType.If,
		"immutable" : TokenType.Immutable,
		"import" : TokenType.Import,
		"inout" : TokenType.Inout,
		"interface" : TokenType.Interface,
		"in" : TokenType.In,
		"int" : TokenType.Int,
		"invariant" : TokenType.Invariant,
		"ireal" : TokenType.Ireal,
		"is" : TokenType.Is,
		"lazy" : TokenType.Lazy,
		"__LINE__" : TokenType.Line,
		"long" : TokenType.Long,
		"macro" : TokenType.Macro,
		"mixin" : TokenType.Mixin,
		"module" : TokenType.Module,
		"new" : TokenType.New,
		"nothrow" : TokenType.Nothrow,
		"null" : TokenType.Null,
		"out" : TokenType.Out,
		"override" : TokenType.Override,
		"package" : TokenType.Package,
		"pragma" : TokenType.Pragma,
		"private" : TokenType.Private,
		"protected" : TokenType.Protected,
		"public" : TokenType.Public,
		"pure" : TokenType.Pure,
		"real" : TokenType.Real,
		"ref" : TokenType.Ref,
		"return" : TokenType.Return,
		"scope" : TokenType.Scope,
		"shared" : TokenType.Shared,
		"short" : TokenType.Short,
		"static" : TokenType.Static,
		"string" : TokenType.String,
		"struct" : TokenType.Struct,
		"super" : TokenType.Super,
		"switch" : TokenType.Switch,
		"synchronized" : TokenType.Synchronized,
		"template" : TokenType.Template,
		"this" : TokenType.This,
		"__thread" : TokenType.Thread,
		"throw" : TokenType.Throw,
		"__traits" : TokenType.Traits,
		"true" : TokenType.True,
		"try" : TokenType.Try,
		"typedef" : TokenType.Typedef,
		"typeid" : TokenType.Typeid,
		"typeof" : TokenType.Typeof,
		"ubyte" : TokenType.Ubyte,
		"ucent" : TokenType.Ucent,
		"uint" : TokenType.Uint,
		"ulong" : TokenType.Ulong,
		"union" : TokenType.Union,
		"unittest" : TokenType.Unittest,
		"ushort" : TokenType.Ushort,
		"version" : TokenType.Version,
		"void" : TokenType.Void,
		"volatile" : TokenType.Volatile,
		"wchar" : TokenType.Wchar,
		"while" : TokenType.While,
		"with" : TokenType.With,
		"wstring" : TokenType.WString,
	];
}
pure string getTypeFromToken(ref const Token t)
pure string getTypeFromToken(const Token t)
{
switch (t.type)
{
@ -609,56 +73,8 @@ pure string getTypeFromToken(ref const Token t)
}
}
/**
 * Params: t = the token to classify
 * Returns: true if the token is an identifier or one of the built-in type
 *     keywords (bool, int, string, ...).
 */
pure bool isIdentifierOrType(inout Token t)
{
	// BUG FIX: the original wrote "&& TokenType.TYPES_END", which tests the
	// truthiness of the enum constant itself (always non-zero, i.e. always
	// true), so every token past TYPES_BEGIN -- including all keywords,
	// constants, and literals -- was misclassified as a type. The intended
	// check is an open-interval range test on t.type.
	return t.type == TokenType.Identifier || (t.type > TokenType.TYPES_BEGIN
		&& t.type < TokenType.TYPES_END);
}
/**
* Token structure
*/
/**
 * A single token produced by the lexer.
 */
struct Token
{
	/// The token's classification
	TokenType type;

	/// The exact text of the token as it appeared in the source code
	string value;

	/// The line of the original source on which this token appears
	uint lineNumber;

	/// Offset of the token's first character within the original text
	uint startIndex;

	/**
	 * Two tokens compare equal when both their type and their source text
	 * match; position information is deliberately ignored.
	 */
	bool opEquals(ref const(Token) other) const
	{
		return type == other.type && value == other.value;
	}

	/**
	 * A token compares equal to a string when its source text matches it.
	 */
	bool opEquals(string range) const
	{
		return value == range;
	}

	/**
	 * A token compares equal to a TokenType when its type matches it.
	 */
	bool opEquals(TokenType t) const
	{
		return t == type;
	}

	/**
	 * Orders tokens against a source offset by their starting index.
	 */
	int opCmp(size_t i) const
	{
		if (i > startIndex) return -1;
		return i < startIndex ? 1 : 0;
	}
}

83
main.d
View File

@ -15,13 +15,18 @@ import std.parallelism;
import std.path;
import std.regex;
import std.stdio;
import std.d.lexer;
import autocomplete;
import highlighter;
import langutils;
import location;
import parser;
import tokenizer;
import types;
import circularbuffer;
immutable size_t CIRC_BUFF_SIZE = 4;
pure bool isLineOfCode(TokenType t)
{
@ -100,9 +105,9 @@ int main(string[] args)
{
string[] importDirs;
bool sloc;
bool dotComplete;
/+bool dotComplete;+/
bool json;
bool parenComplete;
/+bool parenComplete;+/
bool highlight;
bool ctags;
bool recursiveCtags;
@ -111,8 +116,8 @@ int main(string[] args)
try
{
getopt(args, "I", &importDirs, "dotComplete", &dotComplete, "sloc", &sloc,
"json", &json, "parenComplete", &parenComplete, "highlight", &highlight,
getopt(args, "I", &importDirs,/+ "dotComplete", &dotComplete,+/ "sloc", &sloc,
"json", &json, /+"parenComplete", &parenComplete,+/ "highlight", &highlight,
"ctags", &ctags, "recursive|r|R", &recursiveCtags, "help|h", &help);
}
catch (Exception e)
@ -120,7 +125,7 @@ int main(string[] args)
stderr.writeln(e.msg);
}
if (help || (!sloc && !dotComplete && !json && !parenComplete && !highlight
if (help || (!sloc && /+!dotComplete &&+/ !json /+&& !parenComplete+/ && !highlight
&& !ctags && !format))
{
printHelp();
@ -166,7 +171,7 @@ int main(string[] args)
return 0;
}
if (dotComplete || parenComplete)
/+if (dotComplete || parenComplete)
{
if (isAbsolute(args[1]))
importDirs ~= dirName(args[1]);
@ -203,11 +208,11 @@ int main(string[] args)
else if (dotComplete)
writeln(complete.dotComplete(to!size_t(args[1])));
return 0;
}
}+/
if (json)
{
Token[] tokens;
CircularBuffer!(Token) tokens;
if (args.length == 1)
{
// Read from stdin
@ -215,46 +220,46 @@ int main(string[] args)
char[] buf;
while (stdin.readln(buf))
f.put(buf);
tokens = byToken(f.data).array();
tokens = new CircularBuffer!(Token)(CIRC_BUFF_SIZE, byToken!string(f.data));
}
else
{
// read given file
tokens = byToken(readText(args[1])).array();
tokens = new CircularBuffer!(Token)(CIRC_BUFF_SIZE, byToken!string(readText(args[1])));
}
auto mod = parseModule(tokens);
mod.writeJSONTo(stdout);
return 0;
}
if (ctags)
{
if (!recursiveCtags)
{
auto tokens = byToken(readText(args[1]));
auto mod = parseModule(tokens.array());
mod.writeCtagsTo(stdout, args[1]);
}
else
{
Module m;
foreach (dirEntry; dirEntries(args[1], SpanMode.breadth))
{
if (!dirEntry.name.endsWith(".d", ".di"))
continue;
stderr.writeln("Generating tags for ", dirEntry.name);
auto tokens = byToken(readText(dirEntry.name));
if (m is null)
m = parseModule(tokens.array());
else
{
auto mod = parseModule(tokens.array());
m.merge(mod);
}
}
m.writeCtagsTo(stdout, "");
}
}
// if (ctags)
// {
// if (!recursiveCtags)
// {
// auto tokens = byToken(readText(args[1]));
// auto mod = parseModule(tokens.array());
// mod.writeCtagsTo(stdout, args[1]);
// }
// else
// {
// Module m;
// foreach (dirEntry; dirEntries(args[1], SpanMode.breadth))
// {
// if (!dirEntry.name.endsWith(".d", ".di"))
// continue;
// stderr.writeln("Generating tags for ", dirEntry.name);
// auto tokens = byToken(readText(dirEntry.name));
// if (m is null)
// m = parseModule(tokens.array());
// else
// {
// auto mod = parseModule(tokens.array());
// m.merge(mod);
// }
// }
// m.writeCtagsTo(stdout, "");
// }
// }
return 0;
}

750
parser.d

File diff suppressed because it is too large Load Diff

View File

@ -1,7 +1,15 @@
// Copyright Brian Schott (Sir Alaran) 2012.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
module entities;
// Written in the D programming language
/**
* Contains listing of named entities for the D lexer.
*
* Copyright: Brian Schott 2013
* License: <a href="http://www.boost.org/LICENSE_1_0.txt">Boost License 1.0</a>.
* Authors: Brian Schott
* Source: $(PHOBOSSRC std/d/_lexer.d)
*/
module std.d.entities;
/**
* Generated from $(LINK http://www.w3.org/TR/html5/entities.json)

File diff suppressed because it is too large Load Diff