mirror of https://gitlab.com/basile.b/dexed.git

commit 94eddd0ca8 (parent 693c1324f8)

dastworx, fix several issues

+ speed up compilation by disabling inlining in the release configuration
+ reduce the size of the todo-list and symbol-list streams
+ fix the broken "deep" option of the symbol list
@@ -20,7 +20,6 @@ object CurrentProject: TCENativeProject
 end
 item
   name = 'release'
-  outputOptions.inlining = True
   outputOptions.boundsCheck = offAlways
   outputOptions.optimizations = True
   outputOptions.release = True
@@ -50,9 +50,13 @@ q{
     else version(unittest) log();
 };
 
+/**
+ * Contains all the D version identifiers that are not valid
+ * for this operating system.
+ */
 immutable string[] badVersions;
 
-private enum predefinedVersions = [
+private static immutable predefinedVersions = [
     "AArch64",
     "AIX",
     "all",
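Note on the enum-to-static-immutable change above: in D an enum array is a manifest constant, so every use site pastes the literal and may allocate a fresh array at run time, while static immutable stores the data once. A minimal sketch of the difference, independent of the patch:

    // enum arrays are manifest constants: each use may allocate a new copy
    enum asEnum = ["AArch64", "AIX", "all"];
    // static immutable keeps a single copy of the data
    static immutable asImmutable = ["AArch64", "AIX", "all"];

    void main()
    {
        import std.stdio : writeln;
        // both iterate the same way; only storage and allocation differ
        foreach (v; asImmutable)
            writeln(v);
        assert(asEnum == asImmutable);
    }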
@@ -183,7 +187,7 @@ string patchPascalString(string value)
     bool skip;
     foreach (immutable i; 0..value.length)
     {
-        char c = value[i];
+        const char c = value[i];
         if (c > 0x7F)
         {
             app ~= value[i];
@@ -12,7 +12,7 @@ import
     common, todos, symlist, imports, mainfun, runnableflags;
 
 
-private __gshared bool storeAstErrors = void, deepSymList = true;
+private __gshared bool storeAstErrors = void, deepSymList;
 private __gshared const(Token)[] tokens;
 private __gshared Module module_ = void;
 private __gshared static Appender!(ubyte[]) source;
@@ -36,13 +36,13 @@ void main(string[] args)
     {
         mixin(logCall);
         File f = File(__FILE__, "r");
-        foreach(buffer; f.byChunk(4096))
+        foreach(ref buffer; f.byChunk(4096))
             source.put(buffer);
         f.close;
     }
     else
     {
-        foreach(buffer; stdin.byChunk(4096))
+        foreach(ref buffer; stdin.byChunk(4096))
             source.put(buffer);
     }
 
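For reference, a self-contained sketch of the chunked-read pattern used in main() above; the 4096-byte chunk size comes from the patch, the file name here is just illustrative, and the ref added to the loop variable is presumably only a micro-optimization:

    import std.array : Appender;
    import std.stdio : File;

    void main()
    {
        Appender!(ubyte[]) source;
        auto f = File(__FILE__, "r");
        // byChunk reuses a single 4 KiB buffer; the appender copies what it keeps
        foreach (buffer; f.byChunk(4096))
            source.put(buffer);
        f.close();
        assert(source.data.length > 0);
    }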
@@ -68,22 +68,24 @@ void main(string[] args)
     );
 }
 
+/// Handles the "-s" option: create the symbol list in the output
 void handleSymListOption()
 {
     mixin(logCall);
-    bool deep;
     storeAstErrors = true;
     lex!false;
     parseTokens;
     listSymbols(module_, errors.data, deepSymList);
 }
 
+/// Handles the "-t" option: create the list of todo comments in the output
 void handleTodosOption()
 {
     mixin(logCall);
     getTodos(files);
 }
 
+/// Handles the "-r" option:
 void handleRunnableFlags()
 {
     mixin(logCall);
@@ -91,6 +93,7 @@ void handleRunnableFlags()
     getRunnableFlags(tokens);
 }
 
+/// Handles the "-i" option: create the import list in the output
 void handleImportsOption()
 {
     mixin(logCall);
@@ -100,6 +103,7 @@ void handleImportsOption()
     listImports(module_);
 }
 
+/// Handles the "-m" option: writes if a main() is present in the module
 void handleMainfunOption()
 {
     mixin(logCall);
@@ -109,24 +113,21 @@ void handleMainfunOption()
     detectMainFun(module_);
 }
 
-void handleErrors(string fname, size_t line, size_t col, string message, bool err)
+private void handleErrors(string fname, size_t line, size_t col, string message, bool err)
 {
     if (storeAstErrors)
         errors ~= construct!(AstError)(cast(ErrorType) err, message, line, col);
 }
 
-void lex(bool keepComments = false)()
+private void lex(bool keepComments = false)()
 {
     static if (keepComments)
-    {
-        DLexer dlx = DLexer(source.data, config, cache);
-        tokens = dlx.array;
-    }
+        tokens = DLexer(source.data, config, cache).array;
     else
         tokens = getTokensForParser(source.data, config, cache);
 }
 
-void parseTokens()
+private void parseTokens()
 {
     mixin(logCall);
     if (!module_)
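The simplified lex template above drops the named DLexer temporary. A hedged, standalone sketch of the two lexing paths with libdparse (dparse.lexer); the helper name and the "stdin" file label are illustrative, and the StringCache must outlive the returned tokens because their text is interned in it:

    import std.array : array;
    import dparse.lexer : DLexer, LexerConfig, StringBehavior, StringCache,
        Token, getTokensForParser;

    const(Token)[] lexSource(ubyte[] src, ref StringCache cache, bool keepComments)
    {
        LexerConfig config = LexerConfig("stdin", StringBehavior.source);
        if (keepComments)
            // DLexer yields every token, comments included; .array materializes it
            return DLexer(src, config, &cache).array;
        else
            // getTokensForParser drops comment tokens before parsing
            return getTokensForParser(src, config, &cache);
    }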
@@ -15,10 +15,10 @@ private __gshared bool deep = void;
 /**
  * Serializes the symbols in the standard output
  */
-void listSymbols(const(Module) mod, AstErrors errors, bool deep = true)
+void listSymbols(const(Module) mod, AstErrors errors, bool ddeep = true)
 {
     mixin(logCall);
-    symlist.deep = deep;
+    symlist.deep = ddeep;
     alias SL = SymbolListBuilder!(ListFmt.Pas);
     SL.addAstErrors(errors);
     SL sl = construct!(SL);
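The deep-to-ddeep rename above removes a parameter that shared its name with the module-level symlist.deep flag; together with the deepSymList default change in main, this is presumably what the "deep option was broken" part of the commit message refers to. A minimal illustration of the shadowing pitfall (not the tool's code; the names are made up):

    module symshadow;

    __gshared bool deep;                 // module-level flag, like symlist.deep

    void setDeepShadowed(bool deep)      // parameter shadows the module variable
    {
        symshadow.deep = deep;           // the global must be qualified; an
                                         // unqualified write would hit the parameter
    }

    void setDeepRenamed(bool ddeep)      // the patch's approach: rename the parameter
    {
        deep = ddeep;                    // unambiguous, no qualification needed
    }

    unittest
    {
        setDeepShadowed(true);
        assert(deep);
        deep = false;
        setDeepRenamed(true);
        assert(deep);
    }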
@@ -26,6 +26,8 @@ void listSymbols(const(Module) mod, AstErrors errors, bool deep = true)
     sl.serialize.writeln;
 }
 
+private:
+
 enum ListFmt
 {
     Pas,
@@ -81,7 +83,7 @@ class SymbolListBuilder(ListFmt Fmt): ASTVisitor
     {
         static if (Fmt == ListFmt.Pas)
         {
-            pasStream.put("object TSymbolList\rsymbols = <");
+            pasStream.put("object TSymbolList\rsymbols=<");
         }
         else
         {
@@ -100,10 +102,10 @@ class SymbolListBuilder(ListFmt Fmt): ASTVisitor
         static if (Fmt == ListFmt.Pas)
         {
             pasStream.put("\ritem\r");
-            pasStream.put(format("line = %d\r", error.line));
-            pasStream.put(format("col = %d\r", error.column));
-            pasStream.put(format("name = '%s'\r", patchPascalString(error.message)));
-            pasStream.put(format("symType = %s\r", type));
+            pasStream.put(format("line=%d\r", error.line));
+            pasStream.put(format("col=%d\r", error.column));
+            pasStream.put(format("name='%s'\r", patchPascalString(error.message)));
+            pasStream.put(format("symType=%s\r", type));
             pasStream.put("end");
         }
         else
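The format-string changes above (and the analogous ones in the hunks that follow) drop the spaces around '=' in the streamed Pascal-like records, which is where the "streams size" part of the commit message comes from. A rough, self-contained estimate of the per-item saving; the field values are invented:

    import std.format : format;
    import std.stdio : writeln;

    void main()
    {
        enum line = 42, col = 7;
        const before = format("\ritem\rline = %d\rcol = %d\rname = '%s'\rsymType = %s\rend",
            line, col, "missing ';'", "_error_");
        const after = format("\ritem\rline=%d\rcol=%d\rname='%s'\rsymType=%s\rend",
            line, col, "missing ';'", "_error_");
        writeln(before.length - after.length, " bytes saved per item"); // 8 here
    }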
@@ -122,7 +124,7 @@ class SymbolListBuilder(ListFmt Fmt): ASTVisitor
     {
         static if (Fmt == ListFmt.Pas)
         {
-            pasStream.put(">\rend\r\n");
+            pasStream.put(">\rend");
             return pasStream.data;
         }
         else
@@ -143,10 +145,10 @@ class SymbolListBuilder(ListFmt Fmt): ASTVisitor
         static if (Fmt == ListFmt.Pas)
         {
             pasStream.put("\ritem\r");
-            pasStream.put(format("line = %d\r", dt.name.line));
-            pasStream.put(format("col = %d\r", dt.name.column));
-            pasStream.put(format("name = '%s'\r", dt.name.text));
-            pasStream.put("symType = " ~ symbolTypeStrings[st] ~ "\r");
+            pasStream.put(format("line=%d\r", dt.name.line));
+            pasStream.put(format("col=%d\r", dt.name.column));
+            pasStream.put(format("name='%s'\r", dt.name.text));
+            pasStream.put("symType=" ~ symbolTypeStrings[st] ~ "\r");
             static if (dig) if (deep)
             {
                 pasStream.put("subs = <");
@@ -165,7 +167,7 @@ class SymbolListBuilder(ListFmt Fmt): ASTVisitor
             static if (dig) if (deep)
             {
                 JSONValue subs = parseJSON("[]");
-                JSONValue* old = jarray;
+                const JSONValue* old = jarray;
                 jarray = &subs;
                 dt.accept(this);
                 item["items"] = subs;
@@ -182,10 +184,10 @@ class SymbolListBuilder(ListFmt Fmt): ASTVisitor
         static if (Fmt == ListFmt.Pas)
         {
             pasStream.put("\ritem\r");
-            pasStream.put(format("line = %d\r", line));
-            pasStream.put(format("col = %d\r", col));
-            pasStream.put(format("name = '%s'\r", name));
-            pasStream.put("symType = " ~ symbolTypeStrings[st] ~ "\r");
+            pasStream.put(format("line=%d\r", line));
+            pasStream.put(format("col=%d\r", col));
+            pasStream.put(format("name='%s'\r", name));
+            pasStream.put("symType=" ~ symbolTypeStrings[st] ~ "\r");
             static if (dig)
             {
                 pasStream.put("subs = <");
@@ -204,7 +206,7 @@ class SymbolListBuilder(ListFmt Fmt): ASTVisitor
             static if (dig)
             {
                 JSONValue subs = parseJSON("[]");
-                JSONValue* old = jarray;
+                const JSONValue* old = jarray;
                 jarray = &subs;
                 dt.accept(this);
                 item["items"] = subs;
@@ -2,7 +2,7 @@ module todos;
 
 import
     std.stdio, std.string, std.algorithm, std.array, std.conv, std.traits,
-    std.ascii, std.range;
+    std.ascii, std.range, std.file;
 import
     dparse.lexer;
 import
@@ -10,27 +10,21 @@ import
 
 private __gshared Appender!string stream;
 
-//TODO: sdfsfd
-
 void getTodos(string[] files)
 {
     mixin(logCall);
-    //stream.reserve(2^^16);
-    stream.put("object TTodoItems\r items = <");
+    stream.reserve(32 + 256 * files.length);
+    stream.put("object TTodoItems\ritems=<");
     foreach(fname; files)
     {
-        ubyte[] source;
         StringCache cache = StringCache(StringCache.defaultBucketCount);
         LexerConfig config = LexerConfig(fname, StringBehavior.source);
-        File f = File(fname, "r");
-        foreach (buffer; f.byChunk(4096))
-            source ~= buffer;
-        f.close;
-        foreach(token; DLexer(source, config, &cache).array
+        ubyte[] source = cast(ubyte[]) std.file.read(fname);
+        foreach(ref token; DLexer(source, config, &cache).array
             .filter!((a) => a.type == tok!"comment"))
                 analyze(token, fname);
     }
-    stream.put(">\rend\r\n");
+    stream.put(">end");
     writeln(stream.data);
 }
 
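The rewritten getTodos above replaces the chunked File read with a single std.file.read of the whole source, then lexes it and keeps only comment tokens. A hedged sketch of that read path; scanComments is a hypothetical wrapper, while the real code feeds each comment token to analyze:

    import std.algorithm : filter;
    import std.array : array;
    import std.file : read;
    import dparse.lexer : DLexer, LexerConfig, StringBehavior, StringCache, tok;

    void scanComments(string fname)
    {
        StringCache cache = StringCache(StringCache.defaultBucketCount);
        LexerConfig config = LexerConfig(fname, StringBehavior.source);
        // one allocation for the whole file instead of appending 4 KiB chunks
        ubyte[] source = cast(ubyte[]) read(fname);
        foreach (token; DLexer(source, config, &cache).array
            .filter!(a => a.type == tok!"comment"))
        {
            // a real implementation would extract the TODO fields from token.text
        }
    }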
@@ -101,7 +95,7 @@ private void analyze(const(Token) token, string fname)
         fields.popFront;
         if ((front == '-' || fields.empty) && identifier.length > 2)
         {
-            string fieldContent = identifier[2..$].strip;
+            const string fieldContent = identifier[2..$].strip;
             switch(identifier[0..2].toUpper)
             {
                 default: break;
@@ -118,19 +112,17 @@ private void analyze(const(Token) token, string fname)
     if (text.length > 1 && text[$-2..$].among("*/", "+/"))
         text.length -=2;
 
-    stream.put("\r item\r");
-    stream.put(format("filename = '%s'\r", fname));
-    stream.put(format("line = '%s'\r", token.line));
-    stream.put(format("text = '%s'\r", text));
+    stream.put("\ritem\r");
+    stream.put(format("filename='%s'\r", fname));
+    stream.put(format("line='%s'\r", token.line));
+    stream.put(format("text='%s'\r", text));
     if (c.length)
-        stream.put(format("category = '%s'\r", c));
+        stream.put(format("category='%s'\r", c));
     if (a.length)
-        stream.put(format("assignee = '%s'\r", a));
+        stream.put(format("assignee='%s'\r", a));
     if (p.length)
-        stream.put(format("priority = '%s'\r", p));
+        stream.put(format("priority='%s'\r", p));
     if (s.length)
-        stream.put(format("status = '%s'\r", s));
+        stream.put(format("status='%s'\r", s));
     stream.put("end");
 }
 