Update DustMite

Commits:

* 4d53db1 dustmite: Fix incorrect path with --no-redirect
* 4414dd6 Fix parsing of empty files
* fe562e6 splitter: Improve removing of template arguments
* 6517c6e splitter: Fix usage of auto-decoding strip()
* 0263dab splitter: Remove arguments in any order
* ef5a2ed splitter: Recognize template parameters
* 3370e2c splitter: Optimize identifier token check
* 56f3122 splitter: Use tokenLookup in arg paren search
* 2546a5a splitter: Don't attempt argument reduction for D keywords
* c671d72 splitter: Add basic parameter removal
* 91ec2aa dustmite: Propagate noRemove through dependencies
* 10c1209 dustmite: Fix building on Windows
* 15693cb dustmite: Optimize lookahead
* acf667d dustmite: Improve parsing of lookahead option
* b61c5f9 dustmite: Optimize lookahead
* 7e76bb9 dustmite: Add lookahead
* 2df20e7 dustmite: Set directory via spawnShell argument, not chdir
* 44e8fa2 dustmite: Sort imports
* 1cbe15e dustmite: Refactor reduction iteration into a state machine
* 2ca8f1f dustmite: Delete old imperative code
* 54321df dustmite: Refactor strategy code from imperative style to state machines
* c15b2ca splitter: Fix compilation
* 270206c dustmite: Detect some common test program mistakes
* 4f41eec dustmite: Add --no-redirect hint on initial test failure
* cec7180 splitter: Unconditionally show load progress
This commit is contained in:
Vladimir Panteleev 2016-08-08 19:42:04 +00:00
parent e32fe5dc81
commit 48c042409e
2 changed files with 834 additions and 202 deletions

View file

@ -4,20 +4,21 @@
module dustmite; module dustmite;
import std.stdio;
import std.file;
import std.path;
import std.string;
import std.getopt;
import std.array;
import std.process;
import std.algorithm; import std.algorithm;
import std.exception; import std.array;
import std.datetime;
import std.regex;
import std.conv;
import std.ascii; import std.ascii;
import std.conv;
import std.datetime;
import std.exception;
import std.file;
import std.getopt;
import std.path;
import std.parallelism : totalCPUs;
import std.process;
import std.random; import std.random;
import std.regex;
import std.stdio;
import std.string;
import splitter; import splitter;
@ -33,7 +34,6 @@ string dirSuffix(string suffix) { return (dir.absolutePath().buildNormalizedPath
size_t maxBreadth; size_t maxBreadth;
Entity root; Entity root;
size_t origDescendants; size_t origDescendants;
bool concatPerformed;
int tests; bool foundAnything; int tests; bool foundAnything;
bool noSave, trace, noRedirect; bool noSave, trace, noRedirect;
string strategy = "inbreadth"; string strategy = "inbreadth";
@ -100,6 +100,19 @@ int main(string[] args)
bool force, dump, dumpHtml, showTimes, stripComments, obfuscate, keepLength, showHelp, noOptimize; bool force, dump, dumpHtml, showTimes, stripComments, obfuscate, keepLength, showHelp, noOptimize;
string coverageDir; string coverageDir;
string[] reduceOnly, noRemoveStr, splitRules; string[] reduceOnly, noRemoveStr, splitRules;
uint lookaheadCount;
args = args.filter!(
(arg)
{
if (arg.startsWith("-j"))
{
arg = arg[2..$];
lookaheadCount = arg.length ? arg.to!uint : totalCPUs;
return false;
}
return true;
}).array();
getopt(args, getopt(args,
"force", &force, "force", &force,
@ -119,7 +132,7 @@ int main(string[] args)
"trace", &trace, // for debugging "trace", &trace, // for debugging
"nosave|no-save", &noSave, // for research "nosave|no-save", &noSave, // for research
"nooptimize|no-optimize", &noOptimize, // for research "nooptimize|no-optimize", &noOptimize, // for research
"h|help", &showHelp "h|help", &showHelp,
); );
if (showHelp || args.length == 1 || args.length>3) if (showHelp || args.length == 1 || args.length>3)
@ -145,7 +158,8 @@ Supported options:
splitter. Can be repeated. MODE must be one of: splitter. Can be repeated. MODE must be one of:
%-(%s, %) %-(%s, %)
--no-redirect Don't redirect stdout/stderr streams of test command. --no-redirect Don't redirect stdout/stderr streams of test command.
EOS", args[0], splitterNames); -j[N] Use N look-ahead processes (%d by default)
EOS", args[0], splitterNames, totalCPUs);
if (!showHelp) if (!showHelp)
{ {
@ -217,6 +231,7 @@ EOS");
applyNoRemoveMagic(); applyNoRemoveMagic();
applyNoRemoveRegex(noRemoveStr, reduceOnly); applyNoRemoveRegex(noRemoveStr, reduceOnly);
applyNoRemoveDeps();
if (coverageDir) if (coverageDir)
loadCoverage(coverageDir); loadCoverage(coverageDir);
if (!obfuscate && !noOptimize) if (!obfuscate && !noOptimize)
@ -241,7 +256,21 @@ EOS");
enforce(!exists(resultDir), "Result directory already exists"); enforce(!exists(resultDir), "Result directory already exists");
if (!test(nullReduction)) if (!test(nullReduction))
throw new Exception("Initial test fails"); {
auto testerFile = dir.buildNormalizedPath(tester);
version (Posix)
{
if (testerFile.exists && (testerFile.getAttributes() & octal!111) == 0)
writeln("Hint: test program seems to be a non-executable file, try: chmod +x " ~ testerFile.escapeShellFileName());
}
if (!testerFile.exists && tester.exists)
writeln("Hint: test program path should be relative to the source directory, try " ~
tester.absolutePath.relativePath(dir.absolutePath).escapeShellFileName() ~
" instead of " ~ tester.escapeShellFileName());
throw new Exception("Initial test fails" ~ (noRedirect ? "" : " (try using --no-redirect for details)"));
}
lookaheadProcesses = new Lookahead[lookaheadCount];
foundAnything = false; foundAnything = false;
if (obfuscate) if (obfuscate)
@ -314,24 +343,118 @@ size_t countFiles(Entity e)
} }
} }
/// Try reductions at address. Edit set, save result and return true on successful reduction.
bool testAddress(size_t[] address) struct ReductionIterator
{ {
auto e = entityAt(address); Strategy strategy;
bool done = false;
bool concatPerformed;
Reduction.Type type = Reduction.Type.None;
Entity e;
this(Strategy strategy)
{
this.strategy = strategy;
next(false);
if (countFiles(root) < 2)
concatPerformed = true;
}
this(this)
{
strategy = strategy.dup;
}
@property Reduction front() { return Reduction(type, strategy.front, e); }
void next(bool success)
{
while (true)
{
final switch (type)
{
case Reduction.Type.None:
if (strategy.done)
{
done = true;
return;
}
e = entityAt(strategy.front);
if (e.noRemove)
{
strategy.next(false);
continue;
}
if (e is root && !root.children.length) if (e is root && !root.children.length)
return false; {
strategy.next(false);
continue;
}
// Try next reduction type
type = Reduction.Type.Remove;
return;
case Reduction.Type.Remove:
if (success)
{
// Next node
type = Reduction.Type.None;
strategy.next(true);
continue;
}
// Try next reduction type
type = Reduction.Type.Unwrap;
if (e.head.length && e.tail.length)
return; // Try this
else else
if (tryReduction(Reduction(Reduction.Type.Remove, address, e))) {
return true; success = false; // Skip
continue;
}
case Reduction.Type.Unwrap:
if (success)
{
// Next node
type = Reduction.Type.None;
strategy.next(true);
continue;
}
// Try next reduction type
type = Reduction.Type.Concat;
if (e.isFile && !concatPerformed)
return; // Try this
else else
if (e.head.length && e.tail.length && tryReduction(Reduction(Reduction.Type.Unwrap, address, e))) {
return true; success = false; // Skip
else continue;
if (e.isFile && !concatPerformed && tryReduction(Reduction(Reduction.Type.Concat, address, e))) }
return concatPerformed = true;
else case Reduction.Type.Concat:
return false; if (success)
concatPerformed = true;
// Next node
type = Reduction.Type.None;
strategy.next(success);
continue;
case Reduction.Type.ReplaceWord:
assert(false);
}
}
}
} }
void resetProgress() void resetProgress()
@ -339,72 +462,248 @@ void resetProgress()
origDescendants = root.descendants; origDescendants = root.descendants;
} }
void testLevel(int testDepth, out bool tested, out bool changed) class Strategy
{ {
tested = changed = false; uint progressGeneration = 0;
resetProgress(); bool done = false;
enum MAX_DEPTH = 1024; void copy(Strategy result) const
size_t[MAX_DEPTH] address; {
result.progressGeneration = this.progressGeneration;
result.done = this.done;
}
void scan(Entity e, int depth) abstract @property size_t[] front();
abstract void next(bool success);
int getIteration() { return -1; }
int getDepth() { return -1; }
final Strategy dup()
{ {
if (depth < testDepth) auto result = cast(Strategy)this.classinfo.create();
{ copy(result);
// recurse return result;
foreach_reverse (i, c; e.children)
{
address[depth] = i;
scan(c, depth+1);
} }
} }
else
if (e.noRemove) class SimpleStrategy : Strategy
{ {
// skip, but don't stop going deeper size_t[] address;
tested = true;
override void copy(Strategy target) const
{
super.copy(target);
auto result = cast(SimpleStrategy)target;
result.address = this.address.dup;
} }
override @property size_t[] front()
{
assert(!done, "Done");
return address;
}
override void next(bool success)
{
assert(!done, "Done");
}
}
class IterativeStrategy : SimpleStrategy
{
int iteration = 0;
bool iterationChanged;
override int getIteration() { return iteration; }
override void copy(Strategy target) const
{
super.copy(target);
auto result = cast(IterativeStrategy)target;
result.iteration = this.iteration;
result.iterationChanged = this.iterationChanged;
}
override void next(bool success)
{
super.next(success);
iterationChanged |= success;
}
void nextIteration()
{
assert(iterationChanged, "Starting new iteration after no changes");
iteration++;
iterationChanged = false;
address = null;
progressGeneration++;
}
}
/// Populate address[] with the first address found at depth address.length
/// under root (children are scanned last-to-first, matching the reducer's
/// right-to-left traversal order).
/// Returns false when no node exists at that depth.
bool findAddressAtLevel(size_t[] address, Entity root)
{
    if (address.length == 0)
        return true; // target depth reached - root itself is the node
    foreach_reverse (childIndex, childNode; root.children)
        if (findAddressAtLevel(address[1..$], childNode))
        {
            address[0] = childIndex;
            return true;
        }
    return false;
}
/// Find the next address at the depth of address.length,
/// and update address[] accordingly.
/// Return false if no more addresses at that level could be found.
/// Iteration proceeds from higher indices to lower ones (right-to-left),
/// mirroring findAddressAtLevel's foreach_reverse search order.
bool nextAddressInLevel(size_t[] address, lazy Entity root)
{
// Zero-length address denotes the root itself - nothing follows it at depth 0.
if (!address.length)
return false;
// First try advancing within the subtree of the current child.
if (nextAddressInLevel(address[1..$], root.children[address[0]]))
return true;
// Current child exhausted; if it is already the leftmost (index 0), this level is done here.
if (!address[0])
return false;
// Otherwise move to the next sibling to the left that has a node at the remaining depth.
foreach_reverse (i; 0..address[0])
{
if (findAddressAtLevel(address[1..$], root.children[i]))
{
address[0] = i;
return true;
}
}
return false;
}
/// Find the next address, starting from the given one
/// (going depth-first). Update address accordingly.
/// If descend is false, then skip addresses under the given one.
/// Return false if no more addresses could be found.
/// Note: sibling order is descending (children.length-1 down to 0),
/// consistent with the other traversal helpers in this file.
bool nextAddress(ref size_t[] address, lazy Entity root, bool descend)
{
if (!address.length)
{
// Empty address = root itself; the only place to go is down into its last child.
if (descend && root.children.length)
{
address ~= [root.children.length-1];
return true;
}
return false;
}
// Try to advance within the current child's subtree first (depth-first).
auto cdr = address[1..$];
if (nextAddress(cdr, root.children[address[0]], descend))
{
address = address[0] ~ cdr;
return true;
}
// Subtree exhausted: move to the preceding sibling (indices go downwards).
if (address[0])
{
address = [address[0] - 1];
return true;
}
return false;
}
/// Sanity-check that every index in address refers to an existing child,
/// i.e. the address denotes a real node under root.
void validateAddress(size_t[] address, Entity root = root)
{
    // Walk the tree iteratively instead of recursing; asserts are identical.
    foreach (index; address)
    {
        assert(index < root.children.length);
        root = root.children[index];
    }
}
class LevelStrategy : IterativeStrategy
{
bool levelChanged;
bool invalid;
override int getDepth() { return cast(int)address.length; }
override void copy(Strategy target) const
{
super.copy(target);
auto result = cast(LevelStrategy)target;
result.levelChanged = this.levelChanged;
result.invalid = this.invalid;
}
override void next(bool success)
{
super.next(success);
levelChanged |= success;
}
override void nextIteration()
{
super.nextIteration();
invalid = false;
levelChanged = false;
}
final bool nextInLevel()
{
assert(!invalid, "Choose a level!");
if (nextAddressInLevel(address, root))
return true;
else else
{ {
// test invalid = true;
tested = true; return false;
if (testAddress(address[0..depth]))
changed = true;
} }
} }
scan(root, 0); final @property size_t currentLevel() const { return address.length; }
//writefln("Scan results: tested=%s, changed=%s", tested, changed); final bool setLevel(size_t level)
}
void startIteration(int iterCount)
{ {
writefln("############### ITERATION %d ################", iterCount); address.length = level;
resetProgress(); if (findAddressAtLevel(address, root))
{
invalid = false;
levelChanged = false;
progressGeneration++;
return true;
}
else
return false;
}
} }
/// Keep going deeper until we find a successful reduction. /// Keep going deeper until we find a successful reduction.
/// When found, finish tests at current depth and restart from top depth (new iteration). /// When found, finish tests at current depth and restart from top depth (new iteration).
/// If we reach the bottom (depth with no nodes on it), we're done. /// If we reach the bottom (depth with no nodes on it), we're done.
void reduceCareful() class CarefulStrategy : LevelStrategy
{ {
bool tested; override void next(bool success)
int iterCount;
do
{ {
startIteration(iterCount++); super.next(success);
bool changed;
int depth = 0;
do
{
writefln("============= Depth %d =============", depth);
testLevel(depth, tested, changed); if (!nextInLevel())
{
depth++; // End of level
} while (tested && !changed); // go deeper while we found something to test, but no results if (levelChanged)
} while (tested); // stop when we didn't find anything to test {
nextIteration();
}
else
if (!setLevel(currentLevel + 1))
{
if (iterationChanged)
nextIteration();
else
done = true;
}
}
}
} }
/// Keep going deeper until we find a successful reduction. /// Keep going deeper until we find a successful reduction.
@ -413,39 +712,48 @@ void reduceCareful()
/// Once no new reductions are found at higher depths, jump to the next unvisited depth in this iteration. /// Once no new reductions are found at higher depths, jump to the next unvisited depth in this iteration.
/// If we reach the bottom (depth with no nodes on it), start a new iteration. /// If we reach the bottom (depth with no nodes on it), start a new iteration.
/// If we finish an iteration without finding any reductions, we're done. /// If we finish an iteration without finding any reductions, we're done.
void reduceLookback() class LookbackStrategy : LevelStrategy
{ {
bool iterationChanged; size_t maxLevel = 0;
int iterCount;
do override void copy(Strategy target) const
{ {
iterationChanged = false; super.copy(target);
startIteration(iterCount++); auto result = cast(LookbackStrategy)target;
result.maxLevel = this.maxLevel;
}
int depth = 0, maxDepth = 0; override void nextIteration()
bool depthTested;
do
{ {
writefln("============= Depth %d =============", depth); super.nextIteration();
bool depthChanged; maxLevel = 0;
}
testLevel(depth, depthTested, depthChanged); override void next(bool success)
if (depthChanged)
{ {
iterationChanged = true; super.next(success);
depth--;
if (depth < 0) if (!nextInLevel())
depth = 0; {
// End of level
if (levelChanged)
{
setLevel(currentLevel ? currentLevel - 1 : 0);
}
else
if (setLevel(maxLevel + 1))
{
maxLevel = currentLevel;
} }
else else
{ {
maxDepth++; if (iterationChanged)
depth = maxDepth; nextIteration();
else
done = true;
}
}
} }
} while (depthTested); // keep going up/down while we found something to test
} while (iterationChanged); // stop when we couldn't reduce anything this iteration
} }
/// Keep going deeper until we find a successful reduction. /// Keep going deeper until we find a successful reduction.
@ -454,66 +762,52 @@ void reduceLookback()
/// Once no new reductions are found at higher depths, start going downwards again. /// Once no new reductions are found at higher depths, start going downwards again.
/// If we reach the bottom (depth with no nodes on it), start a new iteration. /// If we reach the bottom (depth with no nodes on it), start a new iteration.
/// If we finish an iteration without finding any reductions, we're done. /// If we finish an iteration without finding any reductions, we're done.
void reducePingPong() class PingPongStrategy : LevelStrategy
{ {
bool iterationChanged; override void next(bool success)
int iterCount;
do
{ {
iterationChanged = false; super.next(success);
startIteration(iterCount++);
int depth = 0; if (!nextInLevel())
bool depthTested;
do
{ {
writefln("============= Depth %d =============", depth); // End of level
bool depthChanged; if (levelChanged)
testLevel(depth, depthTested, depthChanged);
if (depthChanged)
{ {
iterationChanged = true; setLevel(currentLevel ? currentLevel - 1 : 0);
depth--;
if (depth < 0)
depth = 0;
} }
else else
depth++; if (!setLevel(currentLevel + 1))
} while (depthTested); // keep going up/down while we found something to test {
} while (iterationChanged); // stop when we couldn't reduce anything this iteration if (iterationChanged)
nextIteration();
else
done = true;
}
}
}
} }
/// Keep going deeper. /// Keep going deeper.
/// If we reach the bottom (depth with no nodes on it), start a new iteration. /// If we reach the bottom (depth with no nodes on it), start a new iteration.
/// If we finish an iteration without finding any reductions, we're done. /// If we finish an iteration without finding any reductions, we're done.
void reduceInBreadth() class InBreadthStrategy : LevelStrategy
{ {
bool iterationChanged; override void next(bool success)
int iterCount;
do
{ {
iterationChanged = false; super.next(success);
startIteration(iterCount++);
int depth = 0; if (!nextInLevel())
bool depthTested;
do
{ {
writefln("============= Depth %d =============", depth); // End of level
bool depthChanged; if (!setLevel(currentLevel + 1))
{
testLevel(depth, depthTested, depthChanged); if (iterationChanged)
nextIteration();
if (depthChanged) else
iterationChanged = true; done = true;
}
depth++; }
} while (depthTested); // keep going down while we found something to test }
} while (iterationChanged); // stop when we couldn't reduce anything this iteration
} }
/// Look at every entity in the tree. /// Look at every entity in the tree.
@ -521,63 +815,75 @@ void reduceInBreadth()
/// Otherwise, recurse and look at its children. /// Otherwise, recurse and look at its children.
/// End an iteration once we looked at an entire tree. /// End an iteration once we looked at an entire tree.
/// If we finish an iteration without finding any reductions, we're done. /// If we finish an iteration without finding any reductions, we're done.
void reduceInDepth() class InDepthStrategy : IterativeStrategy
{ {
bool changed; final bool nextAddress(bool descend)
int iterCount;
do
{ {
changed = false; return .nextAddress(address, root, descend);
startIteration(iterCount++);
enum MAX_DEPTH = 1024;
size_t[MAX_DEPTH] address;
void scan(Entity e, int depth)
{
if (e.noRemove)
{
// skip, but don't stop going deeper
} }
override void next(bool success)
{
super.next(success);
if (!nextAddress(!success))
{
if (iterationChanged)
nextIteration();
else else
{ done = true;
// test }
if (testAddress(address[0..depth]))
{
changed = true;
return;
} }
} }
// recurse ReductionIterator iter;
foreach_reverse (i, c; e.children)
void reduceByStrategy(Strategy strategy)
{ {
address[depth] = i; int lastIteration = -1;
scan(c, depth+1); int lastDepth = -1;
int lastProgressGeneration = -1;
iter = ReductionIterator(strategy);
while (!iter.done)
{
if (lastIteration != strategy.getIteration())
{
writefln("############### ITERATION %d ################", strategy.getIteration());
lastIteration = strategy.getIteration();
} }
if (lastDepth != strategy.getDepth())
{
writefln("============= Depth %d =============", strategy.getDepth());
lastDepth = strategy.getDepth();
}
if (lastProgressGeneration != strategy.progressGeneration)
{
resetProgress();
lastProgressGeneration = strategy.progressGeneration;
} }
scan(root, 0); auto result = tryReduction(iter.front);
} while (changed && root.children.length); // stop when we couldn't reduce anything this iteration
iter.next(result);
}
} }
void reduce() void reduce()
{ {
if (countFiles(root) < 2)
concatPerformed = true;
switch (strategy) switch (strategy)
{ {
case "careful": case "careful":
return reduceCareful(); return reduceByStrategy(new CarefulStrategy());
case "lookback": case "lookback":
return reduceLookback(); return reduceByStrategy(new LookbackStrategy());
case "pingpong": case "pingpong":
return reducePingPong(); return reduceByStrategy(new PingPongStrategy());
case "indepth": case "indepth":
return reduceInDepth(); return reduceByStrategy(new InDepthStrategy());
case "inbreadth": case "inbreadth":
return reduceInBreadth(); return reduceByStrategy(new InBreadthStrategy());
default: default:
throw new Exception("Unknown strategy"); throw new Exception("Unknown strategy");
} }
@ -1016,6 +1322,23 @@ else
alias toHexString formatHash; alias toHexString formatHash;
} }
/// State of one background (speculative) test job.
struct Lookahead
{
// Child process running the test script; null when this slot is free.
Pid pid;
// Temporary directory holding the saved reduction being tested.
string testdir;
// Hash of the reduction under test; used as the result cache key.
HASH digest;
}
// Pool of lookahead slots; sized from the -j option in main().
Lookahead[] lookaheadProcesses;
// Results of finished lookahead jobs, keyed by reduction digest.
bool[HASH] lookaheadResults;
// Predicted outcome used to speculate past untested reductions.
// Always pessimistic ("test will fail") for now.
bool lookaheadPredict() { return false; }
// Platform null-device path for silencing redirected test output.
version (Windows)
enum nullFileName = "nul";
else
enum nullFileName = "/dev/null";
bool[HASH] cache; bool[HASH] cache;
bool test(Reduction reduction) bool test(Reduction reduction)
@ -1068,25 +1391,114 @@ bool test(Reduction reduction)
return fallback; return fallback;
} }
// Speculative parallel testing: keep lookaheadProcesses slots busy testing
// reductions the iterator is predicted to visit next, and answer the current
// test from those results when possible. Falls back to `fallback` (the
// regular synchronous test) when lookahead is inactive or has no answer.
// NOTE(review): `digest` (used below) and `reduction` are resolved from the
// enclosing test() scope - confirm against the full file.
bool lookahead(lazy bool fallback)
{
if (iter.strategy)
{
// Handle existing lookahead jobs
// Record a finished job's result (exit status 0 = test passed),
// clean up its directory, and free the slot.
bool reap(ref Lookahead process, int status)
{
safeDelete(process.testdir);
process.pid = null;
return lookaheadResults[process.digest] = status == 0;
}
// Non-blocking poll: collect any jobs that have already finished.
foreach (ref process; lookaheadProcesses)
{
if (process.pid)
{
auto waitResult = process.pid.tryWait();
if (waitResult.terminated)
reap(process, waitResult.status);
}
}
// Start new lookahead jobs
// Copy the iterator and advance it speculatively, using predicted
// outcomes, to find reductions worth testing in the background.
auto lookaheadIter = iter;
if (!lookaheadIter.done)
lookaheadIter.next(lookaheadPredict());
foreach (ref process; lookaheadProcesses)
{
if (!process.pid && !lookaheadIter.done)
{
while (true)
{
auto reduction = lookaheadIter.front;
auto digest = hash(reduction);
// Skip reductions whose result is already known (cache),
// already computed by a finished job, or being computed.
if (digest in cache || digest in lookaheadResults || lookaheadProcesses[].canFind!(p => p.digest == digest))
{
bool prediction;
if (digest in cache)
prediction = cache[digest];
else
if (digest in lookaheadResults)
prediction = lookaheadResults[digest];
else
prediction = lookaheadPredict();
lookaheadIter.next(prediction);
if (lookaheadIter.done)
break;
continue;
}
// Free slot + untested reduction: save it to a unique
// directory and launch the test script in the background.
process.digest = digest;
static int counter;
process.testdir = dirSuffix("lookahead.%d".format(counter++));
save(reduction, process.testdir);
auto nul = File(nullFileName, "w+");
process.pid = spawnShell(tester, nul, nul, nul, null, Config.none, process.testdir);
lookaheadIter.next(lookaheadPredict());
break;
}
}
}
// Find a result for the current test.
auto plookaheadResult = digest in lookaheadResults;
if (plookaheadResult)
{
writeln(*plookaheadResult ? "Yes" : "No", " (lookahead)");
return *plookaheadResult;
}
// The current test may be in flight in a background slot: block on it.
foreach (ref process; lookaheadProcesses)
{
if (process.pid && process.digest == digest)
{
// Current test is already being tested in the background, wait for its result.
auto exitCode = process.pid.wait();
auto result = reap(process, exitCode);
writeln(result ? "Yes" : "No", " (lookahead-wait)");
return result;
}
}
}
// Lookahead disabled or no answer available - run the test normally.
return fallback;
}
bool doTest() bool doTest()
{ {
string testdir = dirSuffix("test"); string testdir = dirSuffix("test");
measure!"testSave"({save(reduction, testdir);}); scope(exit) measure!"clean"({safeDelete(testdir);}); measure!"testSave"({save(reduction, testdir);}); scope(exit) measure!"clean"({safeDelete(testdir);});
auto lastdir = getcwd(); scope(exit) chdir(lastdir);
chdir(testdir);
Pid pid; Pid pid;
if (noRedirect) if (noRedirect)
pid = spawnShell(tester); pid = spawnShell(tester, null, Config.none, testdir);
else else
{ {
File nul; auto nul = File(nullFileName, "w+");
version (Windows) pid = spawnShell(tester, nul, nul, nul, null, Config.none, testdir);
nul.open("nul", "w+");
else
nul.open("/dev/null", "w+");
pid = spawnShell(tester, nul, nul, nul);
} }
bool result; bool result;
@ -1095,7 +1507,7 @@ bool test(Reduction reduction)
return result; return result;
} }
auto result = ramCached(diskCached(doTest())); auto result = ramCached(diskCached(lookahead(doTest())));
if (trace) saveTrace(reduction, dirSuffix("trace"), result); if (trace) saveTrace(reduction, dirSuffix("trace"), result);
return result; return result;
} }
@ -1227,6 +1639,36 @@ void applyNoRemoveRegex(string[] noRemoveStr, string[] reduceOnly)
} }
} }
/// Propagate the noRemove flag through entity dependencies:
/// anything a non-removable node depends on becomes non-removable too,
/// and the flag then bubbles up to every ancestor of a flagged node.
void applyNoRemoveDeps()
{
    // Flag a node and, transitively, everything in its dependency closure.
    static void markTree(Entity e)
    {
        e.noRemove = true;
        foreach (dep; e.dependencies)
            markTree(dep);
    }

    // Walk the whole tree; wherever noRemove is already set, spread it
    // through that node's dependencies.
    static void findMarked(Entity e)
    {
        if (e.noRemove)
            markTree(e);
        foreach (child; e.children)
            findMarked(child);
    }
    findMarked(root);

    // Propagate upwards: a parent containing any non-removable
    // descendant must itself not be removed.
    static bool propagateUp(Entity e)
    {
        foreach (child; e.children)
            e.noRemove |= propagateUp(child);
        return e.noRemove;
    }
    propagateUp(root);
}
void loadCoverage(string dir) void loadCoverage(string dir)
{ {
void scanFile(Entity f) void scanFile(Entity f)

View file

@ -14,7 +14,7 @@ import std.path;
import std.range; import std.range;
import std.string; import std.string;
import std.traits; import std.traits;
debug import std.stdio; import std.stdio : stderr;
/// Represents a slice of the original code. /// Represents a slice of the original code.
class Entity class Entity
@ -44,18 +44,19 @@ class Entity
this.tail = tail; this.tail = tail;
} }
string[] comments;
@property string comment() @property string comment()
{ {
string[] result = comments;
if (isPair) if (isPair)
{ {
assert(token == DSplitter.Token.none); assert(token == DSplitter.Token.none);
return "Pair"; result ~= "Pair";
} }
else if (token && DSplitter.tokenText[token])
if (token) result ~= DSplitter.tokenText[token];
return DSplitter.tokenText[token]; return result.length ? result.join(" / ") : null;
else
return null;
} }
override string toString() override string toString()
@ -161,6 +162,7 @@ private:
/// Override std.string nonsense, which does UTF-8 decoding /// Override std.string nonsense, which does UTF-8 decoding
bool startsWith(in char[] big, in char[] small) { return big.length >= small.length && big[0..small.length] == small; } bool startsWith(in char[] big, in char[] small) { return big.length >= small.length && big[0..small.length] == small; }
bool startsWith(in char[] big, char c) { return big.length && big[0] == c; } bool startsWith(in char[] big, char c) { return big.length && big[0] == c; }
/// ASCII-only strip: trims leading and trailing whitespace without UTF-8
/// auto-decoding (shadows std.string.strip, which decodes).
string strip(string s)
{
    size_t start = 0, end = s.length;
    while (start < end && isWhite(s[start]))
        start++;
    while (end > start && isWhite(s[end - 1]))
        end--;
    return s[start .. end];
}
immutable ParseRule[] defaultRules = immutable ParseRule[] defaultRules =
[ [
@ -171,7 +173,7 @@ immutable ParseRule[] defaultRules =
Entity loadFile(string name, string path, ParseOptions options) Entity loadFile(string name, string path, ParseOptions options)
{ {
debug writeln("Loading ", path); stderr.writeln("Loading ", path);
auto result = new Entity(); auto result = new Entity();
result.filename = name.replace(`\`, `/`); result.filename = name.replace(`\`, `/`);
result.contents = cast(string)read(path); result.contents = cast(string)read(path);
@ -811,6 +813,20 @@ struct DSplitter
} }
} }
// Link a template-instantiation operator to its argument list: for each
// "!" token immediately followed by an entity whose first child is "(",
// create an empty dependency node shared by both, so the reducer can only
// remove the "!" and its "(...)" arguments together. The dependency node
// is also spliced into the entity list between the two.
static void postProcessTemplates(ref Entity[] entities)
{
if (!entities.length)
return;
// Iterate in reverse so that inserting into `entities` does not
// disturb the indices still to be visited.
foreach_reverse (i, e; entities[0..$-1])
if (e.token == tokenLookup["!"] && entities[i+1].children.length && entities[i+1].children[0].token == tokenLookup["("])
{
auto dependency = new Entity;
e.dependencies ~= dependency;
entities[i+1].children[0].dependencies ~= dependency;
entities = entities[0..i+1] ~ dependency ~ entities[i+1..$];
}
}
static void postProcessDependencyBlock(ref Entity[] entities) static void postProcessDependencyBlock(ref Entity[] entities)
{ {
foreach (i, e; entities) foreach (i, e; entities)
@ -941,13 +957,60 @@ struct DSplitter
postProcessParens(e.children); postProcessParens(e.children);
} }
static void postProcess(ref Entity[] entities) static bool isValidIdentifier(string s)
{
if (!s.length)
return false;
if (!isAlpha(s[0]))
return false;
foreach (c; s[1..$])
if (!isAlphaNum(c))
return false;
return true;
}
/// Get all nodes between (exclusively) two addresses.
/// If either address is empty, then the respective bound is the respective extreme.
/// Addresses are child-index paths from root (as elsewhere in this file).
static Entity[] nodesBetween(Entity root, size_t[] a, size_t[] b)
{
// Descend past the common prefix of both addresses - the interval
// lies entirely within that shared subtree.
while (a.length && b.length && a[0] == b[0])
{
root = root.children[a[0]];
a = a[1..$];
b = b[1..$];
}
size_t index0, index1;
Entity[] children0, children1;
if (a.length)
{
// Left bound is exclusive: whole children strictly after a[0],
// plus the part of a[0]'s subtree that lies after the rest of `a`.
index0 = a[0] + 1;
if (a.length > 1)
children0 = nodesBetween(root.children[a[0]], a[1..$], null);
}
else
index0 = 0; // empty left address = start from the first child
if (b.length)
{
// Right bound is exclusive: whole children strictly before b[0],
// plus the part of b[0]'s subtree that lies before the rest of `b`.
index1 = b[0];
if (b.length > 1)
children1 = nodesBetween(root.children[b[0]], null, b[1..$]);
}
else
index1 = root.children.length; // empty right address = up to the last child
assert(index0 <= index1);
return children0 ~ root.children[index0 .. index1] ~ children1;
}
static void postProcessRecursive(ref Entity[] entities)
{ {
foreach (e; entities) foreach (e; entities)
if (e.children.length) if (e.children.length)
postProcess(e.children); postProcessRecursive(e.children);
postProcessSimplify(entities); postProcessSimplify(entities);
postProcessTemplates(entities);
postProcessDependency(entities); postProcessDependency(entities);
postProcessBlockKeywords(entities); postProcessBlockKeywords(entities);
postProcessDependencyBlock(entities); postProcessDependencyBlock(entities);
@ -956,6 +1019,133 @@ struct DSplitter
postProcessParens(entities); postProcessParens(entities);
} }
/// Attempt to link together function arguments / parameters for
/// things that look like calls to the same function, to allow removing
/// unused function arguments / parameters.
/// Attempt to link together function arguments / parameters for
/// things that look like calls to the same function, to allow removing
/// unused function arguments / parameters.
/// For every identifier immediately followed by a "(...)" group, the
/// comma-separated argument positions are collected per identifier; each
/// position then gets a shared dependency node, so the reducer removes
/// the N-th argument of every call site (and the N-th parameter) together.
static void postProcessArgs(ref Entity[] entities)
{
// Identifier token seen immediately before the current entity, if any.
string lastID;
// For each identifier: a list of argument positions, each holding the
// single-node arguments found at that position across all call sites.
Entity[][][string] calls;
void visit(Entity entity)
{
auto id = entity.head.strip();
// A bare identifier token: remember it as a potential callee name.
if (entity.token == Token.other && isValidIdentifier(id) && !entity.tail && !entity.children)
lastID = id;
else
// An identifier directly followed by a paren group: treat as a call.
if (lastID && entity.token == tokenLookup["("])
{
size_t[] stack;
// addr: address of a top-level comma; after: address of the first node following it.
struct Comma { size_t[] addr, after; }
Comma[] commas;
bool afterComma;
// Find all top-level commas
void visit2(size_t i, Entity entity)
{
stack ~= i;
if (afterComma)
{
commas[$-1].after = stack;
//entity.comments ~= "After-comma %d".format(commas.length);
afterComma = false;
}
if (entity.token == tokenLookup[","])
{
commas ~= Comma(stack);
//entity.comments ~= "Comma %d".format(commas.length);
afterComma = true;
}
else
// Nodes with their own text are opaque; don't descend into them.
if (entity.head.length || entity.tail.length)
{}
else
foreach (j, child; entity.children)
visit2(j, child);
stack = stack[0..$-1];
}
foreach (i, child; entity.children)
visit2(i, child);
// Find all nodes between commas, effectively obtaining the arguments
size_t[] last = null;
// Sentinel comma with empty addresses = "up to the end".
commas ~= [Comma()];
Entity[][] args;
foreach (i, comma; commas)
{
//Entity entityAt(Entity root, size_t[] address) { return address.length ? entityAt(root.children[address[0]], address[1..$]) : root; }
//entityAt(entity, last).comments ~= "nodesBetween-left %d".format(i);
//entityAt(entity, comma.after).comments ~= "nodesBetween-right %d".format(i);
args ~= nodesBetween(entity, last, comma.after);
last = comma.addr;
}
// Register the arguments
foreach (i, arg; args)
{
debug
foreach (j, e; arg)
e.comments ~= "%s arg %d node %d".format(lastID, i, j);
// Only single-node arguments are linked; multi-node arguments
// are ambiguous to remove as a unit, so they are skipped.
if (arg.length == 1)
{
if (lastID !in calls)
calls[lastID] = null;
while (calls[lastID].length < i+1)
calls[lastID] ~= null;
calls[lastID][i] ~= arg[0];
}
}
lastID = null;
return;
}
else
// "!" between identifier and parens (template syntax) keeps lastID alive.
if (entity.token == tokenLookup["!"])
{}
else
// Any other textual node breaks the identifier-paren adjacency.
if (entity.head || entity.tail)
lastID = null;
foreach (child; entity.children)
visit(child);
}
foreach (entity; entities)
visit(entity);
// For each parameter, create a dummy empty node which is a dependency for all of the arguments.
auto callRoot = new Entity();
debug callRoot.comments ~= "Args root";
entities ~= callRoot;
foreach (id, params; calls)
{
auto funRoot = new Entity();
debug funRoot.comments ~= "%s root".format(id);
callRoot.children ~= funRoot;
foreach (i, args; params)
{
auto e = new Entity();
debug e.comments ~= "%s param %d".format(id, i);
funRoot.children ~= e;
// Removing this empty node forces removal of every linked argument.
foreach (arg; args)
arg.dependencies ~= e;
}
}
}
// Top-level post-processing entry point. Order matters: the recursive
// structural passes must run before argument linking, which relies on
// the paren/token structure they establish.
static void postProcess(ref Entity[] entities)
{
postProcessRecursive(entities);
postProcessArgs(entities);
}
static Entity* firstHead(ref Entity e) static Entity* firstHead(ref Entity e)
{ {
if (e.head.length) if (e.head.length)