Add unit test runner.

This will allow using the compiler as a library to implement more
unit-test-like tests. These tests will be able to inspect the
internals of the compiler, enabling new kinds of tests that are not
possible today.

Unit tests live in the `test/unit` directory. They are written using
the built-in `unittest` blocks. The unit test framework supports
callbacks executed before and after each test. The unit test runner
allows limiting the tests executed, either by file(s) and/or by UDAs.
Example:

```d
module self_test;

import support : afterEach, beforeEach;

@beforeEach initializeFrontend()
{
    import dmd.frontend : initDMD;
    initDMD();
}

@afterEach deinitializeFrontend()
{
    import dmd.frontend : deinitializeDMD;
    deinitializeDMD();
}

@("self test")
unittest
{
    import std.algorithm : each;
    import dmd.frontend;

    findImportPaths.each!addImport;

    auto t = parseModule("test.d", q{
        int a = 3;
    });

    assert(!t.diagnostics.hasErrors);
    assert(!t.diagnostics.hasWarnings);
}
```

* To run all unit tests, run: `./run.d -u`
* To run only the unit tests in a single file, run: `./run.d -u unit/self_test.d`
* To run only the unit tests matching a UDA, run: `./run.d -u --filter "self test"`
This commit is contained in:
Jacob Carlborg 2018-07-31 09:32:23 +02:00
parent e9920a3ab9
commit f80c3f3a85
9 changed files with 692 additions and 95 deletions

View file

@ -173,7 +173,7 @@ check_clean_git()
check_run_individual()
{
local build_path=generated/linux/release/$MODEL
"${build_path}/dmd" -i -run ./test/run.d test/runnable/template2962.d ./test/compilable/test14275.d
"${build_path}/dmd" -I./test -i -run ./test/run.d test/runnable/template2962.d ./test/compilable/test14275.d
}
# Checks the D build.d script

View file

@ -33,19 +33,14 @@ subPackage {
preGenerateCommands `
"$${DUB_EXE}" \
--arch=$${DUB_ARCH} \
--compiler=$${DC} \
--single "$${DUB_PACKAGE_DIR}config.d" \
-- "$${DUB_PACKAGE_DIR}generated/dub" \
"$${DUB_PACKAGE_DIR}VERSION" \
/etc
` platform="posix"
preGenerateCommands `
"%DUB_EXE%" ^
--arch=%DUB_ARCH% ^
--single "%DUB_PACKAGE_DIR%config.d" ^
-- "%DUB_PACKAGE_DIR%generated/dub" ^
"%DUB_PACKAGE_DIR%VERSION"
` platform="windows"
preGenerateCommands `"%DUB_EXE%" --arch=%DUB_ARCH% --compiler="%DC%" --single "%DUB_PACKAGE_DIR%config.d" -- "%DUB_PACKAGE_DIR%generated/dub" "%DUB_PACKAGE_DIR%VERSION"` platform="windows"
stringImportPaths "generated/dub"

View file

@ -158,7 +158,11 @@ $(RESULTS_DIR)/.created:
$(QUIET)if [ ! -d $(RESULTS_DIR)/fail_compilation ]; then mkdir $(RESULTS_DIR)/fail_compilation; fi
$(QUIET)touch $(RESULTS_DIR)/.created
run_tests: start_runnable_tests start_compilable_tests start_fail_compilation_tests
run_tests: unit_tests start_runnable_tests start_compilable_tests start_fail_compilation_tests
unit_tests: $(RESULTS_DIR)/unit_test_runner$(EXE)
@echo "Running unit tests"
$<
run_runnable_tests: $(runnable_test_results)
@ -193,3 +197,9 @@ $(RESULTS_DIR)/sanitize_json$(EXE): tools/sanitize_json.d $(RESULTS_DIR)/.create
@echo "PIC: '$(PIC_FLAG)'"
$(DMD) -conf= $(MODEL_FLAG) $(DEBUG_FLAGS) -od$(RESULTS_DIR) -of$(RESULTS_DIR)$(DSEP)sanitize_json$(EXE) -i $<
$(RESULTS_DIR)/unit_test_runner$(EXE): tools/unit_test_runner.d $(RESULTS_DIR)/.created | $(DMD)
@echo "Building unit_test_runner tool"
@echo "OS: '$(OS)'"
@echo "MODEL: '$(MODEL)'"
@echo "PIC: '$(PIC_FLAG)'"
$(DMD) -conf= $(MODEL_FLAG) $(DEBUG_FLAGS) -od$(RESULTS_DIR) -of$(RESULTS_DIR)$(DSEP)unit_test_runner$(EXE) -i $<

View file

@ -65,6 +65,64 @@ Note:
- `AUTO_UPDATE` doesn't work with tests that have multiple `TEST_OUTPUT` segments
- `AUTO_UPDATE` can be set as an environment variable or as Makefile-like argument assignment
### Running the Unit Tests
The unit tests will automatically run when all tests are run using `./run.d` or
`make`. To only run the unit tests the `./run.d unit_tests` command can be used.
For finer-grained control over the unit tests, the `./run.d -u` command can
be used:
To run all unit tests:
```sh
./run.d -u
```
To only run the unit tests in one or more specific files:
```sh
./run.d -u unit/deinitialization.d
```
To only run a subset of the unit tests in a single file:
```sh
./run.d -u unit/deinitialization.d --filter Expression
```
In the above example, the `--filter` flag will filter to only run the tests with
a UDA matching the given value, in this case `Expression`.
```d
@("Target.deinitialize")
unittest {}
@("Expression.deinitialize")
unittest {}
```
Of the above unit tests, only the second one will be run, since
`--filter Expression` was specified.
The `--filter` flag works when no files are specified as well.
## Types of Tests
There are two types of tests in the DMD test suite:
* **End-to-end tests**. These are tests that invoke the compiler as an external
process in some way. They then assert either the exit code or the output
of the compiler. These tests are located in `compilable`, `fail_compilation` and
`runnable`.
* **Unit tests**. These are unit-, integration- or functional-style tests.
These tests use the compiler as a library. They
are more flexible because they can assert state internal to the compiler which
the end-to-end tests would never have access to. The unit test runner will
compile all files in the `unit` directory into a single executable and run the
tests. This should make it quick to run the tests, since only a single process
needs to be started.
Makefile targets
----------------
@ -74,6 +132,7 @@ Makefile targets
run_runnable_tests: run just the runnable tests
run_compilable_tests: run just the compilable tests
run_fail_compilation_tests: run just the fail compilation tests
unit_tests: run all unit tests (those in the "unit" directory)
quick: run all tests with no default permuted args
(individual test specified options still honored)

View file

@ -12,9 +12,12 @@ See the README.md for all available test targets
*/
import std.algorithm, std.conv, std.datetime, std.exception, std.file, std.format,
std.getopt, std.parallelism, std.path, std.process, std.range, std.stdio, std.string;
std.getopt, std.parallelism, std.path, std.process, std.range, std.stdio,
std.string, std.traits;
import core.stdc.stdlib : exit;
import tools.paths;
const scriptDir = __FILE_FULL_PATH__.dirName.buildNormalizedPath;
auto testPath(R)(R path) { return buildNormalizedPath(scriptDir, path); }
string resultsDir = testPath("test_results");
@ -22,14 +25,38 @@ immutable testDirs = ["runnable", "compilable", "fail_compilation"];
shared bool verbose; // output verbose logging
shared bool force; // always run all tests (ignores timestamp checking)
shared string hostDMD; // path to host DMD binary (used for building the tools)
shared string unitTestRunnerCommand;
void main(string[] args)
enum toolsDir = testPath("tools");
enum TestTools
{
unitTestRunner = TestTool("unit_test_runner", [toolsDir.buildPath("paths")]),
testRunner = TestTool("d_do_test"),
jsonSanitizer = TestTool("sanitize_json")
}
immutable struct TestTool
{
/// The name of the tool.
string name;
/// Extra arguments that should be supplied to the compiler when compiling the tool.
string[] extraArgs;
alias name this;
}
int main(string[] args)
{
bool runUnitTests;
int jobs = totalCPUs;
auto res = getopt(args,
std.getopt.config.passThrough,
"j|jobs", "Specifies the number of jobs (commands) to run simultaneously (default: %d)".format(totalCPUs), &jobs,
"v", "Verbose command output", (cast(bool*) &verbose),
"f", "Force run (ignore timestamps and always run all tests)", (cast(bool*) &force),
"u|unit-tests", "Runs the unit tests", &runUnitTests
);
if (res.helpWanted)
{
@ -42,11 +69,12 @@ Examples:
./run.d fail_compilation # runs all tests in fail_compilation
./run.d all # runs all tests
./run.d clean # remove all test results
./run.d -u -- unit/deinitialization.d -f Module # runs the unit tests in the file "unit/deinitialization.d" with a UDA containing "Module"
Options:
`, res.options);
"\nSee the README.md for a more in-depth explanation of the test-runner.".writeln;
return;
return 0;
}
// parse arguments
@ -56,28 +84,30 @@ Options:
// allow overwrites from the environment
resultsDir = environment.get("RESULTS_DIR", resultsDir);
hostDMD = environment.get("HOST_DMD", "dmd");
unitTestRunnerCommand = resultsDir.buildPath("unit_test_runner");
// bootstrap all needed environment variables
auto env = getEnvironment;
if (runUnitTests)
{
verifyCompilerExists(env);
ensureToolsExists(TestTools.unitTestRunner);
return spawnProcess(unitTestRunnerCommand ~ args).wait();
}
// default target
if (!args.length)
args = ["all"];
alias normalizeTestName = f => f.absolutePath.dirName.baseName.buildPath(f.baseName);
auto targets = args
.predefinedTargets // preprocess
.map!normalizeTestName
.array
.filterTargets(env);
if (targets.length > 0)
{
if (!env["DMD"].exists)
{
stderr.writefln("%s doesn't exist, try building dmd with:\nmake -fposix.mak -j8 -C%s", env["DMD"], scriptDir.dirName.relativePath);
exit(1);
}
verifyCompilerExists(env);
if (verbose)
{
@ -90,15 +120,26 @@ Options:
int ret;
auto taskPool = new TaskPool(jobs);
scope(exit) taskPool.finish();
ensureToolsExists;
ensureToolsExists(EnumMembers!TestTools);
foreach (target; taskPool.parallel(targets, 1))
{
auto args = [resultsDir.buildPath("d_do_test"), target];
log("run: %-(%s %)", args);
ret |= spawnProcess(args, env, Config.none, scriptDir).wait;
log("run: %-(%s %)", target.args);
ret |= spawnProcess(target.args, env, Config.none, scriptDir).wait;
}
if (ret)
exit(1);
return 1;
}
return 0;
}
/// Verify that the compiler has been built.
void verifyCompilerExists(string[string] env)
{
if (!env["DMD"].exists)
{
stderr.writefln("%s doesn't exist, try building dmd with:\nmake -fposix.mak -j8 -C%s", env["DMD"], scriptDir.dirName.relativePath);
exit(1);
}
}
@ -106,23 +147,24 @@ Options:
Builds the binary of the tools required by the testsuite.
Does nothing if the tools already exist and are newer than their source.
*/
void ensureToolsExists()
void ensureToolsExists(const TestTool[] tools ...)
{
static toolsDir = testPath("tools");
resultsDir.mkdirRecurse;
auto tools = [
"d_do_test",
"sanitize_json",
];
foreach (tool; tools.parallel(1))
{
auto targetBin = resultsDir.buildPath(tool).exeName;
auto sourceFile = toolsDir.buildPath(tool ~ ".d");
const targetBin = resultsDir.buildPath(tool).exeName;
const sourceFile = toolsDir.buildPath(tool ~ ".d");
if (targetBin.timeLastModified.ifThrown(SysTime.init) >= sourceFile.timeLastModified)
writefln("%s is already up-to-date", tool);
else
{
auto command = [hostDMD, "-of"~targetBin, sourceFile];
const command = [
hostDMD,
"-of"~targetBin,
sourceFile
] ~ tool.extraArgs;
writefln("Executing: %-(%s %)", command);
spawnProcess(command).wait;
}
@ -133,6 +175,45 @@ void ensureToolsExists()
resultsDir.buildPath(dir).mkdirRecurse;
}
/// A single target to execute.
immutable struct Target
{
/**
The filename of the target.
Might be `null` if the target is not for a single file.
*/
string filename;
/// The arguments how to execute the target.
string[] args;
/// Returns: the normalized test name
static string normalizedTestName(string filename)
{
return filename
.absolutePath
.dirName
.baseName
.buildPath(filename.baseName);
}
string normalizedTestName()
{
return Target.normalizedTestName(filename);
}
/// Returns: `true` if the test exists
bool exists()
{
// This is assumed to be the `unit_tests` target which always exists
if (filename.empty)
return true;
return testPath(normalizedTestName).exists;
}
}
/**
Goes through the target list and replaces short-hand targets with their expanded version.
Special targets:
@ -145,7 +226,26 @@ auto predefinedTargets(string[] targets)
return testPath(dir).dirEntries("*{.d,.sh}", SpanMode.shallow).map!(e => e.name);
}
Appender!(string[]) newTargets;
static Target createUnitTestTarget()
{
Target target = { args: [unitTestRunnerCommand] };
return target;
}
static Target createTestTarget(string filename)
{
Target target = {
filename: filename,
args: [
resultsDir.buildPath(TestTools.testRunner.name),
Target.normalizedTestName(filename)
]
};
return target;
}
Appender!(Target[]) newTargets;
foreach (t; targets)
{
t = t.buildNormalizedPath; // remove trailing slashes
@ -157,51 +257,55 @@ auto predefinedTargets(string[] targets)
break;
case "run_runnable_tests", "runnable":
newTargets.put(findFiles("runnable"));
newTargets.put(findFiles("runnable").map!createTestTarget);
break;
case "run_fail_compilation_tests", "fail_compilation", "fail":
newTargets.put(findFiles("fail_compilation"));
newTargets.put(findFiles("fail_compilation").map!createTestTarget);
break;
case "run_compilable_tests", "compilable", "compile":
newTargets.put(findFiles("compilable"));
newTargets.put(findFiles("compilable").map!createTestTarget);
break;
case "all":
newTargets ~= createUnitTestTarget();
foreach (testDir; testDirs)
newTargets.put(findFiles(testDir));
newTargets.put(findFiles(testDir).map!createTestTarget);
break;
case "unit_tests":
newTargets ~= createUnitTestTarget();
break;
default:
newTargets ~= t;
newTargets ~= createTestTarget(t);
}
}
return newTargets.data;
}
// Removes targets that do not need updating (i.e. their .out file exists and is newer than the source file)
auto filterTargets(string[] targets, string[string] env)
auto filterTargets(Target[] targets, string[string] env)
{
bool error;
foreach (target; targets)
{
if (!testPath(target).exists)
if (!target.exists)
{
writefln("Warning: %s can't be found", target);
writefln("Warning: %s can't be found", target.normalizedTestName);
error = true;
}
}
if (error)
exit(1);
string[] targetsThatNeedUpdating;
Target[] targetsThatNeedUpdating;
foreach (t; targets)
{
auto resultRunTime = resultsDir.buildPath(t ~ ".out").timeLastModified.ifThrown(SysTime.init);
if (!force && resultRunTime > testPath(t).timeLastModified &&
immutable testName = t.normalizedTestName;
auto resultRunTime = resultsDir.buildPath(testName ~ ".out").timeLastModified.ifThrown(SysTime.init);
if (!force && resultRunTime > testPath(testName).timeLastModified &&
resultRunTime > env["DMD"].timeLastModified.ifThrown(SysTime.init))
writefln("%s is already up-to-date", t);
writefln("%s is already up-to-date", testName);
else
targetsThatNeedUpdating ~= t;
}
@ -250,14 +354,18 @@ string[string] getEnvironment()
string[string] env;
env["RESULTS_DIR"] = resultsDir;
auto os = env.getDefault("OS", detectOS);
auto build = env.getDefault("BUILD", "release");
env["OS"] = os;
env["MODEL"] = model;
env["BUILD"] = build;
env["EXE"] = exeExtension;
env["DMD"] = dmdPath;
env.getDefault("DMD_TEST_COVERAGE", "0");
const generatedSuffix = "generated/%s/%s/%s".format(os, build, dmdModel);
version(Windows)
{
env.getDefault("ARGS", "-inline -release -g -O");
auto exe = env["EXE"] = ".exe";
env["OBJ"] = ".obj";
env["DSEP"] = `\\`;
env["SEP"] = `\`;
@ -265,32 +373,16 @@ string[string] getEnvironment()
auto phobosPath = environment.get("PHOBOS_PATH", testPath(`..\..\phobos`));
env["DFLAGS"] = `-I%s\import -I%s`.format(druntimePath, phobosPath);
env["LIB"] = phobosPath;
// auto-tester might run the testsuite with a different $(MODEL) than DMD
// has been compiled with. Hence we manually check which binary exists.
// For windows the $(OS) during build is: `windows`
int dmdModel = testPath(`..\generated\windows\%s\64\dmd%s`.format(build, exe)).exists ? 64 : 32;
env.getDefault("MODEL", dmdModel.text);
env["DMD"] = testPath(`..\generated\windows\%s\%d\dmd%s`.format(build, dmdModel, exe));
}
else
{
env.getDefault("ARGS", "-inline -release -g -O -fPIC");
env["EXE"] = "";
env["OBJ"] = ".o";
env["DSEP"] = "/";
env["SEP"] = "/";
auto druntimePath = environment.get("DRUNTIME_PATH", testPath(`../../druntime`));
auto phobosPath = environment.get("PHOBOS_PATH", testPath(`../../phobos`));
// auto-tester might run the testsuite with a different $(MODEL) than DMD
// has been compiled with. Hence we manually check which binary exists.
const dmdModel = testPath("../generated/%s/%s/64/dmd".format(os, build)).exists ? 64 : 32;
env.getDefault("MODEL", dmdModel.text);
auto generatedSuffix = "generated/%s/%s/%s".format(os, build, dmdModel);
env["DMD"] = testPath("../" ~ generatedSuffix ~ "/dmd");
// default to PIC on x86_64, use PIC=1/0 to en-/disable PIC.
// Note that shared libraries and C files are always compiled with PIC.
bool pic;
@ -317,35 +409,6 @@ string[string] getEnvironment()
return env;
}
/*
Detects the host OS.
Returns: a string from `{windows, osx,linux,freebsd,openbsd,netbsd,dragonflybsd,solaris}`
*/
string detectOS()
{
version(Windows)
return "windows";
else version(OSX)
return "osx";
else version(linux)
return "linux";
else version(FreeBSD)
return "freebsd";
else version(OpenBSD)
return "openbsd";
else version(NetBSD)
return "netbsd";
else version(DragonFlyBSD)
return "dragonflybsd";
else version(Solaris)
return "solaris";
else version(SunOS)
return "solaris";
else
static assert(0, "Unrecognized or unsupported OS.");
}
// Logging primitive
auto log(T...)(T args)
{

76
test/tools/paths.d Normal file
View file

@ -0,0 +1,76 @@
module tools.paths;
import std.file : exists;
import std.path : buildNormalizedPath, buildPath, dirName, setExtension;
import std.process : environment;
// File extension of an executable on the host OS.
version (Posix)
enum exeExtension = "";
else version (Windows)
enum exeExtension = ".exe";
// Name of the host OS, matching the directory names used by the build system.
version (Windows)
enum os = "windows";
else version (OSX)
enum os = "osx";
else version (linux)
enum os = "linux";
else version (FreeBSD)
enum os = "freebsd";
else version (OpenBSD)
enum os = "openbsd";
else version (NetBSD)
enum os = "netbsd";
else version (DragonFlyBSD)
enum os = "dragonflybsd";
else version (Solaris)
enum os = "solaris";
else version (SunOS)
enum os = "solaris";
else
static assert(0, "Unrecognized or unsupported OS.");
/// Absolute, normalized path to the root of the DMD repository.
enum projectRootDir = __FILE_FULL_PATH__.dirName.buildNormalizedPath("..", "..");
/// The directory build artifacts are placed in.
enum generatedDir = projectRootDir.buildPath("generated");
/// The directory test results are written to.
enum resultsDir = testPath("test_results");
/// The filename of the compiler binary, including the OS-specific extension.
enum dmdFilename = "dmd".setExtension(exeExtension);
/// Returns: the given path resolved relative to the `test` directory.
alias testPath = path => projectRootDir.buildPath("test", path);
/// Returns: the build type to test ("release" by default), read from the
/// `BUILD` environment variable on the first call and cached afterwards.
string build()
{
    static string cached;

    if (!cached)
        cached = environment.get("BUILD", "release");

    return cached;
}
/// Returns: the directory the compiler under test was built into,
/// i.e. `generated/<os>/<build>/<model>`. Cached after the first call.
string buildOutputPath()
{
    static string cached;

    if (!cached)
        cached = generatedDir.buildPath(os, build, dmdModel);

    return cached;
}
// The auto-tester might run the test suite with a different $(MODEL) than
// DMD has been compiled with. Hence we manually check which binary exists.
/// Returns: "64" if a 64-bit DMD binary exists in the build output
/// directory, "32" otherwise. Cached after the first call.
string dmdModel()
{
    static string cached;

    if (!cached)
    {
        const prefix = generatedDir.buildPath(os, build);
        cached = prefix.buildPath("64", dmdFilename).exists ? "64" : "32";
    }

    return cached;
}
/// Returns: the model to test, from the `MODEL` environment variable,
/// falling back to the model of the existing DMD binary. Cached.
string model()
{
    static string cached;

    if (!cached)
        cached = environment.get("MODEL", dmdModel);

    return cached;
}
/// Returns: the full path to the compiler binary under test. Cached.
string dmdPath()
{
    static string cached;

    if (!cached)
        cached = buildOutputPath.buildPath(dmdFilename);

    return cached;
}

340
test/tools/unit_test_runner.d Executable file
View file

@ -0,0 +1,340 @@
#!/usr/bin/env rdmd
module unit_test_runner;
import std.algorithm : filter, map, joiner, substitute;
import std.array : array, join;
import std.conv : to;
import std.exception : enforce;
import std.file : dirEntries, exists, SpanMode, mkdirRecurse, write;
import std.format : format;
import std.getopt : getopt;
import std.path : absolutePath, buildPath, dirSeparator, stripExtension,
setExtension;
import std.process : environment, spawnProcess, spawnShell, wait;
import std.range : empty;
import std.stdio;
import std.string : join, outdent;
import tools.paths;
/// The directory containing the unit test source files.
enum unitTestDir = testPath("unit");
/// Where the separately compiled `strtold` object file is placed (see `buildStrtold`).
enum strtoldObjPath = resultsDir.buildPath("strtold.obj");
/**
Resolves which test files to compile.

Params:
    givenFiles = test files explicitly given on the command line; may be empty

Returns: the given files mapped through `testPath`, or, when none were
given, every `*.d` file found recursively in the unit test directory.
*/
string[] testFiles(Range)(Range givenFiles)
{
    if (givenFiles.empty)
    {
        return unitTestDir
            .dirEntries("*.d", SpanMode.depth)
            .map!(e => e.name)
            .array;
    }

    return givenFiles.map!(testPath).array;
}
/**
Converts the given test file paths to module names.

Params:
    testFiles = paths of the test files, all located under `unitTestDir`

Returns: a lazy range of module names, e.g. `<unitTestDir>/foo/bar.d`
becomes `foo.bar`
*/
auto moduleNames(const string[] testFiles)
{
return testFiles
// strip the leading `unitTestDir` plus the trailing path separator
.map!(e => e[unitTestDir.length + 1 .. $])
.map!stripExtension
.array
// turn path separators into dots to form module names
.map!(e => e.substitute(dirSeparator, "."));
}
/**
Writes the unit test runner source file to the given `path`.

The generated file statically imports all of the given modules and installs
a custom unit tester (via `Runtime.extendedModuleUnitTester`) that executes
the `unittest` blocks, running any `support.beforeEach`/`support.afterEach`
callbacks around each test. Tests with a string UDA run only if the UDA
contains `filter`; tests without a UDA run only when `filter` is empty.

Params:
    moduleNames = the names of the modules to include in the generated runner
    path = the path where to write the runner file
    filter = a string used to filter the tests to run by their UDA
*/
void writeRunnerFile(Range)(Range moduleNames, string path, string filter)
{
// Token string holding the runner's source. `%%` sequences are escapes for
// the outer `format!codeTemplate` call below; no comments may be added
// inside the braces, since they would end up in the generated file.
enum codeTemplate = q{
import core.runtime : Runtime, UnitTestResult;
import std.meta : AliasSeq;
// modules to unit test starts here:
%s
alias modules = AliasSeq!(
%s
);
enum filter = %s;
version(unittest) shared static this()
{
Runtime.extendedModuleUnitTester = &unitTestRunner;
}
UnitTestResult unitTestRunner()
{
import std.algorithm : canFind, each, map;
import std.conv : text;
import std.format : format;
import std.meta : Alias;
import std.range : empty, front, enumerate;
import std.stdio : writeln, writefln, stderr, stdout;
import std.string : join;
import std.traits : hasUDA, isCallable;
static import support;
alias TestCallback = void function();
struct Test
{
Throwable throwable;
string name;
string toString()
{
return format!"%%s\n%%s"(name, throwable);
}
string fileInfo()
{
with (throwable)
return format!"%%s:%%s"(file, line);
}
}
Test[] failedTests;
size_t testCount;
void printReport()
{
if (!failedTests.empty)
{
alias formatTest = t =>
format!"%%s) %%s"(t.index + 1, t.value.toString);
const failedTestsMessage = failedTests
.enumerate
.map!(formatTest)
.join("\n\n");
stderr.writefln!"Failures:\n\n%%s\n"(failedTestsMessage);
}
auto output = failedTests.empty ? stdout : stderr;
output.writefln!"%%s tests, %%s failures"(testCount, failedTests.length);
if (failedTests.empty)
return;
stderr.writefln!"\nFailed tests:\n%%s"(
failedTests.map!(t => t.fileInfo).join("\n"));
}
TestCallback[] getTestCallbacks(alias module_, alias uda)()
{
enum isMemberAccessible(string memberName) =
is(typeof(__traits(getMember, module_, memberName)));
TestCallback[] callbacks;
static foreach(mem ; __traits(allMembers, module_))
{
static if (isMemberAccessible!(mem))
{{
alias member = __traits(getMember, module_, mem);
static if (isCallable!member && hasUDA!(member, uda))
callbacks ~= &member;
}}
}
return callbacks;
}
void executeCallbacks(const TestCallback[] callbacks)
{
callbacks.each!(c => c());
}
static foreach (module_ ; modules)
{
foreach (unitTest ; __traits(getUnitTests, module_))
{
enum attributes = [__traits(getAttributes, unitTest)];
const beforeEachCallbacks = getTestCallbacks!(module_, support.beforeEach);
const afterEachCallbacks = getTestCallbacks!(module_, support.afterEach);
Test test;
try
{
static if (!attributes.empty)
{
test.name = attributes.front;
if (attributes.front.canFind(filter))
{
testCount++;
executeCallbacks(beforeEachCallbacks);
unitTest();
}
}
else static if (filter.length == 0)
{
testCount++;
executeCallbacks(beforeEachCallbacks);
unitTest();
}
}
catch (Throwable t)
{
test.throwable = t;
failedTests ~= test;
}
finally
executeCallbacks(afterEachCallbacks);
}
}
printReport();
UnitTestResult result = {
runMain: false,
executed: testCount,
passed: testCount - failedTests.length
};
return result;
}
}.outdent;
// One `static import` line per module, spliced into the template above.
const imports = moduleNames
.map!(e => format!"static import %s;"(e))
.joiner("\n")
.to!string;
// The comma-separated module list for the `AliasSeq`.
const modules = moduleNames
.map!(e => format!"%s"(e))
.joiner(",\n")
.to!string;
// Splice imports, module list and the quoted filter string into the template.
const content = format!codeTemplate(imports, modules, format!`"%s"`(filter));
write(path, content);
}
/**
Writes a cmdfile with all the compiler flags to the given `path`.
Params:
path = the path where to write the cmdfile file
runnerPath = the path of the unit test runner file outputted by `writeRunnerFile`
outputPath = the path where to place the compiled binary
testFiles = the test files to compile
*/
void writeCmdfile(string path, string runnerPath, string outputPath,
const string[] testFiles)
{
// Flags shared by all configurations. `-i` compiles imported modules as
// well; `-main` supplies an empty main (the generated runner installs its
// own unit tester via a shared static constructor).
const commonFlags = [
"-version=NoBackend",
"-version=GC",
"-version=NoMain",
"-version=MARS",
"-unittest",
"-J" ~ buildOutputPath,
"-J" ~ projectRootDir.buildPath("res"),
"-I" ~ projectRootDir.buildPath("src"),
"-I" ~ unitTestDir,
"-i",
"-g",
"-main",
"-of" ~ outputPath,
"-m" ~ model
] ~ testFiles ~ runnerPath;
// Link the separately built strtold object when the host needs it.
const flags = needsStrtold ? commonFlags ~ ("-L" ~ strtoldObjPath) : commonFlags;
// One flag per line, as expected by dmd's @cmdfile syntax.
write(path, flags.join("\n"));
}
/**
Returns `true` if any of the given files don't exist.
Also prints an error message.
*/
/**
Checks that all of the given test files exist on disk.

Prints an error message to stderr listing any files that are missing.

Returns: `true` if any of the given files don't exist
*/
bool missingTestFiles(Range)(Range givenFiles)
{
    // Collect every given file that is not present on disk.
    const missing = givenFiles
        .filter!(file => !file.exists)
        .join("\n");

    if (missing.empty)
        return false;

    stderr.writefln("The following test files don't exist:\n\n%s",
        missing);

    return true;
}
/// Runs the given command, throwing if it exits with a non-zero status.
void execute(const string[] args ...)
{
    const exitCode = spawnProcess(args).wait();
    enforce(exitCode == 0,
        "Failed to execute command: " ~ args.join(" "));
}
/**
Compiles `src/dmd/backend/strtold.c` into `strtoldObjPath` using the
compiler named by the `CC` environment variable (`cl` by default).
Does nothing unless `needsStrtold` reports the object is required.
*/
void buildStrtold()
{
if (!needsStrtold)
return;
// cl flags: /TP compiles the file as C++, /c skips linking,
// /Fo names the output object file.
const cmd = [
environment.get("CC", "cl"),
"/nologo",
"/EHsc",
"/TP",
"/c",
projectRootDir.buildPath("src", "dmd", "backend", "strtold.c"),
"/Fo" ~ strtoldObjPath,
"/I",
projectRootDir.buildPath("src", "dmd", "root")
].join(" ");
enforce(spawnShell(cmd).wait() == 0, "Failed to execute command: " ~ cmd);
}
/**
Returns: `true` if a separately compiled `strtold` object file needs to
be linked into the test runner. On Windows this is always the case when
the script is compiled with a non-DigitalMars compiler; with a
DigitalMars compiler it applies only to the "32mscoff" and "64" models.
Never needed on other platforms.
*/
bool needsStrtold()
{
version (Windows)
{
version (DigitalMars)
return model == "32mscoff" || model == "64";
return true;
}
return false;
}
/**
Entry point of the unit test runner.

Parses the `--filter`/`-f` flag and treats the remaining arguments as test
files. Generates the runner source file and a cmdfile of compiler flags,
compiles all unit tests into a single executable and runs it.

Returns: `1` if any given test file is missing, otherwise the exit code
of the compiled unit test executable
*/
int main(string[] args)
{
string unitTestFilter;
getopt(args, "filter|f", &unitTestFilter);
auto givenFiles = args[1 .. $].map!absolutePath;
if (missingTestFiles(givenFiles))
return 1;
enum runnerPath = resultsDir.buildPath("runner.d");
const testFiles = givenFiles.testFiles;
mkdirRecurse(resultsDir);
testFiles
.moduleNames
.writeRunnerFile(runnerPath, unitTestFilter);
enum cmdfilePath = resultsDir.buildPath("cmdfile");
enum outputPath = resultsDir.buildPath("runner").setExtension(exeExtension);
writeCmdfile(cmdfilePath, runnerPath, outputPath, testFiles);
buildStrtold();
// All compiler flags are passed via dmd's @cmdfile syntax.
execute(dmdPath, "@" ~ cmdfilePath);
return spawnProcess(outputPath).wait();
}

31
test/unit/self_test.d Normal file
View file

@ -0,0 +1,31 @@
/// Self test of the unit test framework itself.
module self_test;
import support : afterEach, beforeEach, defaultImportPaths;
/// Initializes the DMD frontend before each test.
@beforeEach initializeFrontend()
{
import dmd.frontend : initDMD;
initDMD();
}
/// Runs after each test.
@afterEach deinitializeFrontend()
{
// NOTE(review): deinitialization is currently disabled — confirm whether
// `deinitializeDMD` can safely be re-enabled here.
// import dmd.frontend : deinitializeDMD;
// deinitializeDMD();
}
/// Parses a trivial module and expects no errors or warnings.
@("self test")
unittest
{
import std.algorithm : each;
import dmd.frontend;
defaultImportPaths.each!addImport;
auto t = parseModule("test.d", q{
int a = 3;
});
assert(!t.diagnostics.hasErrors);
assert(!t.diagnostics.hasWarnings);
}

23
test/unit/support.d Normal file
View file

@ -0,0 +1,23 @@
/// Shared helpers for the unit tests: test-lifecycle UDAs and default paths.
module support;
/// UDA used to indicate a function should be run before each test.
enum beforeEach;
/// UDA used to indicate a function should be run after each test.
enum afterEach;
/// Returns: the default import paths, i.e. for Phobos and druntime.
string[] defaultImportPaths()
{
import std.path : buildNormalizedPath, buildPath, dirName;
import std.process : environment;
// Assumes the dmd, druntime and phobos repositories are checked out as
// siblings; overridable via the DRUNTIME_PATH/PHOBOS_PATH variables.
enum dlangDir = __FILE_FULL_PATH__.dirName.buildNormalizedPath("..", "..", "..");
enum druntimeDir = dlangDir.buildPath("druntime", "import");
enum phobosDir = dlangDir.buildPath("phobos");
return [
environment.get("DRUNTIME_PATH", druntimeDir),
environment.get("PHOBOS_PATH", phobosDir)
];
}