unit test coverage

This commit is contained in:
Hackerpilot 2013-02-14 22:24:15 +00:00
parent 9c3cde0d94
commit 102836503f
1 changed file with 2593 additions and 2441 deletions

View File

@ -119,6 +119,8 @@ import std.regex;
import std.string; import std.string;
import std.traits; import std.traits;
import std.utf; import std.utf;
version (unittest) import std.stdio;
public: public:
@ -177,10 +179,10 @@ struct Token
/** /**
* Comparison operator orders tokens by start index. * Comparison operator orders tokens by start index.
*/ */
int opCmp(size_t i) const int opCmp(ref const(Token) other) const
{ {
if (startIndex < i) return -1; if (startIndex < other.startIndex) return -1;
if (startIndex > i) return 1; if (startIndex > other.startIndex) return 1;
return 0; return 0;
} }
} }
@ -946,9 +948,13 @@ private:
case '-': case '-':
case '+': case '+':
if (foundSign || foundDigit) if (foundSign || foundDigit)
{
errorMessage("Expected an exponent");
return; return;
}
foundSign = true; foundSign = true;
keepNonNewlineChar(); keepNonNewlineChar();
break;
case '0': .. case '9': case '0': .. case '9':
case '_': case '_':
foundDigit = true; foundDigit = true;
@ -961,6 +967,8 @@ private:
lexFloatSuffix(); lexFloatSuffix();
return; return;
default: default:
if (!foundDigit)
errorMessage("Expected an exponent");
return; return;
} }
} }
@ -973,7 +981,7 @@ private:
} }
body body
{ {
bool foundDot = false; bool foundDot = buffer[0] == '.';
current.type = TokenType.intLiteral; current.type = TokenType.intLiteral;
scope(exit) setTokenValue(); scope(exit) setTokenValue();
decimalLoop: while (!isEoF()) decimalLoop: while (!isEoF())
@ -996,6 +1004,8 @@ private:
lexIntSuffix(); lexIntSuffix();
return; return;
case 'i': case 'i':
lexFloatSuffix();
return;
case 'L': case 'L':
if (foundDot) if (foundDot)
{ {
@ -1866,6 +1876,14 @@ pure nothrow bool isOperator(const TokenType t)
return t >= TokenType.assign && t <= TokenType.xorEquals; return t >= TokenType.assign && t <= TokenType.xorEquals;
} }
/**
* ditto
*/
pure nothrow bool isOperator(ref const Token t)
{
return isOperator(t.type);
}
/** /**
* Returns: true if the token is a keyword * Returns: true if the token is a keyword
*/ */
@ -1874,6 +1892,14 @@ pure nothrow bool isKeyword(const TokenType t)
return t >= TokenType.bool_ && t <= TokenType.with_; return t >= TokenType.bool_ && t <= TokenType.with_;
} }
/**
* ditto
*/
pure nothrow bool isKeyword(ref const Token t)
{
return isKeyword(t.type);
}
/** /**
* Returns: true if the token is a built-in type * Returns: true if the token is a built-in type
*/ */
@ -1882,6 +1908,14 @@ pure nothrow bool isType(const TokenType t)
return t >= TokenType.bool_ && t <= TokenType.wchar_; return t >= TokenType.bool_ && t <= TokenType.wchar_;
} }
/**
* ditto
*/
pure nothrow bool isType(ref const Token t)
{
return isType(t.type);
}
/** /**
* Returns: true if the token is an attribute * Returns: true if the token is an attribute
*/ */
@ -1890,6 +1924,14 @@ pure nothrow bool isAttribute(const TokenType t)
return t >= TokenType.align_ && t <= TokenType.static_; return t >= TokenType.align_ && t <= TokenType.static_;
} }
/**
* ditto
*/
pure nothrow bool isAttribute(ref const Token t)
{
return isAttribute(t.type);
}
/** /**
* Returns: true if the token is a protection attribute * Returns: true if the token is a protection attribute
*/ */
@ -1898,6 +1940,14 @@ pure nothrow bool isProtection(const TokenType t)
return t >= TokenType.export_ && t <= TokenType.public_; return t >= TokenType.export_ && t <= TokenType.public_;
} }
/**
* ditto
*/
pure nothrow bool isProtection(ref const Token t)
{
return isProtection(t.type);
}
/** /**
* Returns: true if the token is a compile-time constant such as ___DATE__ * Returns: true if the token is a compile-time constant such as ___DATE__
*/ */
@ -1906,6 +1956,14 @@ pure nothrow bool isConstant(const TokenType t)
return t >= TokenType.date && t <= TokenType.traits; return t >= TokenType.date && t <= TokenType.traits;
} }
/**
* ditto
*/
pure nothrow bool isConstant(ref const Token t)
{
return isConstant(t.type);
}
/** /**
* Returns: true if the token is a string or number literal * Returns: true if the token is a string or number literal
*/ */
@ -1914,6 +1972,14 @@ pure nothrow bool isLiteral(const TokenType t)
return t >= TokenType.doubleLiteral && t <= TokenType.wstringLiteral; return t >= TokenType.doubleLiteral && t <= TokenType.wstringLiteral;
} }
/**
* ditto
*/
pure nothrow bool isLiteral(ref const Token t)
{
return isLiteral(t.type);
}
/** /**
* Returns: true if the token is a number literal * Returns: true if the token is a number literal
*/ */
@ -1922,6 +1988,14 @@ pure nothrow bool isNumberLiteral(const TokenType t)
return t >= TokenType.doubleLiteral && t <= TokenType.ulongLiteral; return t >= TokenType.doubleLiteral && t <= TokenType.ulongLiteral;
} }
/**
* ditto
*/
pure nothrow bool isNumberLiteral(ref const Token t)
{
return isNumberLiteral(t.type);
}
/** /**
* Returns: true if the token is a string literal * Returns: true if the token is a string literal
*/ */
@ -1930,6 +2004,14 @@ pure nothrow bool isStringLiteral(const TokenType t)
return t >= TokenType.dstringLiteral && t <= TokenType.wstringLiteral; return t >= TokenType.dstringLiteral && t <= TokenType.wstringLiteral;
} }
/**
* ditto
*/
pure nothrow bool isStringLiteral(ref const Token t)
{
return isStringLiteral(t.type);
}
/** /**
 * Returns: true if the token is whitespace, a comment, a special token  * Returns: true if the token is whitespace, a comment, a special token
* sequence, or an identifier * sequence, or an identifier
@ -1939,6 +2021,14 @@ pure nothrow bool isMisc(const TokenType t)
return t >= TokenType.comment && t <= TokenType.specialTokenSequence; return t >= TokenType.comment && t <= TokenType.specialTokenSequence;
} }
/**
* ditto
*/
pure nothrow bool isMisc(ref const Token t)
{
return isMisc(t.type);
}
/** /**
* Listing of all the tokens in the D language. * Listing of all the tokens in the D language.
*/ */
@ -2852,7 +2942,6 @@ unittest
unittest unittest
{ {
//import std.stdio;
auto source = cast(ubyte[]) ("=@& &=| |=~=:,--/ /=$.===>> >=++{[< <=<>=<>&&||(- -=%%=*=!!=!>!>=!<!<=!<>+ +=^^^^=}]);<< <<=>> >>=..*?~!<>=>>>>>>=...^ ^="); auto source = cast(ubyte[]) ("=@& &=| |=~=:,--/ /=$.===>> >=++{[< <=<>=<>&&||(- -=%%=*=!!=!>!>=!<!<=!<>+ +=^^^^=}]);<< <<=>> >>=..*?~!<>=>>>>>>=...^ ^=");
auto expected = ["=", "@", "&", "&=", "|", "|=", "~=", auto expected = ["=", "@", "&", "&=", "|", "|=", "~=",
":", ",", "--", "/", "/=", "$", ".", "==", ":", ",", "--", "/", "/=", "$", ".", "==",
@ -2871,22 +2960,85 @@ unittest
unittest unittest
{ {
import std.stdio; auto source = cast(ubyte[]) (`
auto source = cast(ubyte[]) (q{ 1 1.2 //comment
1 1.2 1.2f 1u 1uL 0b11 0b1u 0b1 0x11001uL 1.2f 1u 1uL 0b011 0b1uu 0b1 /+abc/+def+/+/0x11001uL
}); 123e1L 123e+1f 123e-1i 15e++ 4ea 1.2u 4i 1337L 4.2L 1..2 4.3.5.8
auto expected = ["1", "1.2", "1.2f", "1u", "1uL", "0b11", "0b1u", "0b1", `);
"0x11001uL"]; auto expected = ["1", "1.2", "1.2f", "1u", "1uL", "0b011", "0b1u", "u", "0b1",
"0x11001uL", "123e1L", "123e+1f", "123e-1i", "15e+", "+", "4e", "a",
"1.2", "u", "4i", "1337L", "4.2L", "1", "..", "2", "4.3", ".5", ".8"];
int errCount = 0;
void errorFunction(string file, size_t index, uint line, uint col, string msg)
{
++errCount;
}
LexerConfig config;
config.errorFunc = &errorFunction;
auto tokens = byToken(source, config);
//writeln(tokens.map!"a.value"().array());
assert (equal(map!"a.value"(tokens), expected));
assert (errCount == 3);
}
unittest
{
auto source = cast(ubyte[]) ("int #line 4\n double q{abcde}");
LexerConfig config; LexerConfig config;
auto tokens = byToken(source, config); auto tokens = byToken(source, config);
writeln(tokens.map!"a.value"().array()); assert (tokens.front.line == 1);
assert (equal(map!"a.value"(tokens), expected)); assert (tokens.moveFront() == TokenType.int_);
assert (tokens.front.line == 4);
assert (isType(tokens.front));
assert (tokens.front.value == "double");
tokens.popFront();
assert (tokens.front.value == "abcde");
assert (isStringLiteral(tokens.front));
}
unittest
{
auto source = cast(ubyte[]) (`"string`);
int errCount = 0;
void errorFunction(string file, size_t index, uint line, uint col, string msg)
{
++errCount;
}
LexerConfig config;
config.errorFunc = &errorFunction;
auto tokens = byToken(source, config);
assert (errCount == 1);
}
unittest
{
auto source = cast(ubyte[]) ("import foo");
LexerConfig config;
auto tokens = byToken(source, config);
Token a = tokens.moveFront();
Token b = tokens.moveFront();
assert (a != b);
assert (a != "foo");
assert (a < b);
assert (b > a);
assert (!(a > a));
assert (tokens.empty);
}
unittest
{
auto source = cast(ubyte[]) ("import std.stdio; void main(){writeln(\"hello world\");}");
LexerConfig config;
auto tokens = byToken(source, config);
int tokenCount = 0;
foreach (t; tokens)
{
++tokenCount;
}
assert (tokenCount == 16);
} }
void main(string[] args) void main(string[] args)
{ {
} }