Mirror of https://github.com/dlang/dmd.git (synced 2025-04-27 05:30:13 +03:00)
add CompileEnv to lexer
commit a986508ed0
parent 77b933e7e0
20 changed files with 172 additions and 136 deletions
@@ -71,9 +71,9 @@ final class CParser(AST) : Parser!AST
 
     extern (D) this(TARGET)(AST.Module _module, const(char)[] input, bool doDocComment,
                             ErrorSink errorSink,
-                            const ref TARGET target, OutBuffer* defines) scope
+                            const ref TARGET target, OutBuffer* defines, const CompileEnv* compileEnv) scope
     {
-        super(_module, input, doDocComment, errorSink);
+        super(_module, input, doDocComment, errorSink, compileEnv);
 
         //printf("CParser.this()\n");
         mod = _module;
@@ -767,7 +767,7 @@ extern (C++) final class Module : Package
         {
             filetype = FileType.c;
 
-            scope p = new CParser!AST(this, buf, cast(bool) docfile, global.errorSink, target.c, &defines);
+            scope p = new CParser!AST(this, buf, cast(bool) docfile, global.errorSink, target.c, &defines, &global.compileEnv);
             p.nextToken();
             checkCompiledImport();
             members = p.parseModule();
@@ -776,7 +776,7 @@ extern (C++) final class Module : Package
         }
         else
         {
-            scope p = new Parser!AST(this, buf, cast(bool) docfile, global.errorSink);
+            scope p = new Parser!AST(this, buf, cast(bool) docfile, global.errorSink, &global.compileEnv);
             p.nextToken();
             p.parseModuleDeclaration();
             md = p.md;
@@ -5184,7 +5184,7 @@ private void highlightCode2(Scope* sc, Dsymbols* a, ref OutBuffer buf, size_t of
 
     scope Lexer lex = new Lexer(null, cast(char*)buf[].ptr, 0, buf.length - 1, 0, 1,
                                 global.errorSink,
-                                global.vendor, global.versionNumber());
+                                &global.compileEnv);
     OutBuffer res;
     const(char)* lastp = cast(char*)buf[].ptr;
     //printf("highlightCode2('%.*s')\n", cast(int)(buf.length - 1), buf[].ptr);
@@ -1933,7 +1933,7 @@ private extern(C++) final class DsymbolSemanticVisitor : Visitor
         const len = buf.length;
         buf.writeByte(0);
         const str = buf.extractSlice()[0 .. len];
-        scope p = new Parser!ASTCodegen(cd.loc, sc._module, str, false, global.errorSink);
+        scope p = new Parser!ASTCodegen(cd.loc, sc._module, str, false, global.errorSink, &global.compileEnv);
         p.nextToken();
 
         auto d = p.parseDeclDefs(0);
@@ -84,9 +84,9 @@ extern(C++) void genCppHdrFiles(ref Modules ms)
         m.accept(v);
 
     if (global.params.cxxhdr.fullOutput)
-        buf.printf("// Automatically generated by %s Compiler v%d", global.vendor.ptr, global.versionNumber());
+        buf.printf("// Automatically generated by %s Compiler v%d", global.compileEnv.vendor.ptr, global.versionNumber());
     else
-        buf.printf("// Automatically generated by %s Compiler", global.vendor.ptr);
+        buf.printf("// Automatically generated by %s Compiler", global.compileEnv.vendor.ptr);
 
     buf.writenl();
     buf.writenl();
@@ -823,7 +823,7 @@ private void colorHighlightCode(ref OutBuffer buf)
     if (!errorSinkNull)
         errorSinkNull = new ErrorSinkNull;
 
-    scope Lexer lex = new Lexer(null, cast(char*)buf[].ptr, 0, buf.length - 1, 0, 1, errorSinkNull, global.vendor, global.versionNumber());
+    scope Lexer lex = new Lexer(null, cast(char*)buf[].ptr, 0, buf.length - 1, 0, 1, errorSinkNull, &global.compileEnv);
     OutBuffer res;
     const(char)* lastp = cast(char*)buf[].ptr;
     //printf("colorHighlightCode('%.*s')\n", cast(int)(buf.length - 1), buf[].ptr);
@@ -6106,7 +6106,7 @@ private extern (C++) final class ExpressionSemanticVisitor : Visitor
         uint errors = global.errors;
         const len = buf.length;
         const str = buf.extractChars()[0 .. len];
-        scope p = new Parser!ASTCodegen(exp.loc, sc._module, str, false, global.errorSink);
+        scope p = new Parser!ASTCodegen(exp.loc, sc._module, str, false, global.errorSink, &global.compileEnv);
         p.nextToken();
         //printf("p.loc.linnum = %d\n", p.loc.linnum);
 
@@ -3363,6 +3363,30 @@ struct Param final
     {}
 };
 
+struct CompileEnv final
+{
+    uint32_t versionNumber;
+    _d_dynamicArray< const char > date;
+    _d_dynamicArray< const char > time;
+    _d_dynamicArray< const char > vendor;
+    _d_dynamicArray< const char > timestamp;
+    CompileEnv() :
+        versionNumber(),
+        date(),
+        time(),
+        vendor(),
+        timestamp()
+    {
+    }
+    CompileEnv(uint32_t versionNumber, _d_dynamicArray< const char > date = {}, _d_dynamicArray< const char > time = {}, _d_dynamicArray< const char > vendor = {}, _d_dynamicArray< const char > timestamp = {}) :
+        versionNumber(versionNumber),
+        date(date),
+        time(time),
+        vendor(vendor),
+        timestamp(timestamp)
+    {}
+};
+
 struct Global final
 {
     _d_dynamicArray< const char > inifilename;
@@ -3370,7 +3394,7 @@ struct Global final
     _d_dynamicArray< const char > written;
     Array<const char* >* path;
     Array<const char* >* filePath;
-    _d_dynamicArray< const char > vendor;
+    CompileEnv compileEnv;
     Param params;
     uint32_t errors;
     uint32_t warnings;
@@ -3399,7 +3423,7 @@ struct Global final
         written(24, "written by Walter Bright"),
         path(),
         filePath(),
-        vendor(),
+        compileEnv(),
         params(),
         errors(),
         warnings(),
@@ -3416,13 +3440,13 @@ struct Global final
         preprocess()
     {
     }
-    Global(_d_dynamicArray< const char > inifilename, _d_dynamicArray< const char > copyright = { 73, "Copyright (C) 1999-2023 by The D Language Foundation, All Rights Reserved" }, _d_dynamicArray< const char > written = { 24, "written by Walter Bright" }, Array<const char* >* path = nullptr, Array<const char* >* filePath = nullptr, _d_dynamicArray< const char > vendor = {}, Param params = Param(), uint32_t errors = 0u, uint32_t warnings = 0u, uint32_t gag = 0u, uint32_t gaggedErrors = 0u, uint32_t gaggedWarnings = 0u, void* console = nullptr, Array<Identifier* >* versionids = nullptr, Array<Identifier* >* debugids = nullptr, bool hasMainFunction = false, uint32_t varSequenceNumber = 1u, FileManager* fileManager = nullptr, ErrorSink* errorSink = nullptr, FileName(*preprocess)(FileName , const Loc& , bool& , OutBuffer* ) = nullptr) :
+    Global(_d_dynamicArray< const char > inifilename, _d_dynamicArray< const char > copyright = { 73, "Copyright (C) 1999-2023 by The D Language Foundation, All Rights Reserved" }, _d_dynamicArray< const char > written = { 24, "written by Walter Bright" }, Array<const char* >* path = nullptr, Array<const char* >* filePath = nullptr, CompileEnv compileEnv = CompileEnv(), Param params = Param(), uint32_t errors = 0u, uint32_t warnings = 0u, uint32_t gag = 0u, uint32_t gaggedErrors = 0u, uint32_t gaggedWarnings = 0u, void* console = nullptr, Array<Identifier* >* versionids = nullptr, Array<Identifier* >* debugids = nullptr, bool hasMainFunction = false, uint32_t varSequenceNumber = 1u, FileManager* fileManager = nullptr, ErrorSink* errorSink = nullptr, FileName(*preprocess)(FileName , const Loc& , bool& , OutBuffer* ) = nullptr) :
         inifilename(inifilename),
         copyright(copyright),
         written(written),
         path(path),
         filePath(filePath),
-        vendor(vendor),
+        compileEnv(compileEnv),
         params(params),
         errors(errors),
         warnings(warnings),
@@ -11,7 +11,10 @@
 
 module dmd.globals;
 
+import core.stdc.stdio;
+import core.stdc.stdint;
+import core.stdc.string;
 
 import dmd.root.array;
 import dmd.root.filename;
 import dmd.common.outbuffer;
@@ -20,6 +23,8 @@ import dmd.errors;
 import dmd.file_manager;
 import dmd.identifier;
 import dmd.location;
+import dmd.lexer : CompileEnv;
+import dmd.utils;
 
 /// Defines a setting for how compiler warnings and deprecations are handled
 enum DiagnosticReporting : ubyte
@@ -270,9 +275,7 @@ extern (C++) struct Global
     Array!(const(char)*)* filePath; /// Array of char*'s which form the file import lookup path
 
     private enum string _version = import("VERSION");
-    private enum uint _versionNumber = parseVersionNumber(_version);
-
-    const(char)[] vendor;   /// Compiler backend name
+    CompileEnv compileEnv;
 
     Param params;           /// command line parameters
     uint errors;            /// number of errors reported so far
@@ -350,12 +353,12 @@ extern (C++) struct Global
 
     extern (C++) void _init()
     {
-        global.errorSink = new ErrorSinkCompiler;
+        errorSink = new ErrorSinkCompiler;
 
        this.fileManager = new FileManager();
        version (MARS)
        {
-           vendor = "Digital Mars D";
+           compileEnv.vendor = "Digital Mars D";
 
            // -color=auto is the default value
            import dmd.console : detectTerminal;
@@ -363,8 +366,38 @@ extern (C++) struct Global
        }
        else version (IN_GCC)
        {
-           vendor = "GNU D";
+           compileEnv.vendor = "GNU D";
        }
+       compileEnv.versionNumber = parseVersionNumber(_version);
+
+       /* Initialize date, time, and timestamp
+        */
+       import core.stdc.time;
+       import core.stdc.stdlib : getenv;
+
+       time_t ct;
+       // https://issues.dlang.org/show_bug.cgi?id=20444
+       if (auto p = getenv("SOURCE_DATE_EPOCH"))
+       {
+           if (!ct.parseDigits(p[0 .. strlen(p)]))
+               errorSink.error(Loc.initial, "value of environment variable `SOURCE_DATE_EPOCH` should be a valid UNIX timestamp, not: `%s`", p);
+       }
+       else
+           core.stdc.time.time(&ct);
+       const p = ctime(&ct);
+       assert(p);
+
+       __gshared char[11 + 1] date = 0;        // put in BSS segment
+       __gshared char[8 + 1] time = 0;
+       __gshared char[24 + 1] timestamp = 0;
+
+       const dsz = snprintf(&date[0], date.length, "%.6s %.4s", p + 4, p + 20);
+       const tsz = snprintf(&time[0], time.length, "%.8s", p + 11);
+       const tssz = snprintf(&timestamp[0], timestamp.length, "%.24s", p);
+       assert(dsz > 0 && tsz > 0 && tssz > 0);
+       compileEnv.time = time[0 .. tsz];
+       compileEnv.date = date[0 .. dsz];
+       compileEnv.timestamp = timestamp[0 .. tssz];
    }
 
    /**
@@ -415,7 +448,7 @@ extern (C++) struct Global
     */
    extern(C++) uint versionNumber()
    {
-       return _versionNumber;
+       return compileEnv.versionNumber;
    }
 
    /**
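With the hunks above, `Global._init` now both sets `compileEnv.vendor`/`versionNumber` and computes the `__DATE__`/`__TIME__`/`__TIMESTAMP__` strings up front (honouring SOURCE_DATE_EPOCH), so the values are ready before any Lexer is constructed. A quick, hypothetical way to inspect the result, assuming only `global`, `_init` and the `compileEnv` fields shown above; this helper is not part of the commit and the printf formatting is illustrative only:

    // Hypothetical inspection helper, not part of this commit.
    import core.stdc.stdio : printf;
    import dmd.globals : global;

    void dumpCompileEnv()
    {
        global._init();   // populates global.compileEnv as in the hunk above

        printf("__VERSION__   = %u\n", global.compileEnv.versionNumber);
        printf("__VENDOR__    = %.*s\n",
               cast(int) global.compileEnv.vendor.length, global.compileEnv.vendor.ptr);
        printf("__DATE__      = %.*s\n",
               cast(int) global.compileEnv.date.length, global.compileEnv.date.ptr);
        printf("__TIMESTAMP__ = %.*s\n",
               cast(int) global.compileEnv.timestamp.length, global.compileEnv.timestamp.ptr);
    }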
@@ -252,6 +252,15 @@ const DString hdr_ext = "di"; // for D 'header' import files
 const DString json_ext = "json"; // for JSON files
 const DString map_ext = "map";   // for .map files
 
+struct CompileEnv
+{
+    uint32_t versionNumber;
+    DString date;
+    DString time;
+    DString vendor;
+    DString timestamp;
+};
+
 struct Global
 {
     DString inifilename;
@@ -261,7 +270,7 @@ struct Global
     Array<const char *> *path;     // Array of char*'s which form the import lookup path
     Array<const char *> *filePath; // Array of char*'s which form the file import lookup path
 
-    DString vendor;                // Compiler backend name
+    CompileEnv compileEnv;
 
     Param params;
     unsigned errors;               // number of errors reported so far
@@ -302,7 +302,7 @@ Ldone:
 extern (C++) public Statement gccAsmSemantic(GccAsmStatement s, Scope *sc)
 {
     //printf("GccAsmStatement.semantic()\n");
-    scope p = new Parser!ASTCodegen(sc._module, ";", false, global.errorSink);
+    scope p = new Parser!ASTCodegen(sc._module, ";", false, global.errorSink, &global.compileEnv);
 
     // Make a safe copy of the token list before parsing.
     Token *toklist = null;
@@ -410,7 +410,7 @@ unittest
 {
     const errors = global.errors;
     scope gas = new GccAsmStatement(Loc.initial, tokens);
-    scope p = new Parser!ASTCodegen(null, ";", false, global.errorSink);
+    scope p = new Parser!ASTCodegen(null, ";", false, global.errorSink, &global.compileEnv);
     p.token = *tokens;
     p.parseGccAsm(gas);
     return global.errors - errors;
@@ -420,7 +420,7 @@ unittest
 static void parseAsm(string input, bool expectError)
 {
     // Generate tokens from input test.
-    scope p = new Parser!ASTCodegen(null, input, false, global.errorSink);
+    scope p = new Parser!ASTCodegen(null, input, false, global.errorSink, &global.compileEnv);
     p.nextToken();
 
     Token* toklist = null;
@@ -833,7 +833,7 @@ public:
     {
         import dmd.target : target;
         objectStart();
-        requiredProperty("vendor", global.vendor);
+        requiredProperty("vendor", global.compileEnv.vendor);
         requiredProperty("version", global.versionString());
         property("__VERSION__", global.versionNumber());
         requiredProperty("interface", determineCompilerInterface());
@@ -1070,13 +1070,13 @@ Determines and returns the compiler interface which is one of `dmd`, `ldc`,
 */
 private extern(D) string determineCompilerInterface()
 {
-    if (global.vendor == "Digital Mars D")
+    if (global.compileEnv.vendor == "Digital Mars D")
         return "dmd";
-    if (global.vendor == "LDC")
+    if (global.compileEnv.vendor == "LDC")
         return "ldc";
-    if (global.vendor == "GNU D")
+    if (global.compileEnv.vendor == "GNU D")
         return "gdc";
-    if (global.vendor == "SDC")
+    if (global.compileEnv.vendor == "SDC")
         return "sdc";
     return null;
 }
@@ -14,12 +14,8 @@
 module dmd.lexer;
 
 import core.stdc.ctype;
-import core.stdc.errno;
-import core.stdc.stdarg;
 import core.stdc.stdio;
-import core.stdc.stdlib : getenv;
 import core.stdc.string;
-import core.stdc.time;
 
 import dmd.entity;
 import dmd.errorsink;
@@ -31,10 +27,8 @@ import dmd.root.ctfloat;
 import dmd.common.outbuffer;
 import dmd.root.port;
 import dmd.root.rmem;
-import dmd.root.string;
 import dmd.root.utf;
 import dmd.tokens;
-import dmd.utils;
 
 nothrow:
 
@@ -43,6 +37,18 @@ version (DMDLIB)
     version = LocOffset;
 }
 
+/***********************************************************
+ * Values to use for various magic identifiers
+ */
+struct CompileEnv
+{
+    uint versionNumber;        /// __VERSION__
+    const(char)[] date;        /// __DATE__
+    const(char)[] time;        /// __TIME__
+    const(char)[] vendor;      /// __VENDOR__
+    const(char)[] timestamp;   /// __TIMESTAMP__
+}
+
 /***********************************************************
  */
 class Lexer
|
|||
int lastDocLine; // last line of previous doc comment
|
||||
|
||||
Token* tokenFreelist;
|
||||
uint versionNumber;
|
||||
const(char)[] vendor;
|
||||
|
||||
CompileEnv compileEnv; // environment
|
||||
}
|
||||
|
||||
nothrow:
|
||||
|
@@ -105,13 +111,12 @@ class Lexer
     *  doDocComment = handle documentation comments
     *  commentToken = comments become TOK.comment's
     *  errorSink = where error messages go, must not be null
-    *  vendor = name of the vendor
-    *  versionNumber = version of the caller
+    *  compileEnv = version, vendor, date, time, etc.
     */
    this(const(char)* filename, const(char)* base, size_t begoffset,
        size_t endoffset, bool doDocComment, bool commentToken,
        ErrorSink errorSink,
-       const(char)[] vendor = "DLF", uint versionNumber = 1) pure scope
+       const CompileEnv* compileEnv) pure scope
    {
        scanloc = Loc(filename, 1, 1);
        // debug printf("Lexer::Lexer(%p)\n", base);
@@ -128,8 +133,13 @@ class Lexer
        this.lastDocLine = 0;
        this.eSink = errorSink;
        assert(errorSink);
-       this.versionNumber = versionNumber;
-       this.vendor = vendor;
+       if (compileEnv)
+           this.compileEnv = *compileEnv;
+       else
+       {
+           this.compileEnv.versionNumber = 1;
+           this.compileEnv.vendor = "DLF";
+       }
        //initKeywords();
        /* If first line starts with '#!', ignore the line
         */
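Worth noting from the hunk above: a null `compileEnv` is legal and falls back to version 1 and vendor "DLF", which is what lets the unit tests later in this diff simply pass `null`. A small sketch of the two call patterns, assuming only the constructor shown above plus `global.compileEnv` from the globals.d hunks; this is not part of the commit, and the file name and code string are placeholders:

    // Hypothetical sketch: the two ways a caller can now construct a Lexer.
    import dmd.errorsink : ErrorSinkStderr;
    import dmd.globals : global;
    import dmd.lexer : Lexer;

    void constructorPatterns()
    {
        enum code = "__VENDOR__";

        // 1. No environment: the lexer falls back to versionNumber = 1, vendor = "DLF".
        scope bare = new Lexer("test.d", code.ptr, 0, code.length, false, false,
                               new ErrorSinkStderr, null);

        // 2. The compiler proper forwards the shared environment filled in by Global._init().
        scope full = new Lexer("test.d", code.ptr, 0, code.length, false, false,
                               new ErrorSinkStderr, &global.compileEnv);
    }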
@@ -169,10 +179,10 @@ class Lexer
     */
    this(const(char)* filename, const(char)* base, size_t begoffset, size_t endoffset,
        bool doDocComment, bool commentToken, bool whitespaceToken,
-       ErrorSink errorSink
+       ErrorSink errorSink, const CompileEnv* compileEnv = null
        )
    {
-       this(filename, base, begoffset, endoffset, doDocComment, commentToken, errorSink);
+       this(filename, base, begoffset, endoffset, doDocComment, commentToken, errorSink, compileEnv);
        this.whitespaceToken = whitespaceToken;
    }
 
@@ -571,36 +581,26 @@ class Lexer
 
            else if (*t.ptr == '_') // if special identifier token
            {
-               // Lazy initialization
-               TimeStampInfo.initialize(t.loc, eSink);
+               void toToken(const(char)[] s)
+               {
+                   t.value = TOK.string_;
+                   t.ustring = s.ptr;
+                   t.len = cast(uint)s.length;
+                   t.postfix = 0;
+               }
 
                if (id == Id.DATE)
-               {
-                   t.ustring = TimeStampInfo.date.ptr;
-                   goto Lstr;
-               }
+                   toToken(compileEnv.date);
                else if (id == Id.TIME)
-               {
-                   t.ustring = TimeStampInfo.time.ptr;
-                   goto Lstr;
-               }
+                   toToken(compileEnv.time);
                else if (id == Id.VENDOR)
-               {
-                   t.ustring = vendor.xarraydup.ptr;
-                   goto Lstr;
-               }
+                   toToken(compileEnv.vendor);
                else if (id == Id.TIMESTAMP)
-               {
-                   t.ustring = TimeStampInfo.timestamp.ptr;
-               Lstr:
-                   t.value = TOK.string_;
-                   t.postfix = 0;
-                   t.len = cast(uint)strlen(t.ustring);
-               }
+                   toToken(compileEnv.timestamp);
                else if (id == Id.VERSIONX)
                {
                    t.value = TOK.int64Literal;
-                   t.unsvalue = versionNumber;
+                   t.unsvalue = compileEnv.versionNumber;
                }
                else if (id == Id.EOFX)
                {
@@ -3019,7 +3019,10 @@ class Lexer
        auto dc = (lineComment && anyToken) ? &t.lineComment : &t.blockComment;
        // Combine with previous doc comment, if any
        if (*dc)
-           *dc = combineComments(*dc, buf[], newParagraph).toDString();
+       {
+           auto p = combineComments(*dc, buf[], newParagraph);
+           *dc = p ? p[0 .. strlen(p)] : null;
+       }
        else
            *dc = buf.extractSlice(true);
    }
@@ -3067,42 +3070,6 @@ class Lexer
 
 private:
 
-/// Support for `__DATE__`, `__TIME__`, and `__TIMESTAMP__`
-private struct TimeStampInfo
-{
-    private __gshared bool initdone = false;
-
-    // Note: Those properties need to be guarded by a call to `init`
-    // The API isn't safe, and quite brittle, but it was left this way
-    // over performance concerns.
-    // This is currently only called once, from the lexer.
-    __gshared char[11 + 1] date;
-    __gshared char[8 + 1] time;
-    __gshared char[24 + 1] timestamp;
-
-    public static void initialize(const ref Loc loc, ErrorSink eSink) nothrow
-    {
-        if (initdone)
-            return;
-
-        initdone = true;
-        time_t ct;
-        // https://issues.dlang.org/show_bug.cgi?id=20444
-        if (auto p = getenv("SOURCE_DATE_EPOCH"))
-        {
-            if (!ct.parseDigits(p.toDString()))
-                eSink.error(loc, "value of environment variable `SOURCE_DATE_EPOCH` should be a valid UNIX timestamp, not: `%s`", p);
-        }
-        else
-            .time(&ct);
-        const p = ctime(&ct);
-        assert(p);
-        snprintf(&date[0], date.length, "%.6s %.4s", p + 4, p + 20);
-        snprintf(&time[0], time.length, "%.8s", p + 11);
-        snprintf(&timestamp[0], timestamp.length, "%.24s", p);
-    }
-}
-
 private enum LS = 0x2028;       // UTF line separator
 private enum PS = 0x2029;       // UTF paragraph separator
 
@@ -3352,7 +3319,7 @@ unittest
     */
    string text = "int"; // We rely on the implicit null-terminator
    ErrorSink errorSink = new ErrorSinkStderr;
-   scope Lexer lex1 = new Lexer(null, text.ptr, 0, text.length, false, false, errorSink);
+   scope Lexer lex1 = new Lexer(null, text.ptr, 0, text.length, false, false, errorSink, null);
    TOK tok;
    tok = lex1.nextToken();
    //printf("tok == %s, %d, %d\n", Token::toChars(tok), tok, TOK.int32);
@@ -3388,7 +3355,7 @@ unittest
 
    foreach (testcase; testcases)
    {
-       scope Lexer lex2 = new Lexer(null, testcase.ptr, 0, testcase.length-1, false, false, errorSink);
+       scope Lexer lex2 = new Lexer(null, testcase.ptr, 0, testcase.length-1, false, false, errorSink, null);
        TOK tok = lex2.nextToken();
        size_t iterations = 1;
        while ((tok != TOK.endOfFile) && (iterations++ < testcase.length))
@@ -30,6 +30,8 @@ import dmd.root.rootobject;
 import dmd.root.string;
 import dmd.tokens;
 
+alias CompileEnv = dmd.lexer.CompileEnv;
+
 /***********************************************************
  */
 class Parser(AST, Lexer = dmd.lexer.Lexer) : Lexer
@@ -53,11 +55,11 @@ class Parser(AST, Lexer = dmd.lexer.Lexer) : Lexer
     *  loc     location in source file of mixin
     */
    extern (D) this(const ref Loc loc, AST.Module _module, const(char)[] input, bool doDocComment,
-       ErrorSink errorSink) scope
+       ErrorSink errorSink, const CompileEnv* compileEnv) scope
    {
        super(_module ? _module.srcfile.toChars() : null, input.ptr, 0, input.length, doDocComment, false,
              errorSink,
-             global.vendor, global.versionNumber());
+             compileEnv);
 
        //printf("Parser::Parser()\n");
        scanloc = loc;
@@ -78,11 +80,12 @@ class Parser(AST, Lexer = dmd.lexer.Lexer) : Lexer
        //nextToken(); // start up the scanner
    }
 
-   extern (D) this(AST.Module _module, const(char)[] input, bool doDocComment, ErrorSink errorSink) scope
+   extern (D) this(AST.Module _module, const(char)[] input, bool doDocComment, ErrorSink errorSink,
+       const CompileEnv* compileEnv) scope
    {
        super(_module ? _module.srcfile.toChars() : null, input.ptr, 0, input.length, doDocComment, false,
              errorSink,
-             global.vendor, global.versionNumber());
+             compileEnv);
 
        //printf("Parser::Parser()\n");
        mod = _module;
@@ -4751,7 +4751,7 @@ private Statements* flatten(Statement statement, Scope* sc)
        const len = buf.length;
        buf.writeByte(0);
        const str = buf.extractSlice()[0 .. len];
-       scope p = new Parser!ASTCodegen(cs.loc, sc._module, str, false, global.errorSink);
+       scope p = new Parser!ASTCodegen(cs.loc, sc._module, str, false, global.errorSink, &global.compileEnv);
        p.nextToken();
 
        auto a = new Statements();
@@ -4918,7 +4918,7 @@ RootObject compileTypeMixin(TypeMixin tm, Loc loc, Scope* sc)
    const len = buf.length;
    buf.writeByte(0);
    const str = buf.extractSlice()[0 .. len];
-   scope p = new Parser!ASTCodegen(loc, sc._module, str, false, global.errorSink);
+   scope p = new Parser!ASTCodegen(loc, sc._module, str, false, global.errorSink, &global.compileEnv);
    p.nextToken();
    //printf("p.loc.linnum = %d\n", p.loc.linnum);
 
@@ -68,7 +68,7 @@ static void frontend_init()
     gc_disable();
 
     global._init();
-    global.vendor = "Front-End Tester";
+    global.compileEnv.vendor = "Front-End Tester";
     global.params.objname = NULL;
 
     target.os = Target::OS_linux;
@@ -64,7 +64,7 @@ private void lexUntilEndOfFile(string code)
 
    if (!global.errorSink)
        global.errorSink = new ErrorSinkCompiler;
-   scope lexer = new Lexer("test", code.ptr, 0, code.length, 0, 0, global.errorSink);
+   scope lexer = new Lexer("test", code.ptr, 0, code.length, 0, 0, global.errorSink, null);
    lexer.nextToken;
 
    while (lexer.nextToken != TOK.endOfFile) {}
@@ -170,7 +170,7 @@ unittest
        TOK.rightCurly,
    ];
 
-   Lexer lexer = new Lexer(null, code.ptr, 0, code.length, false, false, new ErrorSinkStderr);
+   Lexer lexer = new Lexer(null, code.ptr, 0, code.length, false, false, new ErrorSinkStderr, null);
    lexer.nextToken;
 
    TOK[] result;
@@ -191,7 +191,7 @@ unittest
        TOK.comment,
    ];
 
-   Lexer lexer = new Lexer(null, code.ptr, 0, code.length, false, true, new ErrorSinkStderr);
+   Lexer lexer = new Lexer(null, code.ptr, 0, code.length, false, true, new ErrorSinkStderr, null);
    lexer.nextToken;
 
    TOK[] result;
@@ -217,7 +217,7 @@ unittest
        TOK.reserved,
    ];
 
-   Lexer lexer = new Lexer(null, code.ptr, 0, code.length, false, false, new ErrorSinkStderr);
+   Lexer lexer = new Lexer(null, code.ptr, 0, code.length, false, false, new ErrorSinkStderr, null);
 
    TOK[] result;
 
@@ -266,7 +266,7 @@ unittest
    foreach (codeNum, code; codes)
    {
        auto fileName = text("file", codeNum, '\0');
-       Lexer lexer = new Lexer(fileName.ptr, code.ptr, 0, code.length, false, false, new ErrorSinkCompiler);
+       Lexer lexer = new Lexer(fileName.ptr, code.ptr, 0, code.length, false, false, new ErrorSinkCompiler, null);
        // Generate the errors
        foreach(unused; lexer){}
    }
@@ -17,7 +17,7 @@ unittest
 {
    enum code = "token";
 
-   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr);
+   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr, null);
 
    lexer.nextToken;
 
@@ -29,7 +29,7 @@ unittest
 {
    enum code = "ignored_token token";
 
-   scope lexer = new Lexer("test.d", code.ptr, 13, code.length - 14, 0, 0, new ErrorSinkStderr);
+   scope lexer = new Lexer("test.d", code.ptr, 13, code.length - 14, 0, 0, new ErrorSinkStderr, null);
 
    lexer.nextToken;
 
@@ -41,7 +41,7 @@ unittest
 {
    enum code = "token1 token2 3";
 
-   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr);
+   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr, null);
 
    lexer.nextToken;
    lexer.nextToken;
@@ -55,7 +55,7 @@ unittest
 {
    enum code = "token";
 
-   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr);
+   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr, null);
 
    lexer.nextToken;
    lexer.nextToken;
@@ -68,7 +68,7 @@ unittest
 {
    enum code = "/* comment */";
 
-   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, true, new ErrorSinkStderr);
+   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, true, new ErrorSinkStderr, null);
 
    lexer.nextToken;
 
@@ -81,7 +81,7 @@ unittest
 {
    enum code = "// comment";
 
-   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, true, new ErrorSinkStderr);
+   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, true, new ErrorSinkStderr, null);
 
    lexer.nextToken;
 
@@ -94,7 +94,7 @@ unittest
 {
    enum code = "/+ comment +/";
 
-   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, true, new ErrorSinkStderr);
+   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, true, new ErrorSinkStderr, null);
 
    lexer.nextToken;
 
@@ -107,7 +107,7 @@ unittest
 {
    enum code = "/* comment */ token";
 
-   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr);
+   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr, null);
 
    lexer.nextToken;
 
@@ -119,7 +119,7 @@ unittest
 {
    enum code = "// comment\ntoken";
 
-   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr);
+   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr, null);
 
    lexer.nextToken;
 
@@ -131,7 +131,7 @@ unittest
 {
    enum code = "/+ comment +/ token";
 
-   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr);
+   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr, null);
 
    lexer.nextToken;
 
@@ -143,7 +143,7 @@ unittest
 {
    enum code = "line\ntoken";
 
-   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr);
+   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr, null);
 
    lexer.nextToken;
    lexer.nextToken;
@@ -156,7 +156,7 @@ unittest
 {
    enum code = "line\r\ntoken";
 
-   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr);
+   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr, null);
 
    lexer.nextToken;
    lexer.nextToken;
@@ -169,7 +169,7 @@ unittest
 {
    enum code = "line\rtoken";
 
-   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr);
+   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr, null);
 
    lexer.nextToken;
    lexer.nextToken;
@@ -182,7 +182,7 @@ unittest
 {
    enum code = "'🍺'";
 
-   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr);
+   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr, null);
 
    lexer.nextToken;
 
@@ -194,7 +194,7 @@ unittest
 {
    enum code = `"🍺🍺"`;
 
-   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr);
+   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr, null);
 
    lexer.nextToken;
 
@@ -206,7 +206,7 @@ unittest
 {
    enum code = "'🍺' token";
 
-   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr);
+   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr, null);
 
    lexer.nextToken;
    lexer.nextToken;
@@ -219,7 +219,7 @@ unittest
 {
    enum code = `"🍺🍺" token`;
 
-   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr);
+   scope lexer = new Lexer("test.d", code.ptr, 0, code.length, 0, 0, new ErrorSinkStderr, null);
 
    lexer.nextToken;
    lexer.nextToken;
@@ -558,7 +558,7 @@ static foreach (tok; __traits(allMembers, TOK))
 {
    const newCode = "first_token " ~ tests[tok].code;
 
-   scope lexer = new Lexer("test.d", newCode.ptr, 0, newCode.length, 0, 0, new ErrorSinkStderr);
+   scope lexer = new Lexer("test.d", newCode.ptr, 0, newCode.length, 0, 0, new ErrorSinkStderr, null);
 
    lexer.nextToken;
    lexer.nextToken;