Merge pull request #21202 from kinke/merge_stable

Merge stable
This commit is contained in:
Martin Kinkelin 2025-04-12 13:04:54 +02:00 committed by GitHub
commit ce6cef9762
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
22 changed files with 211 additions and 68 deletions

View file

@@ -263,7 +263,6 @@ alias lexer = makeRuleWithArgs!((MethodInitializer!BuildRule builder, BuildRule
.sources(sources.lexer)
.deps([
versionFile,
sysconfDirFile,
common(suffix, extraFlags)
])
.msg("(DC) LEXER" ~ suffix)
@@ -378,7 +377,7 @@ alias backend = makeRuleWithArgs!((MethodInitializer!BuildRule builder, BuildRul
).array)
);
/// Returns: the rules that generate required string files: VERSION and SYSCONFDIR.imp
/// Returns: the rule that generates string-import file `VERSION` (for the lexer)
alias versionFile = makeRule!((builder, rule) {
alias contents = memoize!(() {
if (dmdRepo.buildPath(".git").exists)
@@ -411,7 +410,7 @@ alias versionFile = makeRule!((builder, rule) {
return gitResult.output.strip;
}
// version fallback
return dmdRepo.buildPath("VERSION").readText;
return dmdRepo.buildPath("VERSION").readText.strip;
});
builder
.target(env["G"].buildPath("VERSION"))
@@ -420,6 +419,7 @@ alias versionFile = makeRule!((builder, rule) {
.commandFunction(() => writeText(rule.target, contents));
});
/// Returns: the rule that generates string-import file `SYSCONFDIR.imp` (for the driver)
alias sysconfDirFile = makeRule!((builder, rule) => builder
.target(env["G"].buildPath("SYSCONFDIR.imp"))
.condition(() => !rule.target.exists || rule.target.readText != env["SYSCONFDIR"])
@@ -469,7 +469,7 @@ alias dmdExe = makeRuleWithArgs!((MethodInitializer!BuildRule builder, BuildRule
.sources(dmdSources.chain(lexer.targets, backend.targets, common.targets).array)
.target(env["DMD_PATH"] ~ targetSuffix)
.msg("(DC) DMD" ~ targetSuffix)
.deps([versionFile, sysconfDirFile, lexer, backend, common])
.deps([sysconfDirFile, lexer, backend, common])
.command([
env["HOST_DMD_RUN"],
"-of" ~ rule.target,
@@ -646,7 +646,7 @@ alias runTests = makeRule!((testBuilder, testRule)
/// BuildRule to run the DMD unittest executable.
alias runDmdUnittest = makeRule!((builder, rule) {
auto dmdUnittestExe = dmdExe("-unittest", ["-version=NoMain", "-unittest", env["HOST_DMD_KIND"] == "gdc" ? "-fmain" : "-main"], ["-unittest"]);
auto dmdUnittestExe = dmdExe("-unittest", ["-version=NoMain", "-unittest", env["HOST_DMD_KIND"] == "gdc" ? "-fmain" : "-main"], ["-unittest"]);
builder
.name("unittest")
.description("Run the dmd unittests")
@@ -757,13 +757,15 @@ alias runCxxUnittest = makeRule!((runCxxBuilder, runCxxRule) {
.name("cxx-unittest")
.description("Build the C++ unittests")
.msg("(DC) CXX-UNITTEST")
.deps([lexer(null, null), cxxFrontend])
.deps([sysconfDirFile, lexer(null, null), cxxFrontend])
.sources(sources.dmd.driver ~ sources.dmd.frontend ~ sources.root ~ sources.common ~ env["D"].buildPath("cxxfrontend.d"))
.target(env["G"].buildPath("cxx-unittest").exeName)
.command([ env["HOST_DMD_RUN"], "-of=" ~ exeRule.target, "-vtls", "-J" ~ env["RES"],
"-L-lstdc++", "-version=NoMain", "-version=NoBackend"
].chain(
flags["DFLAGS"], exeRule.sources, exeRule.deps.map!(d => d.target)
flags["DFLAGS"], exeRule.sources,
// don't compile deps[0], the SYSCONFDIR.imp string-import file
exeRule.deps[1 .. $].map!(d => d.target)
).array)
);
@@ -967,7 +969,7 @@ alias html = makeRule!((htmlBuilder, htmlRule) {
.sources(sourceArray)
.target(env["DOC_OUTPUT_DIR"].buildPath(d2html(source)[srcDir.length + 1..$]
.replace(dirSeparator, "_")))
.deps([dmdDefault, versionFile, sysconfDirFile])
.deps([dmdDefault])
.command([
dmdDefault.deps[0].target,
"-o-",

View file

@@ -442,7 +442,7 @@ private struct ErrorInfo
this.kind = kind;
}
const SourceLoc loc; // location of error
const SourceLoc loc; // location of error
Classification headerColor; // color to set `header` output to
const(char)* p1; // additional message prefix
const(char)* p2; // additional message prefix
@@ -731,13 +731,9 @@ private void verrorPrint(const(char)* format, va_list ap, ref ErrorInfo info)
!loc.filename.startsWith(".d-mixin-") &&
!global.params.mixinOut.doOutput)
{
import dmd.root.filename : FileName;
if (auto text = cast(const(char[])) global.fileManager.getFileContents(FileName(loc.filename)))
{
tmp.reset();
printErrorLineContext(tmp, text, loc.fileOffset);
fputs(tmp.peekChars(), stderr);
}
tmp.reset();
printErrorLineContext(tmp, loc.fileContent, loc.fileOffset);
fputs(tmp.peekChars(), stderr);
}
old_loc = loc;
fflush(stderr); // ensure it gets written out in case of compiler aborts
@@ -750,7 +746,7 @@ private void printErrorLineContext(ref OutBuffer buf, const(char)[] text, size_t
import dmd.root.utf : utf_decodeChar;
if (offset >= text.length)
return; // Out of bounds (can happen in pre-processed C files currently)
return; // Out of bounds (missing source content in SourceLoc)
// Scan backwards for beginning of line
size_t s = offset;

View file

@@ -641,21 +641,25 @@ TupleDeclaration isAliasThisTuple(Expression e)
Type t = e.type.toBasetype();
while (true)
{
Dsymbol s = t.toDsymbol(null);
if (!s)
return null;
auto ad = s.isAggregateDeclaration();
if (!ad)
return null;
s = ad.aliasthis ? ad.aliasthis.sym : null;
if (s && s.isVarDeclaration())
if (Dsymbol s = t.toDsymbol(null))
{
TupleDeclaration td = s.isVarDeclaration().toAlias().isTupleDeclaration();
if (td && td.isexp)
return td;
if (auto ad = s.isAggregateDeclaration())
{
s = ad.aliasthis ? ad.aliasthis.sym : null;
if (s && s.isVarDeclaration())
{
TupleDeclaration td = s.isVarDeclaration().toAlias().isTupleDeclaration();
if (td && td.isexp)
return td;
}
if (Type att = t.aliasthisOf())
{
t = att;
continue;
}
}
}
if (Type att = t.aliasthisOf())
t = att;
return null;
}
}
@@ -1247,6 +1251,9 @@ private Expression resolveUFCS(Scope* sc, CallExp ce)
}
else
{
if (arrayExpressionSemantic(ce.arguments.peekSlice(), sc))
return ErrorExp.get();
if (Expression ey = die.dotIdSemanticProp(sc, 1))
{
if (ey.op == EXP.error)
@@ -1254,19 +1261,11 @@ private Expression resolveUFCS(Scope* sc, CallExp ce)
ce.e1 = ey;
if (isDotOpDispatch(ey))
{
// even opDispatch and UFCS must have valid arguments,
// so now that we've seen indication of a problem,
// check them for issues.
Expressions* originalArguments = Expression.arraySyntaxCopy(ce.arguments);
const errors = global.startGagging();
e = ce.expressionSemantic(sc);
if (!global.endGagging(errors))
return e;
if (arrayExpressionSemantic(originalArguments.peekSlice(), sc))
return ErrorExp.get();
/* fall down to UFCS */
}
else

View file

@@ -384,12 +384,14 @@ struct SourceLoc final
uint32_t line;
uint32_t column;
uint32_t fileOffset;
_d_dynamicArray< const char > fileContent;
const char* toChars(bool showColumns = Loc::showColumns, MessageStyle messageStyle = Loc::messageStyle) const;
SourceLoc() :
filename(),
line(),
column(),
fileOffset()
fileOffset(),
fileContent()
{
}
};

View file

@@ -421,6 +421,7 @@ struct SourceLoc
uint32_t line;
uint32_t column;
uint32_t fileOffset;
DString fileContent;
};
struct Loc

View file

@@ -132,7 +132,7 @@ class Lexer
// debug printf("Lexer::Lexer(%p)\n", base);
// debug printf("lexer.filename = %s\n", filename);
token = Token.init;
this.baseLoc = newBaseLoc(filename, endoffset);
this.baseLoc = newBaseLoc(filename, base[0 .. endoffset]);
this.linnum = 1;
this.base = base;
this.end = base + endoffset;
@@ -224,7 +224,7 @@ class Lexer
inTokenStringConstant = 0;
lastDocLine = 0;
baseLoc = newBaseLoc("#defines", slice.length);
baseLoc = newBaseLoc("#defines", slice);
scanloc = baseLoc.getLoc(0);
}

View file

@@ -73,7 +73,7 @@ nothrow:
extern (C++) static Loc singleFilename(const char* filename)
{
Loc result;
locFileTable ~= new BaseLoc(filename.toDString, locIndex, 0, [0]);
locFileTable ~= new BaseLoc(filename.toDString, null, locIndex, 0, [0]);
result.index = locIndex++;
return result;
}
@@ -244,16 +244,20 @@ struct SourceLoc
uint column; /// column number (starts at 1)
uint fileOffset; /// byte index into file
/// Index `fileOffset` into this to obtain source code context of this location
const(char)[] fileContent;
// aliases for backwards compatibility
alias linnum = line;
alias charnum = column;
this(const(char)[] filename, uint line, uint column, uint fileOffset = 0) nothrow @nogc pure @safe
this(const(char)[] filename, uint line, uint column, uint fileOffset = 0, const(char)[] fileContent = null) nothrow @nogc pure @safe
{
this.filename = filename;
this.line = line;
this.column = column;
this.fileOffset = fileOffset;
this.fileContent = fileContent;
}
this(Loc loc) nothrow @nogc @trusted
@@ -307,15 +311,15 @@ private size_t fileTableIndex(uint index) nothrow @nogc
* Create a new source location map for a file
* Params:
* filename = source file name
* size = space to reserve for locations, equal to the file size in bytes
* fileContent = content of source file
* Returns: new BaseLoc
*/
BaseLoc* newBaseLoc(const(char)* filename, size_t size) nothrow
BaseLoc* newBaseLoc(const(char)* filename, const(char)[] fileContent) nothrow
{
locFileTable ~= new BaseLoc(filename.toDString, locIndex, 1, [0]);
locFileTable ~= new BaseLoc(filename.toDString, fileContent, locIndex, 1, [0]);
// Careful: the endloc of a FuncDeclaration can
// point to 1 past the very last byte in the file, so account for that
locIndex += size + 1;
locIndex += fileContent.length + 1;
return locFileTable[$ - 1];
}
@@ -361,6 +365,7 @@ struct BaseLoc
@safe nothrow:
const(char)[] filename; /// Source file name
const(char)[] fileContents; /// Source file contents
uint startIndex; /// Subtract this from Loc.index to get file offset
int startLine = 1; /// Line number at index 0
uint[] lines; /// For each line, the file offset at which it starts. At index 0 there's always a 0 entry.
@@ -396,11 +401,11 @@ struct BaseLoc
{
auto fname = filename.toDString;
if (substitutions.length == 0)
substitutions ~= BaseLoc(this.filename, 0, 0);
substitutions ~= BaseLoc(this.filename, null, 0, 0);
if (fname.length == 0)
fname = substitutions[$ - 1].filename;
substitutions ~= BaseLoc(fname, offset, cast(int) (line - lines.length + startLine - 2));
substitutions ~= BaseLoc(fname, null, offset, cast(int) (line - lines.length + startLine - 2));
}
/// Returns: `loc` modified by substitutions from #file / #line directives
@@ -420,7 +425,7 @@ struct BaseLoc
private SourceLoc getSourceLoc(uint offset) @nogc
{
const i = getLineIndex(offset);
const sl = SourceLoc(filename, cast(int) (i + startLine), cast(int) (1 + offset - lines[i]), offset);
const sl = SourceLoc(filename, cast(int) (i + startLine), cast(int) (1 + offset - lines[i]), offset, fileContents);
return substitute(sl);
}

View file

@@ -3277,9 +3277,19 @@ Type merge(Type type)
case Tsarray:
// prevents generating the mangle if the array dim is not yet known
if (!type.isTypeSArray().dim.isIntegerExp())
return type;
goto default;
if (auto ie = type.isTypeSArray().dim.isIntegerExp())
{
// After TypeSemantic, the length is always converted to size_t, but the parser
// usually generates regular integer types (e.g. in cast(const ubyte[2])) which
// it may try to merge, which then leads to failing implicit conversions as 2LU != 2
// according to Expression.equals. Only merge array types with size_t lengths for now.
// https://github.com/dlang/dmd/issues/21179
if (ie.type != Type.tsize_t)
return type;
goto default;
}
return type;
case Tenum:
break;