Merge remote-tracking branch 'origin/stable'

Conflicts:
	compiler/src/dmd/declaration.h
	compiler/src/dmd/frontend.h
	compiler/src/dmd/globals.h
	compiler/src/dmd/typesem.d
	compiler/src/tests/cxxfrontend.cc
	compiler/test/fail_compilation/fail347.d
Commit 58d065f935 by Martin Kinkelin, 2025-04-11 00:02:30 +02:00 (committed by Martin Kinkelin)

20 changed files with 197 additions and 67 deletions


@@ -263,7 +263,6 @@ alias lexer = makeRuleWithArgs!((MethodInitializer!BuildRule builder, BuildRule
     .sources(sources.lexer)
     .deps([
         versionFile,
-        sysconfDirFile,
         common(suffix, extraFlags)
     ])
     .msg("(DC) LEXER" ~ suffix)
@@ -378,7 +377,7 @@ alias backend = makeRuleWithArgs!((MethodInitializer!BuildRule builder, BuildRul
     ).array)
 );

-/// Returns: the rules that generate required string files: VERSION and SYSCONFDIR.imp
+/// Returns: the rule that generates string-import file `VERSION` (for the lexer)
 alias versionFile = makeRule!((builder, rule) {
     alias contents = memoize!(() {
         if (dmdRepo.buildPath(".git").exists)
@@ -411,7 +410,7 @@ alias versionFile = makeRule!((builder, rule) {
             return gitResult.output.strip;
         }
         // version fallback
-        return dmdRepo.buildPath("VERSION").readText;
+        return dmdRepo.buildPath("VERSION").readText.strip;
     });
     builder
         .target(env["G"].buildPath("VERSION"))
@@ -420,6 +419,7 @@ alias versionFile = makeRule!((builder, rule) {
         .commandFunction(() => writeText(rule.target, contents));
 });

+/// Returns: the rule that generates string-import file `SYSCONFDIR.imp` (for the driver)
 alias sysconfDirFile = makeRule!((builder, rule) => builder
     .target(env["G"].buildPath("SYSCONFDIR.imp"))
     .condition(() => !rule.target.exists || rule.target.readText != env["SYSCONFDIR"])
@@ -469,7 +469,7 @@ alias dmdExe = makeRuleWithArgs!((MethodInitializer!BuildRule builder, BuildRule
     .sources(dmdSources.chain(lexer.targets, backend.targets, common.targets).array)
     .target(env["DMD_PATH"] ~ targetSuffix)
     .msg("(DC) DMD" ~ targetSuffix)
-    .deps([versionFile, sysconfDirFile, lexer, backend, common])
+    .deps([sysconfDirFile, lexer, backend, common])
     .command([
         env["HOST_DMD_RUN"],
         "-of" ~ rule.target,
@@ -646,7 +646,7 @@ alias runTests = makeRule!((testBuilder, testRule)

 /// BuildRule to run the DMD unittest executable.
 alias runDmdUnittest = makeRule!((builder, rule) {
     auto dmdUnittestExe = dmdExe("-unittest", ["-version=NoMain", "-unittest", env["HOST_DMD_KIND"] == "gdc" ? "-fmain" : "-main"], ["-unittest"]);
     builder
         .name("unittest")
         .description("Run the dmd unittests")
@@ -757,13 +757,15 @@ alias runCxxUnittest = makeRule!((runCxxBuilder, runCxxRule) {
         .name("cxx-unittest")
         .description("Build the C++ unittests")
         .msg("(DC) CXX-UNITTEST")
-        .deps([lexer(null, null), cxxFrontend])
+        .deps([sysconfDirFile, lexer(null, null), cxxFrontend])
         .sources(sources.dmd.driver ~ sources.dmd.frontend ~ sources.root ~ sources.common ~ env["D"].buildPath("cxxfrontend.d"))
         .target(env["G"].buildPath("cxx-unittest").exeName)
         .command([ env["HOST_DMD_RUN"], "-of=" ~ exeRule.target, "-vtls", "-J" ~ env["RES"],
                    "-L-lstdc++", "-version=NoMain", "-version=NoBackend"
         ].chain(
-            flags["DFLAGS"], exeRule.sources, exeRule.deps.map!(d => d.target)
+            flags["DFLAGS"], exeRule.sources,
+            // don't compile deps[0], the SYSCONFDIR.imp string-import file
+            exeRule.deps[1 .. $].map!(d => d.target)
         ).array)
     );

@@ -967,7 +969,7 @@ alias html = makeRule!((htmlBuilder, htmlRule) {
         .sources(sourceArray)
         .target(env["DOC_OUTPUT_DIR"].buildPath(d2html(source)[srcDir.length + 1..$]
             .replace(dirSeparator, "_")))
-        .deps([dmdDefault, versionFile, sysconfDirFile])
+        .deps([dmdDefault])
         .command([
             dmdDefault.deps[0].target,
             "-o-",

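For context on why VERSION and SYSCONFDIR.imp only need to be dependencies of the rules whose sources actually read them: they are string-import files, consumed at compile time through -J and import(...) rather than compiled as modules. A minimal stand-alone sketch of that mechanism (not part of build.d; the .strip mirrors the readText.strip fix above):

// version_banner.d -- build with: dmd -J<dir containing a VERSION file> version_banner.d
import std.string : strip;

// `import("VERSION")` reads the file from a -J search path at compile time;
// stripping the trailing newline mirrors the readText.strip change above.
enum versionString = import("VERSION").strip;

void main()
{
    import std.stdio : writeln;
    writeln("compiler version: ", versionString);
}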

@@ -442,7 +442,7 @@ private struct ErrorInfo
         this.kind = kind;
     }

     const SourceLoc loc; // location of error
     Classification headerColor; // color to set `header` output to
     const(char)* p1; // additional message prefix
     const(char)* p2; // additional message prefix
@@ -731,13 +731,9 @@ private void verrorPrint(const(char)* format, va_list ap, ref ErrorInfo info)
         !loc.filename.startsWith(".d-mixin-") &&
         !global.params.mixinOut.doOutput)
     {
-        import dmd.root.filename : FileName;
-        if (auto text = cast(const(char[])) global.fileManager.getFileContents(FileName(loc.filename)))
-        {
-            tmp.reset();
-            printErrorLineContext(tmp, text, loc.fileOffset);
-            fputs(tmp.peekChars(), stderr);
-        }
+        tmp.reset();
+        printErrorLineContext(tmp, loc.fileContent, loc.fileOffset);
+        fputs(tmp.peekChars(), stderr);
     }
     old_loc = loc;
     fflush(stderr); // ensure it gets written out in case of compiler aborts
@@ -750,7 +746,7 @@ private void printErrorLineContext(ref OutBuffer buf, const(char)[] text, size_t
     import dmd.root.utf : utf_decodeChar;

     if (offset >= text.length)
-        return; // Out of bounds (can happen in pre-processed C files currently)
+        return; // Out of bounds (missing source content in SourceLoc)

     // Scan backwards for beginning of line
     size_t s = offset;

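The verrorPrint change above stops going through global.fileManager and instead uses the fileContent slice that SourceLoc now carries. As a rough, self-contained sketch of what a printErrorLineContext-style helper does with such a slice and a byte offset (simplified; the real function in the frontend also handles UTF-8 decoding and other details):

import std.array : replicate;
import std.stdio : writeln;

// Print the source line containing `offset`, plus a caret under that column,
// given the file contents attached to the location (cf. SourceLoc.fileContent).
void printLineContext(const(char)[] fileContent, size_t offset)
{
    if (offset >= fileContent.length)
        return; // no source content attached to this location

    size_t start = offset; // scan backwards for the beginning of the line
    while (start > 0 && fileContent[start - 1] != '\n')
        start--;

    size_t end = offset; // scan forwards for the end of the line
    while (end < fileContent.length && fileContent[end] != '\n')
        end++;

    writeln(fileContent[start .. end]);
    writeln(" ".replicate(offset - start), "^");
}

void main()
{
    auto src = "int x = 1;\nstring s = 5;\n";
    printLineContext(src, 22); // byte offset of the `5` in the second line
}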

@@ -641,21 +641,25 @@ TupleDeclaration isAliasThisTuple(Expression e)
     Type t = e.type.toBasetype();
     while (true)
     {
-        Dsymbol s = t.toDsymbol(null);
-        if (!s)
-            return null;
-        auto ad = s.isAggregateDeclaration();
-        if (!ad)
-            return null;
-        s = ad.aliasthis ? ad.aliasthis.sym : null;
-        if (s && s.isVarDeclaration())
+        if (Dsymbol s = t.toDsymbol(null))
         {
-            TupleDeclaration td = s.isVarDeclaration().toAlias().isTupleDeclaration();
-            if (td && td.isexp)
-                return td;
+            if (auto ad = s.isAggregateDeclaration())
+            {
+                s = ad.aliasthis ? ad.aliasthis.sym : null;
+                if (s && s.isVarDeclaration())
+                {
+                    TupleDeclaration td = s.isVarDeclaration().toAlias().isTupleDeclaration();
+                    if (td && td.isexp)
+                        return td;
+                }
+                if (Type att = t.aliasthisOf())
+                {
+                    t = att;
+                    continue;
+                }
+            }
         }
-        if (Type att = t.aliasthisOf())
-            t = att;
+        return null;
     }
 }

@@ -1247,6 +1251,9 @@ private Expression resolveUFCS(Scope* sc, CallExp ce)
     }
     else
     {
+        if (arrayExpressionSemantic(ce.arguments.peekSlice(), sc))
+            return ErrorExp.get();
+
         if (Expression ey = die.dotIdSemanticProp(sc, 1))
         {
             if (ey.op == EXP.error)
@@ -1254,19 +1261,11 @@ private Expression resolveUFCS(Scope* sc, CallExp ce)
             ce.e1 = ey;
             if (isDotOpDispatch(ey))
             {
-                // even opDispatch and UFCS must have valid arguments,
-                // so now that we've seen indication of a problem,
-                // check them for issues.
-                Expressions* originalArguments = Expression.arraySyntaxCopy(ce.arguments);
-
                 const errors = global.startGagging();
                 e = ce.expressionSemantic(sc);
                 if (!global.endGagging(errors))
                     return e;

-                if (arrayExpressionSemantic(originalArguments.peekSlice(), sc))
-                    return ErrorExp.get();
-
                 /* fall down to UFCS */
             }
             else

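For context on the isDotOpDispatch branch reshuffled above: when obj.name(args) finds no member, the compiler speculatively tries opDispatch with errors gagged and, only if that fails, falls back to UFCS; the change moves the semantic check of the call's arguments ahead of that dance instead of re-running it on a syntax copy afterwards. A small, hypothetical example of the two resolution paths (names invented for illustration):

struct Config
{
    // opDispatch handles any member call whose name starts with "set"
    void opDispatch(string name, Args...)(Args args)
        if (name.length > 3 && name[0 .. 3] == "set")
    {
    }
}

// free function, reachable from member-call syntax only through UFCS
void reload(Config c, int delayMs) {}

void main()
{
    Config c;
    c.setTimeout(100); // resolved by opDispatch
    c.reload(5);       // opDispatch constraint fails, so this falls back to UFCS
}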

@@ -384,12 +384,14 @@ struct SourceLoc final
     uint32_t line;
     uint32_t column;
     uint32_t fileOffset;
+    _d_dynamicArray< const char > fileContent;
     const char* toChars(bool showColumns = Loc::showColumns, MessageStyle messageStyle = Loc::messageStyle) const;
     SourceLoc() :
         filename(),
         line(),
         column(),
-        fileOffset()
+        fileOffset(),
+        fileContent()
     {
     }
 };


@@ -421,6 +421,7 @@ struct SourceLoc
     uint32_t line;
     uint32_t column;
     uint32_t fileOffset;
+    DString fileContent;
 };

 struct Loc


@@ -132,7 +132,7 @@ class Lexer
         // debug printf("Lexer::Lexer(%p)\n", base);
         // debug printf("lexer.filename = %s\n", filename);
         token = Token.init;
-        this.baseLoc = newBaseLoc(filename, endoffset);
+        this.baseLoc = newBaseLoc(filename, base[0 .. endoffset]);
         this.linnum = 1;
         this.base = base;
         this.end = base + endoffset;
@@ -224,7 +224,7 @@ class Lexer
         inTokenStringConstant = 0;
         lastDocLine = 0;

-        baseLoc = newBaseLoc("#defines", slice.length);
+        baseLoc = newBaseLoc("#defines", slice);
         scanloc = baseLoc.getLoc(0);
     }


@@ -73,7 +73,7 @@ nothrow:
     extern (C++) static Loc singleFilename(const char* filename)
     {
         Loc result;
-        locFileTable ~= new BaseLoc(filename.toDString, locIndex, 0, [0]);
+        locFileTable ~= new BaseLoc(filename.toDString, null, locIndex, 0, [0]);
         result.index = locIndex++;
         return result;
     }
@@ -244,16 +244,20 @@ struct SourceLoc
     uint column; /// column number (starts at 1)
     uint fileOffset; /// byte index into file

+    /// Index `fileOffset` into this to to obtain source code context of this location
+    const(char)[] fileContent;
+
     // aliases for backwards compatibility
     alias linnum = line;
     alias charnum = column;

-    this(const(char)[] filename, uint line, uint column, uint fileOffset = 0) nothrow @nogc pure @safe
+    this(const(char)[] filename, uint line, uint column, uint fileOffset = 0, const(char)[] fileContent = null) nothrow @nogc pure @safe
     {
         this.filename = filename;
         this.line = line;
         this.column = column;
         this.fileOffset = fileOffset;
+        this.fileContent = fileContent;
     }

     this(Loc loc) nothrow @nogc @trusted
@@ -307,15 +311,15 @@ private size_t fileTableIndex(uint index) nothrow @nogc
 /**
  * Create a new source location map for a file
  * Params:
  *   filename = source file name
- *   size = space to reserve for locations, equal to the file size in bytes
+ *   fileContent = content of source file
  * Returns: new BaseLoc
  */
-BaseLoc* newBaseLoc(const(char)* filename, size_t size) nothrow
+BaseLoc* newBaseLoc(const(char)* filename, const(char)[] fileContent) nothrow
 {
-    locFileTable ~= new BaseLoc(filename.toDString, locIndex, 1, [0]);
+    locFileTable ~= new BaseLoc(filename.toDString, fileContent, locIndex, 1, [0]);
     // Careful: the endloc of a FuncDeclaration can
     // point to 1 past the very last byte in the file, so account for that
-    locIndex += size + 1;
+    locIndex += fileContent.length + 1;
     return locFileTable[$ - 1];
 }
@@ -361,6 +365,7 @@ struct BaseLoc
 @safe nothrow:
     const(char)[] filename; /// Source file name
+    const(char)[] fileContents; /// Source file contents
     uint startIndex; /// Subtract this from Loc.index to get file offset
     int startLine = 1; /// Line number at index 0
     uint[] lines; /// For each line, the file offset at which it starts. At index 0 there's always a 0 entry.
@@ -396,11 +401,11 @@ struct BaseLoc
     {
         auto fname = filename.toDString;
         if (substitutions.length == 0)
-            substitutions ~= BaseLoc(this.filename, 0, 0);
+            substitutions ~= BaseLoc(this.filename, null, 0, 0);

         if (fname.length == 0)
             fname = substitutions[$ - 1].filename;
-        substitutions ~= BaseLoc(fname, offset, cast(int) (line - lines.length + startLine - 2));
+        substitutions ~= BaseLoc(fname, null, offset, cast(int) (line - lines.length + startLine - 2));
     }

     /// Returns: `loc` modified by substitutions from #file / #line directives
@@ -420,7 +425,7 @@ struct BaseLoc
     private SourceLoc getSourceLoc(uint offset) @nogc
     {
         const i = getLineIndex(offset);
-        const sl = SourceLoc(filename, cast(int) (i + startLine), cast(int) (1 + offset - lines[i]), offset);
+        const sl = SourceLoc(filename, cast(int) (i + startLine), cast(int) (1 + offset - lines[i]), offset, fileContents);
         return substitute(sl);
     }

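The newBaseLoc signature change above means the per-file location table keeps the source text itself rather than just reserving `size` index slots, so every SourceLoc derived from it can carry a fileContent slice for error context. A stripped-down sketch of the offset-to-line/column mapping such a table performs (field and method names loosely follow the hunk, but this is not the real BaseLoc):

struct FileLocTable
{
    const(char)[] filename;
    const(char)[] fileContents;  // now stored alongside the line table
    uint[] lineStarts = [0];     // byte offset at which each line begins

    // record the start of a new line while scanning the source once
    void addLine(uint offset) { lineStarts ~= offset; }

    // map a flat byte offset to line/column plus the content slice for context
    auto getSourceLoc(uint offset) const
    {
        size_t line = 0;
        while (line + 1 < lineStarts.length && lineStarts[line + 1] <= offset)
            line++;

        static struct SourceLocLite
        {
            const(char)[] filename;
            size_t line, column, fileOffset;
            const(char)[] fileContent;
        }
        return SourceLocLite(filename, line + 1, offset - lineStarts[line] + 1,
                             offset, fileContents);
    }
}

unittest
{
    auto tab = FileLocTable("demo.d", "int a;\nint b;\n");
    tab.addLine(7);                  // second line starts right after the first '\n'
    auto loc = tab.getSourceLoc(11); // byte offset of `b`
    assert(loc.line == 2 && loc.column == 5);
    assert(loc.fileContent[loc.fileOffset] == 'b');
}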

@@ -3277,9 +3277,19 @@ Type merge(Type type)

     case Tsarray:
         // prevents generating the mangle if the array dim is not yet known
-        if (!type.isTypeSArray().dim.isIntegerExp())
-            return type;
-        goto default;
+        if (auto ie = type.isTypeSArray().dim.isIntegerExp())
+        {
+            // After TypeSemantic, the length is always converted to size_t, but the parser
+            // usually generates regular integer types (e.g. in cast(const ubyte[2])) which
+            // it may try to merge, which then leads to failing implicit conversions as 2LU != 2
+            // according to Expression.equals. Only merge array types with size_t lengths for now.
+            // https://github.com/dlang/dmd/issues/21179
+            if (ie.type != Type.tsize_t)
+                return type;
+            goto default;
+        }
+
+        return type;

     case Tenum:
         break;

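The comment added above is the heart of issue 21179: the parser types the `2` in `cast(const ubyte[2][])` as a plain int, while semantic analysis stores static-array lengths as size_t (`2LU`), and an exact expression comparison treats those two dimensions as different even though their values match, so merging such a type produced bogus conversion errors. The new test21179.d further down exercises exactly that cast. A small, purely illustrative D snippet of the value-versus-type distinction involved (not compiler code):

import std.stdio : writeln;

void main()
{
    auto parsed   = 2;   // an int literal, like the dimension the parser writes down
    auto analyzed = 2LU; // a size_t/ulong value, like the dimension after TypeSemantic

    writeln(typeof(parsed).stringof, " vs ", typeof(analyzed).stringof); // int vs ulong
    writeln(parsed == analyzed); // the values compare equal (true) ...
    // ... but the two literals have different types, and a strict
    // Expression.equals-style identity check on typed dimension expressions
    // would not consider `ubyte[2]` and `ubyte[2LU]` the same merged type.
}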

@@ -0,0 +1,77 @@
+struct Nullable(T)
+{
+    static struct DontCallDestructorT
+    {
+        T payload;
+    }
+
+    DontCallDestructorT _value;
+
+    string toString() const
+    {
+        Appender!string app;
+        formatValueImpl(app, _value);
+        return null;
+    }
+}
+
+struct Appender(A)
+{
+    InPlaceAppender!A impl;
+}
+
+struct InPlaceAppender(T)
+{
+    static void toStringImpl(const T[] data)
+    {
+        string app;
+        formatValue(app, data);
+    }
+}
+
+void formatValueImpl(Writer, T)(Writer, const(T)) {}
+
+void formatValueImpl(Writer, T)(Writer w, T obj)
+    if (is(T == U[], U))
+{
+    formatValue(w, obj[0]);
+}
+
+enum HasToStringResult
+{
+    none,
+    bla
+}
+
+template hasToString(T)
+{
+    static if (is(typeof(
+        (T val) {
+            val.toString(s);
+        })))
+        enum hasToString = HasToStringResult.bla;
+    else
+        enum hasToString = HasToStringResult.none;
+}
+
+void formatValueImpl(Writer, T)(ref Writer w, T val)
+    if (is(T == struct) || is(T == union))
+{
+    static if (hasToString!T)
+        int dummy;
+
+    formatElement(w, val.tupleof);
+}
+
+void formatElement(Writer, T)(Writer w, T val)
+{
+    formatValueImpl(w, val);
+}
+
+void formatValue(Writer, T)(Writer w, T val)
+{
+    formatValueImpl(w, val);
+}


@@ -0,0 +1,12 @@
+import imports.test21098_phobos : Appender, Nullable;
+
+struct Type {
+    Nullable!(Type[]) templateArgs;
+}
+
+Type[] parseDeclarations() {
+    Appender!(Type[]) members;
+    return null;
+}
+
+enum ast = parseDeclarations();


@@ -1,4 +1,4 @@
-/* REQUIRED_ARGS: -wi
+/* REQUIRED_ARGS: -wi -verrors=simple
 TEST_OUTPUT:
 ---
 compilable/pragmapack.c(101): Warning: current pack attribute is default


@@ -19,12 +19,10 @@
 #include <limits.h>
 #include <locale.h>

-#ifndef __APPLE__ // /Applications/Xcode-14.2.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/usr/include/tgmath.h(39): Error: named parameter required before `...`
 #include <math.h>
 #ifndef _MSC_VER // C:\Program Files (x86)\Windows Kits\10\include\10.0.26100.0\ucrt\corecrt_math.h(93): Error: reinterpretation through overlapped field `f` is not allowed in CTFE
 float x = NAN;
 #endif
-#endif

 #ifndef _MSC_VER // setjmp.h(51): Error: missing tag `identifier` after `struct
 #include <setjmp.h>


@@ -0,0 +1,4 @@
+// https://github.com/dlang/dmd/issues/21098
+// EXTRA_FILES: imports/test21098b.d imports/test21098_phobos.d
+
+import imports.test21098b;


@@ -0,0 +1,8 @@
+// https://github.com/dlang/dmd/issues/21153
+alias AliasSeq(TList...) = TList;
+class DataClass;
+void reduce(DataClass[] r)
+{
+    alias Args = AliasSeq!(DataClass);
+    Args result = r[0];
+}


@@ -0,0 +1,11 @@
+// https://github.com/dlang/dmd/issues/21179
+void bigEndianToNative(ubyte[2] a) {}
+
+void main()
+{
+    ubyte[] arr;
+    const ubyte[2] bytes;
+    bigEndianToNative(bytes);
+
+    auto b = cast(const ubyte[2][]) arr;
+}


@@ -1,22 +1,24 @@
 /*
 REQUIRED_ARGS: -verrors=context
 TEST_OUTPUT:
 ---
-fail_compilation/fail_pretty_errors.d(27): Error: undefined identifier `a`
+fail_compilation/fail_pretty_errors.d(29): Error: undefined identifier `a`
 a = 1;
 ^
-fail_compilation/fail_pretty_errors.d-mixin-32(32): Error: undefined identifier `b`
-fail_compilation/fail_pretty_errors.d(37): Error: cannot implicitly convert expression `5` of type `int` to `string`
+fail_compilation/fail_pretty_errors.d-mixin-34(34): Error: undefined identifier `b`
+b = 1;
+^
+fail_compilation/fail_pretty_errors.d(39): Error: cannot implicitly convert expression `5` of type `int` to `string`
 string x = 5;
 ^
-fail_compilation/fail_pretty_errors.d(42): Error: mixin `fail_pretty_errors.testMixin2.mixinTemplate!()` error instantiating
+fail_compilation/fail_pretty_errors.d(44): Error: mixin `fail_pretty_errors.testMixin2.mixinTemplate!()` error instantiating
 mixin mixinTemplate;
 ^
-fail_compilation/fail_pretty_errors.d(48): Error: invalid array operation `"" + ""` (possible missing [])
+fail_compilation/fail_pretty_errors.d(50): Error: invalid array operation `"" + ""` (possible missing [])
 auto x = ""+"";
 ^
-fail_compilation/fail_pretty_errors.d(48): did you mean to concatenate (`"" ~ ""`) instead ?
-fail_compilation/fail_pretty_errors.d(51): Error: cannot implicitly convert expression `1111` of type `int` to `byte`
+fail_compilation/fail_pretty_errors.d(50): did you mean to concatenate (`"" ~ ""`) instead ?
+fail_compilation/fail_pretty_errors.d(53): Error: cannot implicitly convert expression `1111` of type `int` to `byte`
 byte ɑ = 1111;
 ^
 ---


@@ -1,7 +1,10 @@
 // check dsymbolSemantic analysis of C files
 /* TEST_OUTPUT:
+REQUIRED_ARGS: -verrors=context
 ---
 fail_compilation/failcstuff6.c(56): Error: enum member `failcstuff6.test_overflow.boom` initialization with `2147483647+1` causes overflow for type `int`
+boom,
+^
 ---
 */


@@ -55,7 +55,7 @@ string generateVersion(const string versionFile)
     enum workDir = __FILE_FULL_PATH__.dirName;
     const result = execute(["git", "-C", workDir, "describe", "--dirty"]);
-    return result.status == 0 ? result.output.strip : versionFile.readText;
+    return result.status == 0 ? result.output.strip : versionFile.readText.strip;
 }

 /**


@@ -558,7 +558,7 @@ protected:
     // of the executing thread.
     static ucontext_t sm_utxt = void;
     ucontext_t m_utxt = void;
-    ucontext_t* m_ucur = null;
+    package ucontext_t* m_ucur = null;
 }


@@ -168,12 +168,12 @@ typedef unsigned long long __uint64_t;
  */
 #define __STDC_NO_VLA__ 1
+#define _Float16 float

 #if linux // Microsoft won't allow the following macro
 // Ubuntu's assert.h uses this
 #define __PRETTY_FUNCTION__ __func__

 #ifndef __aarch64__
-#define _Float16 float
 #define _Float32 float
 #define _Float32x double
 #define _Float64 double