mirror of https://github.com/buggins/dlangui.git
obj model loader for #183
This commit is contained in:
parent
06d009d928
commit
7fc9dd9495
|
@ -256,6 +256,7 @@
|
|||
<Compile Include="src\dlangui\core\linestream.d" />
|
||||
<Compile Include="src\dlangui\core\logger.d" />
|
||||
<Compile Include="src\dlangui\core\math3d.d" />
|
||||
<Compile Include="src\dlangui\core\parseutils.d" />
|
||||
<Compile Include="src\dlangui\core\settings.d" />
|
||||
<Compile Include="src\dlangui\core\signals.d" />
|
||||
<Compile Include="src\dlangui\core\stdaction.d" />
|
||||
|
@ -270,6 +271,7 @@
|
|||
<Compile Include="src\dlangui\dml\annotations.d" />
|
||||
<Compile Include="src\dlangui\dml\dmlhighlight.d" />
|
||||
<Compile Include="src\dlangui\dml\parser.d" />
|
||||
<Compile Include="src\dlangui\dml\tokenizer.d" />
|
||||
<Compile Include="src\dlangui\graphics\colors.d" />
|
||||
<Compile Include="src\dlangui\graphics\drawbuf.d" />
|
||||
<Compile Include="src\dlangui\graphics\domrender.d" />
|
||||
|
@ -286,6 +288,7 @@
|
|||
<Compile Include="src\dlangui\graphics\scene\mesh.d" />
|
||||
<Compile Include="src\dlangui\graphics\scene\model.d" />
|
||||
<Compile Include="src\dlangui\graphics\scene\node.d" />
|
||||
<Compile Include="src\dlangui\graphics\scene\objimport.d" />
|
||||
<Compile Include="src\dlangui\graphics\scene\scene3d.d" />
|
||||
<Compile Include="src\dlangui\graphics\scene\transform.d" />
|
||||
<Compile Include="src\dlangui\graphics\xpm\xpmcolors.d" />
|
||||
|
|
|
@ -115,6 +115,7 @@
|
|||
<Compile Include="src\dlangui\core\linestream.d" />
|
||||
<Compile Include="src\dlangui\core\logger.d" />
|
||||
<Compile Include="src\dlangui\core\math3d.d" />
|
||||
<Compile Include="src\dlangui\core\parseutils.d" />
|
||||
<Compile Include="src\dlangui\core\settings.d" />
|
||||
<Compile Include="src\dlangui\core\signals.d" />
|
||||
<Compile Include="src\dlangui\core\stdaction.d" />
|
||||
|
@ -129,6 +130,7 @@
|
|||
<Compile Include="src\dlangui\dml\annotations.d" />
|
||||
<Compile Include="src\dlangui\dml\dmlhighlight.d" />
|
||||
<Compile Include="src\dlangui\dml\parser.d" />
|
||||
<Compile Include="src\dlangui\dml\tokenizer.d" />
|
||||
<Compile Include="src\dlangui\graphics\colors.d" />
|
||||
<Compile Include="src\dlangui\graphics\drawbuf.d" />
|
||||
<Compile Include="src\dlangui\graphics\domrender.d" />
|
||||
|
@ -145,6 +147,7 @@
|
|||
<Compile Include="src\dlangui\graphics\scene\mesh.d" />
|
||||
<Compile Include="src\dlangui\graphics\scene\model.d" />
|
||||
<Compile Include="src\dlangui\graphics\scene\node.d" />
|
||||
<Compile Include="src\dlangui\graphics\scene\objimport.d" />
|
||||
<Compile Include="src\dlangui\graphics\scene\scene3d.d" />
|
||||
<Compile Include="src\dlangui\graphics\scene\transform.d" />
|
||||
<Compile Include="src\dlangui\graphics\xpm\reader.d" />
|
||||
|
|
|
@ -748,6 +748,7 @@
|
|||
<File path="src\dlangui\core\linestream.d" />
|
||||
<File path="src\dlangui\core\logger.d" />
|
||||
<File path="src\dlangui\core\math3d.d" />
|
||||
<File path="src\dlangui\core\parseutils.d" />
|
||||
<File path="src\dlangui\core\queue.d" />
|
||||
<File path="src\dlangui\core\settings.d" />
|
||||
<File path="src\dlangui\core\signals.d" />
|
||||
|
@ -767,6 +768,7 @@
|
|||
<File path="src\dlangui\dml\annotations.d" />
|
||||
<File path="src\dlangui\dml\dmlhighlight.d" />
|
||||
<File path="src\dlangui\dml\parser.d" />
|
||||
<File path="src\dlangui\dml\tokenizer.d" />
|
||||
</Folder>
|
||||
<Folder name="graphics">
|
||||
<Folder name="scene">
|
||||
|
@ -777,6 +779,7 @@
|
|||
<File path="src\dlangui\graphics\scene\mesh.d" />
|
||||
<File path="src\dlangui\graphics\scene\model.d" />
|
||||
<File path="src\dlangui\graphics\scene\node.d" />
|
||||
<File path="src\dlangui\graphics\scene\objimport.d" />
|
||||
<File path="src\dlangui\graphics\scene\scene3d.d" />
|
||||
<File path="src\dlangui\graphics\scene\transform.d" />
|
||||
</Folder>
|
||||
|
|
|
@ -54,7 +54,7 @@
|
|||
<ccTransOpt>1</ccTransOpt>
|
||||
<program>$(DMDInstallDir)windows\bin\dmd.exe</program>
|
||||
<imppath>$(SolutionDir)/../dlangui/src $(SolutionDir)/../dlangui/3rdparty $(SolutionDir)/../dlangui/deps/DerelictGL3/source $(SolutionDir)/../dlangui/deps/DerelictUtil/source $(SolutionDir)/../dlangui/deps/DerelictFT/source $(SolutionDir)/../dlangui/deps/DerelictSDL2/source</imppath>
|
||||
<fileImppath>views views/res views/res/i18n views/res/mdpi views/res/hdpi views/res/shaders</fileImppath>
|
||||
<fileImppath>views views/res views/res/i18n views/res/mdpi views/res/hdpi views/res/shaders views/res/models</fileImppath>
|
||||
<outdir>$(ConfigurationName)</outdir>
|
||||
<objdir>$(OutDir)</objdir>
|
||||
<objname />
|
||||
|
@ -146,7 +146,7 @@
|
|||
<pic>0</pic>
|
||||
<cov>0</cov>
|
||||
<nofloat>0</nofloat>
|
||||
<Dversion>2.043</Dversion>
|
||||
<Dversion>2</Dversion>
|
||||
<ignoreUnsupportedPragmas>0</ignoreUnsupportedPragmas>
|
||||
<allinst>0</allinst>
|
||||
<stackStomp>0</stackStomp>
|
||||
|
@ -156,7 +156,7 @@
|
|||
<ccTransOpt>1</ccTransOpt>
|
||||
<program>$(DMDInstallDir)windows\bin\dmd.exe</program>
|
||||
<imppath>$(SolutionDir)/../dlangui/src $(SolutionDir)/../dlangui/3rdparty $(SolutionDir)/../dlangui/deps/DerelictGL3/source $(SolutionDir)/../dlangui/deps/DerelictUtil/source $(SolutionDir)/../dlangui/deps/DerelictFT/source $(SolutionDir)/../dlangui/deps/DerelictSDL2/source</imppath>
|
||||
<fileImppath>views views/res views/res/i18n views/res/mdpi views/res/hdpi views/res/shaders</fileImppath>
|
||||
<fileImppath>views views/res views/res/i18n views/res/mdpi views/res/hdpi views/res/shaders views/res/models</fileImppath>
|
||||
<outdir>$(ConfigurationName)</outdir>
|
||||
<objdir>$(OutDir)</objdir>
|
||||
<objname />
|
||||
|
@ -258,7 +258,7 @@
|
|||
<ccTransOpt>1</ccTransOpt>
|
||||
<program>$(DMDInstallDir)windows\bin\dmd.exe</program>
|
||||
<imppath>$(SolutionDir)/../dlangui/src $(SolutionDir)/../dlangui/3rdparty $(SolutionDir)/../dlangui/deps/DerelictGL3/source $(SolutionDir)/../dlangui/deps/DerelictUtil/source $(SolutionDir)/../dlangui/deps/DerelictFT/source $(SolutionDir)/../dlangui/deps/DerelictSDL2/source</imppath>
|
||||
<fileImppath>views views/res views/res/i18n views/res/mdpi views/res/hdpi views/res/shaders</fileImppath>
|
||||
<fileImppath>views views/res views/res/i18n views/res/mdpi views/res/hdpi views/res/shaders views/res/models</fileImppath>
|
||||
<outdir>$(ConfigurationName)</outdir>
|
||||
<objdir>$(OutDir)</objdir>
|
||||
<objname />
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
"license": "Boost",
|
||||
"authors": ["Vadim Lopatin"],
|
||||
|
||||
"stringImportPaths": ["views", "views/res", "views/res/i18n", "views/res/mdpi", "views/res/shaders"],
|
||||
"stringImportPaths": ["views", "views/res", "views/res/i18n", "views/res/mdpi", "views/res/shaders", "views/res/models"],
|
||||
|
||||
"targetPath": "bin",
|
||||
"targetName": "d3d",
|
||||
|
|
|
@ -8,6 +8,7 @@ import dlangui.graphics.scene.material;
|
|||
import dlangui.graphics.scene.effect;
|
||||
import dlangui.graphics.scene.model;
|
||||
import dlangui.graphics.scene.node;
|
||||
import dlangui.graphics.scene.objimport;
|
||||
import dlangui.graphics.glsupport;
|
||||
import dlangui.graphics.gldrawbuf;
|
||||
import derelict.opengl3.gl3;
|
||||
|
@ -133,6 +134,10 @@ class UiWidget : VerticalLayout, CellVisitor {
|
|||
Node3d cubeNode = new Node3d("cubes", cubeDrawable);
|
||||
_scene.addChild(cubeNode);
|
||||
|
||||
ObjModelImport importer;
|
||||
string src = loadTextResource("suzanne.obj");
|
||||
importer.parse(src);
|
||||
|
||||
|
||||
_minerMesh = new Mesh(VertexFormat(VertexElementType.POSITION, VertexElementType.NORMAL, VertexElementType.COLOR, VertexElementType.TEXCOORD0));
|
||||
_world = new World();
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -6,3 +6,4 @@ res/mdpi/crate.png
|
|||
res/mdpi/blocks.png
|
||||
res/shaders/textured.vert
|
||||
res/shaders/textured.frag
|
||||
res/models/suzanne.obj
|
||||
|
|
|
@ -0,0 +1,42 @@
|
|||
module dlangui.core.parseutils;
|
||||
|
||||
long parseLong(inout string v, long defValue = 0) {
|
||||
int len = cast(int)v.length;
|
||||
if (len == 0)
|
||||
return defValue;
|
||||
int sign = 1;
|
||||
long value = 0;
|
||||
int digits = 0;
|
||||
foreach(i; 0 .. len) {
|
||||
char ch = v[i];
|
||||
if (ch == '-') {
|
||||
if (i != 0)
|
||||
return defValue;
|
||||
sign = -1;
|
||||
} else if (ch >= '0' && ch <= '9') {
|
||||
digits++;
|
||||
value = value * 10 + (ch - '0');
|
||||
} else {
|
||||
return defValue;
|
||||
}
|
||||
}
|
||||
return digits > 0 ? (sign > 0 ? value : -value) : defValue;
|
||||
}
|
||||
|
||||
ulong parseULong(inout string v, ulong defValue = 0) {
|
||||
int len = cast(int)v.length;
|
||||
if (len == 0)
|
||||
return defValue;
|
||||
ulong value = 0;
|
||||
int digits = 0;
|
||||
foreach(i; 0 .. len) {
|
||||
char ch = v[i];
|
||||
if (ch >= '0' && ch <= '9') {
|
||||
digits++;
|
||||
value = value * 10 + (ch - '0');
|
||||
} else {
|
||||
return defValue;
|
||||
}
|
||||
}
|
||||
return digits > 0 ? value : defValue;
|
||||
}
|
|
@ -28,6 +28,7 @@ module dlangui.core.settings;
|
|||
|
||||
import dlangui.core.logger;
|
||||
import dlangui.core.types : parseHexDigit;
|
||||
public import dlangui.core.parseutils;
|
||||
import std.range;
|
||||
//import std.algorithm : clamp, equal;
|
||||
import std.algorithm : equal;
|
||||
|
@ -1906,44 +1907,4 @@ final class Setting {
|
|||
}
|
||||
}
|
||||
|
||||
long parseLong(inout string v, long defValue = 0) {
|
||||
int len = cast(int)v.length;
|
||||
if (len == 0)
|
||||
return defValue;
|
||||
int sign = 1;
|
||||
long value = 0;
|
||||
int digits = 0;
|
||||
foreach(i; 0 .. len) {
|
||||
char ch = v[i];
|
||||
if (ch == '-') {
|
||||
if (i != 0)
|
||||
return defValue;
|
||||
sign = -1;
|
||||
} else if (ch >= '0' && ch <= '9') {
|
||||
digits++;
|
||||
value = value * 10 + (ch - '0');
|
||||
} else {
|
||||
return defValue;
|
||||
}
|
||||
}
|
||||
return digits > 0 ? (sign > 0 ? value : -value) : defValue;
|
||||
}
|
||||
|
||||
ulong parseULong(inout string v, ulong defValue = 0) {
|
||||
int len = cast(int)v.length;
|
||||
if (len == 0)
|
||||
return defValue;
|
||||
ulong value = 0;
|
||||
int digits = 0;
|
||||
foreach(i; 0 .. len) {
|
||||
char ch = v[i];
|
||||
if (ch >= '0' && ch <= '9') {
|
||||
digits++;
|
||||
value = value * 10 + (ch - '0');
|
||||
} else {
|
||||
return defValue;
|
||||
}
|
||||
}
|
||||
return digits > 0 ? value : defValue;
|
||||
}
|
||||
|
||||
|
|
|
@ -34,26 +34,7 @@ import std.algorithm : equal, min, max;
|
|||
import std.utf : toUTF32, toUTF8;
|
||||
import std.array : join;
|
||||
public import dlangui.dml.annotations;
|
||||
|
||||
class ParserException : Exception {
|
||||
protected string _msg;
|
||||
protected string _file;
|
||||
protected int _line;
|
||||
protected int _pos;
|
||||
|
||||
@property string file() { return _file; }
|
||||
@property string msg() { return _msg; }
|
||||
@property int line() { return _line; }
|
||||
@property int pos() { return _pos; }
|
||||
|
||||
this(string msg, string file, int line, int pos) {
|
||||
super(msg ~ " at " ~ file ~ " line " ~ to!string(line) ~ " column " ~ to!string(pos));
|
||||
_msg = msg;
|
||||
_file = file;
|
||||
_line = line;
|
||||
_pos = pos;
|
||||
}
|
||||
}
|
||||
public import dlangui.dml.tokenizer;
|
||||
|
||||
/// parser exception - unknown (unregistered) widget name
|
||||
class UnknownWidgetException : ParserException {
|
||||
|
@ -78,447 +59,6 @@ class UnknownPropertyException : UnknownWidgetException {
|
|||
}
|
||||
}
|
||||
|
||||
enum TokenType : ushort {
|
||||
/// end of file
|
||||
eof,
|
||||
/// end of line
|
||||
eol,
|
||||
/// whitespace
|
||||
whitespace,
|
||||
/// string literal
|
||||
str,
|
||||
/// integer literal
|
||||
integer,
|
||||
/// floating point literal
|
||||
floating,
|
||||
/// comment
|
||||
comment,
|
||||
/// ident
|
||||
ident,
|
||||
/// error
|
||||
error,
|
||||
// operators
|
||||
/// : operator
|
||||
colon,
|
||||
/// . operator
|
||||
dot,
|
||||
/// ; operator
|
||||
semicolon,
|
||||
/// , operator
|
||||
comma,
|
||||
/// - operator
|
||||
minus,
|
||||
/// + operator
|
||||
plus,
|
||||
/// [
|
||||
curlyOpen,
|
||||
/// ]
|
||||
curlyClose,
|
||||
/// (
|
||||
open,
|
||||
/// )
|
||||
close,
|
||||
/// [
|
||||
squareOpen,
|
||||
/// ]
|
||||
squareClose,
|
||||
}
|
||||
|
||||
struct Token {
|
||||
TokenType type;
|
||||
ushort line;
|
||||
ushort pos;
|
||||
bool multiline;
|
||||
string text;
|
||||
union {
|
||||
int intvalue;
|
||||
double floatvalue;
|
||||
}
|
||||
public @property string toString() const {
|
||||
if (type == TokenType.integer)
|
||||
return "" ~ to!string(line) ~ ":" ~ to!string(pos) ~ " " ~ to!string(type) ~ " " ~ to!string(intvalue);
|
||||
else if (type == TokenType.floating)
|
||||
return "" ~ to!string(line) ~ ":" ~ to!string(pos) ~ " " ~ to!string(type) ~ " " ~ to!string(floatvalue);
|
||||
else
|
||||
return "" ~ to!string(line) ~ ":" ~ to!string(pos) ~ " " ~ to!string(type) ~ " \"" ~ text ~ "\"";
|
||||
}
|
||||
@property bool isMultilineComment() {
|
||||
return type == TokenType.comment && multiline;
|
||||
}
|
||||
}
|
||||
|
||||
/// simple tokenizer for DlangUI ML
|
||||
class Tokenizer {
|
||||
protected LineStream _lines;
|
||||
|
||||
dchar[] _lineText;
|
||||
ushort _line;
|
||||
ushort _pos;
|
||||
int _len;
|
||||
dchar _prevChar;
|
||||
string _filename;
|
||||
|
||||
Token _token;
|
||||
|
||||
enum : int {
|
||||
EOF_CHAR = 0x001A,
|
||||
EOL_CHAR = 0x000A
|
||||
}
|
||||
|
||||
this(string source, string filename = "") {
|
||||
_filename = filename;
|
||||
_lines = LineStream.create(source, filename);
|
||||
_lineText = _lines.readLine();
|
||||
_len = cast(int)_lineText.length;
|
||||
_line = 0;
|
||||
_pos = 0;
|
||||
_prevChar = 0;
|
||||
}
|
||||
|
||||
~this() {
|
||||
destroy(_lines);
|
||||
_lines = null;
|
||||
}
|
||||
|
||||
protected dchar peekChar() {
|
||||
if (_pos < _len)
|
||||
return _lineText[_pos];
|
||||
else if (_lineText is null)
|
||||
return EOF_CHAR;
|
||||
return EOL_CHAR;
|
||||
}
|
||||
|
||||
protected dchar peekNextChar() {
|
||||
if (_pos < _len - 1)
|
||||
return _lineText[_pos + 1];
|
||||
else if (_lineText is null)
|
||||
return EOF_CHAR;
|
||||
return EOL_CHAR;
|
||||
}
|
||||
|
||||
protected dchar nextChar() {
|
||||
if (_pos < _len)
|
||||
_prevChar = _lineText[_pos++];
|
||||
else if (_lineText is null)
|
||||
_prevChar = EOF_CHAR;
|
||||
else {
|
||||
_lineText = _lines.readLine();
|
||||
_len = cast(int)_lineText.length;
|
||||
_line++;
|
||||
_pos = 0;
|
||||
_prevChar = EOL_CHAR;
|
||||
}
|
||||
return _prevChar;
|
||||
}
|
||||
|
||||
protected dchar skipChar() {
|
||||
nextChar();
|
||||
return peekChar();
|
||||
}
|
||||
|
||||
protected void setTokenStart() {
|
||||
_token.pos = _pos;
|
||||
_token.line = _line;
|
||||
_token.text = null;
|
||||
_token.intvalue = 0;
|
||||
}
|
||||
|
||||
protected ref const(Token) parseEof() {
|
||||
_token.type = TokenType.eof;
|
||||
return _token;
|
||||
}
|
||||
|
||||
protected ref const(Token) parseEol() {
|
||||
_token.type = TokenType.eol;
|
||||
nextChar();
|
||||
return _token;
|
||||
}
|
||||
|
||||
protected ref const(Token) parseWhiteSpace() {
|
||||
_token.type = TokenType.whitespace;
|
||||
for(;;) {
|
||||
dchar ch = skipChar();
|
||||
if (ch != ' ' && ch != '\t')
|
||||
break;
|
||||
}
|
||||
return _token;
|
||||
}
|
||||
|
||||
static bool isAlpha(dchar ch) {
|
||||
return (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || ch == '_';
|
||||
}
|
||||
|
||||
static bool isNum(dchar ch) {
|
||||
return (ch >= '0' && ch <= '9');
|
||||
}
|
||||
|
||||
static bool isAlphaNum(dchar ch) {
|
||||
return isNum(ch) || isAlpha(ch);
|
||||
}
|
||||
|
||||
private char[] _stringbuf;
|
||||
protected ref const(Token) parseString() {
|
||||
_token.type = TokenType.str;
|
||||
//skipChar(); // skip "
|
||||
bool lastBackslash = false;
|
||||
_stringbuf.length = 0;
|
||||
for(;;) {
|
||||
dchar ch = skipChar();
|
||||
if (ch == '\"') {
|
||||
if (lastBackslash) {
|
||||
_stringbuf ~= ch;
|
||||
lastBackslash = false;
|
||||
} else {
|
||||
skipChar();
|
||||
break;
|
||||
}
|
||||
} else if (ch == '\\') {
|
||||
if (lastBackslash) {
|
||||
_stringbuf ~= ch;
|
||||
lastBackslash = false;
|
||||
} else {
|
||||
lastBackslash = true;
|
||||
}
|
||||
} else if (ch == EOL_CHAR) {
|
||||
skipChar();
|
||||
break;
|
||||
} else if (lastBackslash) {
|
||||
if (ch == 'n')
|
||||
ch = '\n';
|
||||
else if (ch == 't')
|
||||
ch = '\t';
|
||||
_stringbuf ~= ch;
|
||||
lastBackslash = false;
|
||||
} else {
|
||||
_stringbuf ~= ch;
|
||||
lastBackslash = false;
|
||||
}
|
||||
}
|
||||
_token.text = _stringbuf.dup;
|
||||
return _token;
|
||||
}
|
||||
|
||||
protected ref const(Token) parseIdent() {
|
||||
_token.type = TokenType.ident;
|
||||
_stringbuf.length = 0;
|
||||
_stringbuf ~= peekChar();
|
||||
for(;;) {
|
||||
dchar ch = skipChar();
|
||||
if (!isAlphaNum(ch))
|
||||
break;
|
||||
_stringbuf ~= ch;
|
||||
}
|
||||
_token.text = _stringbuf.dup;
|
||||
return _token;
|
||||
}
|
||||
|
||||
protected ref const(Token) parseFloating(int n) {
|
||||
_token.type = TokenType.floating;
|
||||
dchar ch = peekChar();
|
||||
// floating point
|
||||
int div = 1;
|
||||
int n2 = 0;
|
||||
for (;;) {
|
||||
ch = skipChar();
|
||||
if (!isNum(ch))
|
||||
break;
|
||||
n2 = n2 * 10 + (ch - '0');
|
||||
div *= 10;
|
||||
}
|
||||
_token.floatvalue = cast(double)n + (div > 0 ? cast(double)n2 / div : 0.0);
|
||||
string suffix;
|
||||
if (ch == '%') {
|
||||
suffix ~= ch;
|
||||
ch = skipChar();
|
||||
} else {
|
||||
while (ch >= 'a' && ch <= 'z') {
|
||||
suffix ~= ch;
|
||||
ch = skipChar();
|
||||
}
|
||||
}
|
||||
if (isAlphaNum(ch) || ch == '.')
|
||||
return parseError();
|
||||
_token.text = suffix;
|
||||
return _token;
|
||||
}
|
||||
|
||||
protected ref const(Token) parseHex(int prefixLen) {
|
||||
dchar ch = 0;
|
||||
foreach(i; 0 .. prefixLen)
|
||||
ch = skipChar();
|
||||
|
||||
uint n = parseHexDigit(ch);
|
||||
if (n == uint.max)
|
||||
return parseError();
|
||||
|
||||
for(;;) {
|
||||
ch = skipChar();
|
||||
uint digit = parseHexDigit(ch);
|
||||
if (digit == uint.max)
|
||||
break;
|
||||
n = (n << 4) + digit;
|
||||
}
|
||||
string suffix;
|
||||
if (ch == '%') {
|
||||
suffix ~= ch;
|
||||
ch = skipChar();
|
||||
} else {
|
||||
while (ch >= 'a' && ch <= 'z') {
|
||||
suffix ~= ch;
|
||||
ch = skipChar();
|
||||
}
|
||||
}
|
||||
if (isAlphaNum(ch) || ch == '.')
|
||||
return parseError();
|
||||
_token.type = TokenType.integer;
|
||||
_token.intvalue = n;
|
||||
_token.text = suffix;
|
||||
return _token;
|
||||
}
|
||||
|
||||
protected ref const(Token) parseNumber() {
|
||||
dchar ch = peekChar();
|
||||
uint n = ch - '0';
|
||||
for(;;) {
|
||||
ch = skipChar();
|
||||
if (!isNum(ch))
|
||||
break;
|
||||
n = n * 10 + (ch - '0');
|
||||
}
|
||||
if (ch == '.')
|
||||
return parseFloating(n);
|
||||
string suffix;
|
||||
if (ch == '%') {
|
||||
suffix ~= ch;
|
||||
ch = skipChar();
|
||||
} else {
|
||||
while (ch >= 'a' && ch <= 'z') {
|
||||
suffix ~= ch;
|
||||
ch = skipChar();
|
||||
}
|
||||
}
|
||||
if (isAlphaNum(ch) || ch == '.')
|
||||
return parseError();
|
||||
_token.type = TokenType.integer;
|
||||
_token.intvalue = n;
|
||||
_token.text = suffix;
|
||||
return _token;
|
||||
}
|
||||
|
||||
protected ref const(Token) parseSingleLineComment() {
|
||||
for(;;) {
|
||||
dchar ch = skipChar();
|
||||
if (ch == EOL_CHAR || ch == EOF_CHAR)
|
||||
break;
|
||||
}
|
||||
_token.type = TokenType.comment;
|
||||
_token.multiline = false;
|
||||
return _token;
|
||||
}
|
||||
|
||||
protected ref const(Token) parseMultiLineComment() {
|
||||
skipChar();
|
||||
for(;;) {
|
||||
dchar ch = skipChar();
|
||||
if (ch == '*' && peekNextChar() == '/') {
|
||||
skipChar();
|
||||
skipChar();
|
||||
break;
|
||||
}
|
||||
if (ch == EOF_CHAR)
|
||||
break;
|
||||
}
|
||||
_token.type = TokenType.comment;
|
||||
_token.multiline = true;
|
||||
return _token;
|
||||
}
|
||||
|
||||
protected ref const(Token) parseError() {
|
||||
_token.type = TokenType.error;
|
||||
for(;;) {
|
||||
dchar ch = skipChar();
|
||||
if (ch == ' ' || ch == '\t' || ch == EOL_CHAR || ch == EOF_CHAR)
|
||||
break;
|
||||
}
|
||||
return _token;
|
||||
}
|
||||
|
||||
protected ref const(Token) parseOp(TokenType op) {
|
||||
_token.type = op;
|
||||
skipChar();
|
||||
return _token;
|
||||
}
|
||||
|
||||
/// get next token
|
||||
ref const(Token) nextToken() {
|
||||
setTokenStart();
|
||||
dchar ch = peekChar();
|
||||
if (ch == EOF_CHAR)
|
||||
return parseEof();
|
||||
if (ch == EOL_CHAR)
|
||||
return parseEol();
|
||||
if (ch == ' ' || ch == '\t')
|
||||
return parseWhiteSpace();
|
||||
if (ch == '\"')
|
||||
return parseString();
|
||||
if (isAlpha(ch))
|
||||
return parseIdent();
|
||||
if (ch == '0' && peekNextChar == 'x')
|
||||
return parseHex(2);
|
||||
if (ch == '#')
|
||||
return parseHex(1);
|
||||
if (isNum(ch))
|
||||
return parseNumber();
|
||||
if (ch == '.' && isNum(peekNextChar()))
|
||||
return parseFloating(0);
|
||||
if (ch == '/' && peekNextChar() == '/')
|
||||
return parseSingleLineComment();
|
||||
if (ch == '/' && peekNextChar() == '*')
|
||||
return parseMultiLineComment();
|
||||
switch (ch) {
|
||||
case '.': return parseOp(TokenType.dot);
|
||||
case ':': return parseOp(TokenType.colon);
|
||||
case ';': return parseOp(TokenType.semicolon);
|
||||
case ',': return parseOp(TokenType.comma);
|
||||
case '-': return parseOp(TokenType.minus);
|
||||
case '+': return parseOp(TokenType.plus);
|
||||
case '{': return parseOp(TokenType.curlyOpen);
|
||||
case '}': return parseOp(TokenType.curlyClose);
|
||||
case '(': return parseOp(TokenType.open);
|
||||
case ')': return parseOp(TokenType.close);
|
||||
case '[': return parseOp(TokenType.squareOpen);
|
||||
case ']': return parseOp(TokenType.squareClose);
|
||||
default:
|
||||
return parseError();
|
||||
}
|
||||
}
|
||||
|
||||
string getContextSource() {
|
||||
string s = toUTF8(_lineText);
|
||||
if (_pos == 0)
|
||||
return " near `^^^" ~ s[0..min($,30)] ~ "`";
|
||||
if (_pos >= _len)
|
||||
return " near `" ~ s[max(_len - 30, 0) .. $] ~ "^^^`";
|
||||
return " near `" ~ s[max(_pos - 15, 0) .. _pos] ~ "^^^" ~ s[_pos .. min(_pos + 15, $)] ~ "`";
|
||||
}
|
||||
|
||||
void emitError(string msg) {
|
||||
throw new ParserException(msg ~ getContextSource(), _filename, _token.line, _token.pos);
|
||||
}
|
||||
|
||||
void emitUnknownPropertyError(string objectName, string propName) {
|
||||
throw new UnknownPropertyException("Unknown property " ~ objectName ~ "." ~ propName ~ getContextSource(), objectName, propName, _filename, _token.line, _token.pos);
|
||||
}
|
||||
|
||||
void emitUnknownObjectError(string objectName) {
|
||||
throw new UnknownWidgetException("Unknown widget type " ~ objectName ~ getContextSource(), objectName, _filename, _token.line, _token.pos);
|
||||
}
|
||||
|
||||
void emitError(string msg, ref const Token token) {
|
||||
throw new ParserException(msg, _filename, token.line, token.pos);
|
||||
}
|
||||
}
|
||||
|
||||
class MLParser {
|
||||
protected string _code;
|
||||
protected string _filename;
|
||||
|
@ -590,11 +130,11 @@ class MLParser {
|
|||
}
|
||||
|
||||
protected void unknownObjectError(string objectName) {
|
||||
_tokenizer.emitUnknownObjectError(objectName);
|
||||
throw new UnknownWidgetException("Unknown widget type " ~ objectName ~ _tokenizer.getContextSource(), objectName, _tokenizer.filename, _tokenizer.line, _tokenizer.pos);
|
||||
}
|
||||
|
||||
protected void unknownPropertyError(string objectName, string propName) {
|
||||
_tokenizer.emitUnknownPropertyError(objectName, propName);
|
||||
throw new UnknownPropertyException("Unknown property " ~ objectName ~ "." ~ propName ~ _tokenizer.getContextSource(), objectName, propName, _tokenizer.filename, _tokenizer.line, _tokenizer.pos);
|
||||
}
|
||||
|
||||
Widget createWidget(string name) {
|
||||
|
|
|
@ -0,0 +1,501 @@
|
|||
module dlangui.dml.tokenizer;
|
||||
|
||||
import dlangui.core.types;
|
||||
import dlangui.core.linestream;
|
||||
|
||||
import std.conv : to;
|
||||
import std.utf : toUTF32, toUTF8;
|
||||
import std.algorithm : equal, min, max;
|
||||
|
||||
enum TokenType : ushort {
|
||||
/// end of file
|
||||
eof,
|
||||
/// end of line
|
||||
eol,
|
||||
/// whitespace
|
||||
whitespace,
|
||||
/// string literal
|
||||
str,
|
||||
/// integer literal
|
||||
integer,
|
||||
/// floating point literal
|
||||
floating,
|
||||
/// comment
|
||||
comment,
|
||||
/// ident
|
||||
ident,
|
||||
/// error
|
||||
error,
|
||||
// operators
|
||||
/// : operator
|
||||
colon,
|
||||
/// . operator
|
||||
dot,
|
||||
/// ; operator
|
||||
semicolon,
|
||||
/// / operator
|
||||
divide,
|
||||
/// , operator
|
||||
comma,
|
||||
/// - operator
|
||||
minus,
|
||||
/// + operator
|
||||
plus,
|
||||
/// [
|
||||
curlyOpen,
|
||||
/// ]
|
||||
curlyClose,
|
||||
/// (
|
||||
open,
|
||||
/// )
|
||||
close,
|
||||
/// [
|
||||
squareOpen,
|
||||
/// ]
|
||||
squareClose,
|
||||
}
|
||||
|
||||
struct Token {
|
||||
TokenType type;
|
||||
ushort line;
|
||||
ushort pos;
|
||||
bool multiline;
|
||||
string text;
|
||||
union {
|
||||
int intvalue;
|
||||
double floatvalue;
|
||||
}
|
||||
public @property string toString() const {
|
||||
if (type == TokenType.integer)
|
||||
return "" ~ to!string(line) ~ ":" ~ to!string(pos) ~ " " ~ to!string(type) ~ " " ~ to!string(intvalue);
|
||||
else if (type == TokenType.floating)
|
||||
return "" ~ to!string(line) ~ ":" ~ to!string(pos) ~ " " ~ to!string(type) ~ " " ~ to!string(floatvalue);
|
||||
else
|
||||
return "" ~ to!string(line) ~ ":" ~ to!string(pos) ~ " " ~ to!string(type) ~ " \"" ~ text ~ "\"";
|
||||
}
|
||||
@property bool isMultilineComment() {
|
||||
return type == TokenType.comment && multiline;
|
||||
}
|
||||
}
|
||||
|
||||
class ParserException : Exception {
|
||||
protected string _msg;
|
||||
protected string _file;
|
||||
protected int _line;
|
||||
protected int _pos;
|
||||
|
||||
@property string file() { return _file; }
|
||||
@property string msg() { return _msg; }
|
||||
@property int line() { return _line; }
|
||||
@property int pos() { return _pos; }
|
||||
|
||||
this(string msg, string file, int line, int pos) {
|
||||
super(msg ~ " at " ~ file ~ " line " ~ to!string(line) ~ " column " ~ to!string(pos));
|
||||
_msg = msg;
|
||||
_file = file;
|
||||
_line = line;
|
||||
_pos = pos;
|
||||
}
|
||||
}
|
||||
|
||||
/// simple tokenizer for DlangUI ML
|
||||
class Tokenizer {
|
||||
|
||||
protected string[] _singleLineCommentPrefixes = ["//"];
|
||||
protected LineStream _lines;
|
||||
protected dchar[] _lineText;
|
||||
protected ushort _line;
|
||||
protected ushort _pos;
|
||||
protected int _len;
|
||||
protected dchar _prevChar;
|
||||
protected string _filename;
|
||||
protected Token _token;
|
||||
|
||||
enum : int {
|
||||
EOF_CHAR = 0x001A,
|
||||
EOL_CHAR = 0x000A
|
||||
}
|
||||
|
||||
this(string source, string filename = "", string[] singleLineCommentPrefixes = ["//"]) {
|
||||
_singleLineCommentPrefixes = singleLineCommentPrefixes;
|
||||
_filename = filename;
|
||||
_lines = LineStream.create(source, filename);
|
||||
_lineText = _lines.readLine();
|
||||
_len = cast(int)_lineText.length;
|
||||
_line = 0;
|
||||
_pos = 0;
|
||||
_prevChar = 0;
|
||||
}
|
||||
|
||||
~this() {
|
||||
destroy(_lines);
|
||||
_lines = null;
|
||||
}
|
||||
|
||||
protected dchar peekChar() {
|
||||
if (_pos < _len)
|
||||
return _lineText[_pos];
|
||||
else if (_lineText is null)
|
||||
return EOF_CHAR;
|
||||
return EOL_CHAR;
|
||||
}
|
||||
|
||||
protected dchar peekNextChar() {
|
||||
if (_pos < _len - 1)
|
||||
return _lineText[_pos + 1];
|
||||
else if (_lineText is null)
|
||||
return EOF_CHAR;
|
||||
return EOL_CHAR;
|
||||
}
|
||||
|
||||
protected dchar nextChar() {
|
||||
if (_pos < _len)
|
||||
_prevChar = _lineText[_pos++];
|
||||
else if (_lineText is null)
|
||||
_prevChar = EOF_CHAR;
|
||||
else {
|
||||
_lineText = _lines.readLine();
|
||||
_len = cast(int)_lineText.length;
|
||||
_line++;
|
||||
_pos = 0;
|
||||
_prevChar = EOL_CHAR;
|
||||
}
|
||||
return _prevChar;
|
||||
}
|
||||
|
||||
protected dchar skipChar() {
|
||||
nextChar();
|
||||
return peekChar();
|
||||
}
|
||||
|
||||
protected void setTokenStart() {
|
||||
_token.pos = _pos;
|
||||
_token.line = _line;
|
||||
_token.text = null;
|
||||
_token.intvalue = 0;
|
||||
}
|
||||
|
||||
protected ref const(Token) parseEof() {
|
||||
_token.type = TokenType.eof;
|
||||
return _token;
|
||||
}
|
||||
|
||||
protected ref const(Token) parseEol() {
|
||||
_token.type = TokenType.eol;
|
||||
nextChar();
|
||||
return _token;
|
||||
}
|
||||
|
||||
protected ref const(Token) parseWhiteSpace() {
|
||||
_token.type = TokenType.whitespace;
|
||||
for(;;) {
|
||||
dchar ch = skipChar();
|
||||
if (ch != ' ' && ch != '\t')
|
||||
break;
|
||||
}
|
||||
return _token;
|
||||
}
|
||||
|
||||
static bool isAlpha(dchar ch) {
|
||||
return (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || ch == '_';
|
||||
}
|
||||
|
||||
static bool isNum(dchar ch) {
|
||||
return (ch >= '0' && ch <= '9');
|
||||
}
|
||||
|
||||
static bool isAlphaNum(dchar ch) {
|
||||
return isNum(ch) || isAlpha(ch);
|
||||
}
|
||||
|
||||
private char[] _stringbuf;
|
||||
protected ref const(Token) parseString() {
|
||||
_token.type = TokenType.str;
|
||||
//skipChar(); // skip "
|
||||
bool lastBackslash = false;
|
||||
_stringbuf.length = 0;
|
||||
for(;;) {
|
||||
dchar ch = skipChar();
|
||||
if (ch == '\"') {
|
||||
if (lastBackslash) {
|
||||
_stringbuf ~= ch;
|
||||
lastBackslash = false;
|
||||
} else {
|
||||
skipChar();
|
||||
break;
|
||||
}
|
||||
} else if (ch == '\\') {
|
||||
if (lastBackslash) {
|
||||
_stringbuf ~= ch;
|
||||
lastBackslash = false;
|
||||
} else {
|
||||
lastBackslash = true;
|
||||
}
|
||||
} else if (ch == EOL_CHAR) {
|
||||
skipChar();
|
||||
break;
|
||||
} else if (lastBackslash) {
|
||||
if (ch == 'n')
|
||||
ch = '\n';
|
||||
else if (ch == 't')
|
||||
ch = '\t';
|
||||
_stringbuf ~= ch;
|
||||
lastBackslash = false;
|
||||
} else {
|
||||
_stringbuf ~= ch;
|
||||
lastBackslash = false;
|
||||
}
|
||||
}
|
||||
_token.text = _stringbuf.dup;
|
||||
return _token;
|
||||
}
|
||||
|
||||
protected ref const(Token) parseIdent() {
|
||||
_token.type = TokenType.ident;
|
||||
_stringbuf.length = 0;
|
||||
_stringbuf ~= peekChar();
|
||||
for(;;) {
|
||||
dchar ch = skipChar();
|
||||
if (!isAlphaNum(ch))
|
||||
break;
|
||||
_stringbuf ~= ch;
|
||||
}
|
||||
_token.text = _stringbuf.dup;
|
||||
return _token;
|
||||
}
|
||||
|
||||
protected ref const(Token) parseFloating(int n) {
|
||||
_token.type = TokenType.floating;
|
||||
dchar ch = peekChar();
|
||||
// floating point
|
||||
int div = 1;
|
||||
int n2 = 0;
|
||||
for (;;) {
|
||||
ch = skipChar();
|
||||
if (!isNum(ch))
|
||||
break;
|
||||
n2 = n2 * 10 + (ch - '0');
|
||||
div *= 10;
|
||||
}
|
||||
_token.floatvalue = cast(double)n + (div > 0 ? cast(double)n2 / div : 0.0);
|
||||
string suffix;
|
||||
if (ch == '%') {
|
||||
suffix ~= ch;
|
||||
ch = skipChar();
|
||||
} else {
|
||||
while (ch >= 'a' && ch <= 'z') {
|
||||
suffix ~= ch;
|
||||
ch = skipChar();
|
||||
}
|
||||
}
|
||||
if (isAlphaNum(ch) || ch == '.')
|
||||
return parseError();
|
||||
_token.text = suffix;
|
||||
return _token;
|
||||
}
|
||||
|
||||
protected ref const(Token) parseHex(int prefixLen) {
|
||||
dchar ch = 0;
|
||||
foreach(i; 0 .. prefixLen)
|
||||
ch = skipChar();
|
||||
|
||||
uint n = parseHexDigit(ch);
|
||||
if (n == uint.max)
|
||||
return parseError();
|
||||
|
||||
for(;;) {
|
||||
ch = skipChar();
|
||||
uint digit = parseHexDigit(ch);
|
||||
if (digit == uint.max)
|
||||
break;
|
||||
n = (n << 4) + digit;
|
||||
}
|
||||
string suffix;
|
||||
if (ch == '%') {
|
||||
suffix ~= ch;
|
||||
ch = skipChar();
|
||||
} else {
|
||||
while (ch >= 'a' && ch <= 'z') {
|
||||
suffix ~= ch;
|
||||
ch = skipChar();
|
||||
}
|
||||
}
|
||||
if (isAlphaNum(ch) || ch == '.')
|
||||
return parseError();
|
||||
_token.type = TokenType.integer;
|
||||
_token.intvalue = n;
|
||||
_token.text = suffix;
|
||||
return _token;
|
||||
}
|
||||
|
||||
/// Parse a decimal literal starting at the current digit. Delegates to
/// parseFloating() when a '.' follows the integer part; otherwise produces an
/// integer token with an optional '%' or lowercase-letter suffix in _token.text.
protected ref const(Token) parseNumber() {
    dchar c = peekChar();
    uint value = c - '0';
    c = skipChar();
    while (isNum(c)) {
        value = value * 10 + (c - '0');
        c = skipChar();
    }
    if (c == '.')
        return parseFloating(value);
    string suffix;
    if (c == '%') {
        suffix ~= c;
        c = skipChar();
    } else {
        for (; c >= 'a' && c <= 'z'; c = skipChar())
            suffix ~= c;
    }
    // reject trailing alnum or '.' after the suffix
    if (isAlphaNum(c) || c == '.')
        return parseError();
    _token.type = TokenType.integer;
    _token.intvalue = value;
    _token.text = suffix;
    return _token;
}
|
||||
|
||||
/// Consume the rest of the current line as a single-line comment token;
/// stops at end-of-line or end-of-file.
protected ref const(Token) parseSingleLineComment() {
    dchar c = skipChar();
    while (c != EOL_CHAR && c != EOF_CHAR)
        c = skipChar();
    _token.type = TokenType.comment;
    _token.multiline = false;
    return _token;
}
|
||||
|
||||
/// Consume a /* ... */ comment (entered with the cursor on '/').
/// Produces a multiline comment token; an unterminated comment ends at EOF.
protected ref const(Token) parseMultiLineComment() {
    skipChar();    // consume the opening '/'
    for(;;) {
        dchar ch = skipChar();
        if (ch == '*' && peekNextChar() == '/') {
            // consume the closing "*/"
            skipChar();
            skipChar();
            break;
        }
        if (ch == EOF_CHAR)
            break;
    }
    _token.type = TokenType.comment;
    _token.multiline = true;
    return _token;
}
|
||||
|
||||
/// Mark the current token as an error and resynchronize the cursor by
/// skipping ahead to the next whitespace, end-of-line, or end-of-file.
protected ref const(Token) parseError() {
    _token.type = TokenType.error;
    dchar c;
    do {
        c = skipChar();
    } while (c != ' ' && c != '\t' && c != EOL_CHAR && c != EOF_CHAR);
    return _token;
}
|
||||
|
||||
/// Produce a single-character operator token of the given type and
/// consume the character.
protected ref const(Token) parseOp(TokenType op) {
    skipChar();
    _token.type = op;
    return _token;
}
|
||||
|
||||
/// Get next token from the input stream.
/// Dispatch order matters: EOF/EOL/whitespace/string/ident first, then hex
/// ("0x" and "#") before plain numbers, then comment prefixes before the '/'
/// operator fallback in the switch below.
ref const(Token) nextToken() {
    setTokenStart();
    dchar ch = peekChar();
    if (ch == EOF_CHAR)
        return parseEof();
    if (ch == EOL_CHAR)
        return parseEol();
    if (ch == ' ' || ch == '\t')
        return parseWhiteSpace();
    if (ch == '\"')
        return parseString();
    if (isAlpha(ch))
        return parseIdent();
    // "0x..." hex literal (2-char prefix)
    if (ch == '0' && peekNextChar == 'x')
        return parseHex(2);
    // "#RRGGBB"-style hex literal (1-char prefix)
    if (ch == '#')
        return parseHex(1);
    if (isNum(ch))
        return parseNumber();
    // ".5"-style float with no integer part
    if (ch == '.' && isNum(peekNextChar()))
        return parseFloating(0);
    // configurable 1- or 2-char single-line comment prefixes (e.g. "//", "#")
    foreach(prefix; _singleLineCommentPrefixes) {
        if (ch == prefix[0] && (prefix.length == 1 || peekNextChar() == prefix[1]))
            return parseSingleLineComment();
    }
    if (ch == '/' && peekNextChar() == '*')
        return parseMultiLineComment();
    // single-character operators; anything unrecognized is an error token
    switch (ch) {
        case '.': return parseOp(TokenType.dot);
        case ':': return parseOp(TokenType.colon);
        case ';': return parseOp(TokenType.semicolon);
        case ',': return parseOp(TokenType.comma);
        case '-': return parseOp(TokenType.minus);
        case '+': return parseOp(TokenType.plus);
        case '{': return parseOp(TokenType.curlyOpen);
        case '}': return parseOp(TokenType.curlyClose);
        case '(': return parseOp(TokenType.open);
        case ')': return parseOp(TokenType.close);
        case '[': return parseOp(TokenType.squareOpen);
        case ']': return parseOp(TokenType.squareClose);
        case '/': return parseOp(TokenType.divide);
        default:
            return parseError();
    }
}
|
||||
|
||||
/// Build a short " near `...^^^...`" excerpt of the current line for error
/// messages, with "^^^" marking the cursor position; the excerpt is clipped
/// to roughly 30 chars total.
string getContextSource() {
    string s = toUTF8(_lineText);
    if (_pos == 0)
        return " near `^^^" ~ s[0..min($,30)] ~ "`";
    if (_pos >= _len)
        return " near `" ~ s[max(_len - 30, 0) .. $] ~ "^^^`";
    // cursor mid-line: up to 15 chars of context on each side
    return " near `" ~ s[max(_pos - 15, 0) .. _pos] ~ "^^^" ~ s[_pos .. min(_pos + 15, $)] ~ "`";
}
|
||||
|
||||
/// Source file name, as passed to the tokenizer (used in error reporting).
@property string filename() {
    // FIX: previously `return filename;` — the property returned itself,
    // causing infinite recursion / stack overflow on any call.
    // Return the backing field (the same one emitError passes to ParserException).
    return _filename;
}
|
||||
/// Line number (of the current token) for error reporting.
@property int line() {
    return _token.line;
}
|
||||
/// Column position (of the current token) for error reporting.
@property int pos() {
    return _token.pos;
}
|
||||
|
||||
/// Throw a ParserException at the current token's position, appending a
/// source-context excerpt (see getContextSource) to the message.
void emitError(string msg) {
    throw new ParserException(msg ~ getContextSource(), _filename, _token.line, _token.pos);
}
|
||||
|
||||
/// Throw a ParserException at the position of an explicitly supplied token
/// (no context excerpt is appended in this overload).
void emitError(string msg, ref const Token token) {
    throw new ParserException(msg, _filename, token.line, token.pos);
}
|
||||
}
|
||||
|
||||
/// Tokenize source code into an array of tokens (the trailing EOF token is
/// excluded). `_singleLineCommentPrefixes` lets callers customize line-comment
/// syntax (e.g. ["#"] for Wavefront OBJ files).
public Token[] tokenize(string code, string[] _singleLineCommentPrefixes = ["//"]) {
    Token[] res;
    // FIX: the comment-prefix parameter was accepted but never forwarded to the
    // Tokenizer, so non-default prefixes (e.g. "#" used by the OBJ importer)
    // were silently ignored and such lines tokenized as errors.
    auto tokenizer = new Tokenizer(code, "", _singleLineCommentPrefixes);
    for (;;) {
        auto token = tokenizer.nextToken();
        if (token.type == TokenType.eof)
            break;
        res ~= token;
    }
    return res;
}
|
||||
|
||||
/// Strip whitespace tokens from the beginning and/or end of a token slice;
/// the slice is narrowed in place, no tokens are copied.
Token[] trimSpaceTokens(Token[] tokens, bool trimBeginning = true, bool trimEnd = true) {
    if (trimBeginning) {
        while (tokens.length && tokens[0].type == TokenType.whitespace)
            tokens = tokens[1 .. $];
    }
    if (trimEnd) {
        while (tokens.length && tokens[$ - 1].type == TokenType.whitespace)
            tokens = tokens[0 .. $ - 1];
    }
    return tokens;
}
|
|
@ -0,0 +1,194 @@
|
|||
module dlangui.graphics.scene.objimport;
|
||||
|
||||
import dlangui.core.logger;
|
||||
import dlangui.dml.tokenizer;
|
||||
|
||||
/// Importer for Wavefront OBJ geometry statements ("v", "vt", "vn", "vp", "f").
/// Accumulates flat float arrays for positions / normals / texture coordinates
/// and counts triangles produced by fan-triangulating face statements.
struct ObjModelImport {
    /// one face corner: vertex index / texture-coord index / normal index
    alias FaceIndex = int[3];

    private float[] _vertexData;   // x,y,z per vertex
    private float[] _normalData;   // x,y,z per normal
    private float[] _txData;       // u,v per texture coordinate
    private int _vertexCount;
    private int _normalCount;
    private int _triangleCount;
    private int _txCount;
    private float[8] _buf;         // scratch buffer reused by parseFloatList

    /// Collect up to maxItems numeric token values into _buf, padding the tail
    /// with `padding`. NOTE: because the padding loop always fills up to
    /// maxItems, the returned slice has length == maxItems whenever
    /// maxItems > 0, even if no numeric token was present on the line.
    protected float[] parseFloatList(Token[] tokens, int maxItems = 3, float padding = 0) {
        int i = 0;
        foreach(t; tokens) {
            if (i >= maxItems)
                break;
            if (t.type == TokenType.floating)
                _buf[i++] = cast(float)t.floatvalue;
            else if (t.type == TokenType.integer)
                _buf[i++] = cast(float)t.intvalue;
        }
        while(i < maxItems)
            _buf[i++] = padding;
        if (i > 0)
            return _buf[0 .. i];
        return null;
    }

    //# List of geometric vertices, with (x,y,z[,w]) coordinates, w is optional and defaults to 1.0.
    //v 0.123 0.234 0.345 1.0
    /// Parse a "v" line: append x,y,z to _vertexData (w is ignored).
    protected bool parseVertexLine(Token[] tokens) {
        float[] data = parseFloatList(tokens, 3, 0);
        if (data.length == 3) {
            _vertexData ~= data;
            _vertexCount++;
            return true;
        }
        return false;
    }

    //# List of texture coordinates, in (u, v [,w]) coordinates, these will vary between 0 and 1, w is optional and defaults to 0.
    //vt 0.500 1 [0]
    /// Parse a "vt" line: append u,v to _txData (w is ignored).
    protected bool parseVertexTextureLine(Token[] tokens) {
        float[] data = parseFloatList(tokens, 2, 0);
        if (data.length == 2) {
            _txData ~= data;
            _txCount++;
            return true;
        }
        return false;
    }

    //# List of vertex normals in (x,y,z) form; normals might not be unit vectors.
    //vn 0.707 0.000 0.707
    /// Parse a "vn" line: append x,y,z to _normalData.
    protected bool parseVertexNormalsLine(Token[] tokens) {
        float[] data = parseFloatList(tokens, 3, 0);
        if (data.length == 3) {
            _normalData ~= data;
            _normalCount++;
            return true;
        }
        return false;
    }

    /// Advance the slice past its first token; returns true if tokens remain.
    static protected bool skipToken(ref Token[] tokens) {
        tokens = tokens.length > 1 ? tokens[1 .. $] : null;
        return tokens.length > 0;
    }

    /// Parse an optionally negative integer index into `data`, consuming its
    /// tokens. Returns false (leaving `data` unchanged) if no integer follows.
    static protected bool parseIndex(ref Token[] tokens, ref int data) {
        int sign = 1;
        if (tokens[0].type == TokenType.minus) {
            sign = -1;
            // FIX: a trailing '-' with nothing after it previously fell
            // through to tokens[0] on an empty slice -> range error
            if (!skipToken(tokens))
                return false;
        }
        if (tokens[0].type == TokenType.integer) {
            data = tokens[0].intvalue * sign;
            skipToken(tokens);
            return true;
        }
        return false;
    }

    /// Consume one token of the given type if it is next; returns true if consumed.
    static protected bool skip(ref Token[] tokens, TokenType type) {
        if (tokens.length > 0 && tokens[0].type == type) {
            skipToken(tokens);
            return true;
        }
        return false;
    }

    /// Parse one face corner: v, v/vt, v//vn or v/vt/vn. Missing parts of
    /// `data` are left unchanged. Succeeds only if the corner is followed by
    /// whitespace or end of line.
    static protected bool parseFaceIndex(ref Token[] tokens, ref FaceIndex data) {
        if (tokens.length == 0)
            return false;
        if (!parseIndex(tokens, data[0]))
            return false;
        if (skip(tokens, TokenType.divide)) {
            // texture index may be empty ("v//vn"), so a failure here is fine
            parseIndex(tokens, data[1]);
            if (skip(tokens, TokenType.divide)) {
                if (!parseIndex(tokens, data[2]))
                    return false;
            }
        }
        return tokens.length == 0 || skip(tokens, TokenType.whitespace);
    }

    //# Parameter space vertices in ( u [,v] [,w] ) form; free form geometry statement ( see below )
    //vp 0.310000 3.210000 2.100000
    /// Parse a "vp" line — free-form geometry is not supported; the line is
    /// accepted and ignored.
    protected bool parseParameterSpaceLine(Token[] tokens) {
        // not supported
        return true;
    }

    //f 1 2 3
    //f 3/1 4/2 5/3
    //f 6/4/1 3/5/3 7/6/5
    /// Parse an "f" line: read up to 10 corners and fan-triangulate the
    /// polygon around the first corner.
    protected bool parseFaceLine(Token[] tokens) {
        FaceIndex[10] indexes;
        int count = 0;
        while(parseFaceIndex(tokens, indexes[count])) {
            if (++count >= 10)
                break;
        }
        // fan triangulation: (0, j, j+1) for each interior edge
        for (int j = 1; j + 1 < count; j++)
            addTriangle(indexes[0], indexes[j], indexes[j + 1]);
        return true;
    }

    /// Register one triangle built from three face corners.
    /// NOTE(review): currently only counts triangles; index data is not yet
    /// stored — presumably mesh construction follows in a later change.
    protected bool addTriangle(FaceIndex v1, FaceIndex v2, FaceIndex v3) {
        _triangleCount++;
        return true;
    }

    /// Dispatch one source line (as tokens) to the statement parser matching
    /// its leading keyword; comments and unknown statements are ignored.
    protected bool parseLine(Token[] tokens) {
        tokens = trimSpaceTokens(tokens);
        if (tokens.length) {
            if (tokens[0].type == TokenType.comment)
                return true; // ignore comment
            if (tokens[0].type == TokenType.ident) {
                string ident = tokens[0].text;
                // keep trailing whitespace: parseFaceIndex uses it as a corner separator
                tokens = trimSpaceTokens(tokens[1 .. $], true, false);
                if (ident == "v") // vertex
                    return parseVertexLine(tokens);
                if (ident == "vt") // texture coords
                    return parseVertexTextureLine(tokens);
                if (ident == "vn") // normals
                    return parseVertexNormalsLine(tokens);
                if (ident == "vp") // parameter space
                    return parseParameterSpaceLine(tokens);
                if (ident == "f") // face
                    return parseFaceLine(tokens);
            }
        }
        return true;
    }

    /// Parse a whole OBJ source string. Returns false on the first line that
    /// fails to parse, or if tokenization throws.
    bool parse(string source) {
        import dlangui.dml.tokenizer;
        try {
            // '#' starts a comment in OBJ syntax
            Token[] tokens = tokenize(source, ["#"]);
            int start = 0;
            int i = 0;
            // iterate to tokens.length inclusive so the last (unterminated) line is flushed
            for ( ; i <= tokens.length; i++) {
                if (i == tokens.length || tokens[i].type == TokenType.eol) {
                    if (i > start && !parseLine(tokens[start .. i]))
                        return false;
                    start = i + 1;
                }
            }
        } catch (ParserException e) {
            Log.d("failed to tokenize OBJ source", e);
            return false;
        }
        return true;
    }

}
|
||||
|
||||
/// Load a text resource by resource id via the drawable cache.
/// Asserts (fails hard) when the resource is missing or unreadable.
string loadTextResource(string resourceId) {
    import dlangui.graphics.resources;
    import std.string : endsWith;
    string filename;
    filename = drawableCache.findResource(resourceId);
    if (!filename) {
        Log.e("Object resource file not found for resourceId ", resourceId);
        assert(false);
    }
    string s = cast(string)loadResourceBytes(filename);
    if (!s) {
        // FIX: message previously said "shader source" — copy-pasted from the
        // shader loader; this function loads generic text resources
        Log.e("Cannot read text resource ", resourceId, " from file ", filename);
        assert(false);
    }
    return s;
}
|
Loading…
Reference in New Issue