From 31a62882254a1578c9a750c5fb34d375901436e0 Mon Sep 17 00:00:00 2001 From: Vadim Lopatin Date: Wed, 11 Feb 2015 16:14:15 +0300 Subject: [PATCH 01/13] matching brackets working finally - issue #19 is implemented --- src/dlangide/ui/dsourceedit.d | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dlangide/ui/dsourceedit.d b/src/dlangide/ui/dsourceedit.d index 2393dfd..312d559 100644 --- a/src/dlangide/ui/dsourceedit.d +++ b/src/dlangide/ui/dsourceedit.d @@ -253,7 +253,6 @@ class SimpleDSyntaxHighlighter : SyntaxHighlighter { return startPos; // continue } - return p; } From f8ce6dad8f3c2477b3e63ad240c90dcb2b70c966 Mon Sep 17 00:00:00 2001 From: Vadim Lopatin Date: Thu, 12 Feb 2015 11:13:52 +0300 Subject: [PATCH 02/13] fixes in brackets matching --- src/dlangide/ui/dsourceedit.d | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/src/dlangide/ui/dsourceedit.d b/src/dlangide/ui/dsourceedit.d index 312d559..cf67271 100644 --- a/src/dlangide/ui/dsourceedit.d +++ b/src/dlangide/ui/dsourceedit.d @@ -637,14 +637,13 @@ class SimpleDSyntaxHighlighter : SyntaxHighlighter { //Log.d("Null token returned"); break; } - if (token.type == TokenType.EOF) { - //Log.d("EOF token"); - break; - } uint newPos = token.pos - 1; uint newLine = token.line - 1; - //Log.d("", token.line, ":", token.pos, "\t", tokenLine + 1, ":", tokenPos + 1, "\t", token.toString); + //Log.d("", tokenLine + 1, ":", tokenPos + 1, " \t", token.line, ":", token.pos, "\t", token.toString); + if (token.type == TokenType.EOF) { + //Log.d("EOF token"); + } // fill with category for (int i = tokenLine; i <= newLine; i++) { @@ -691,6 +690,9 @@ class SimpleDSyntaxHighlighter : SyntaxHighlighter { case TokenType.COMMENT: category = TokenCategory.Error_InvalidComment; break; + case TokenType.OP: + category = TokenCategory.Error_InvalidOp; + break; case TokenType.FLOAT: case TokenType.INTEGER: category = TokenCategory.Error_InvalidNumber; @@ -707,6 +709,10 @@ class SimpleDSyntaxHighlighter : SyntaxHighlighter { tokenPos = newPos; tokenLine= newLine; + if (token.type == TokenType.EOF) { + //Log.d("EOF token"); + break; + } } } catch (Exception e) { Log.e("exception while trying to parse D source", e); @@ -718,4 +724,3 @@ class SimpleDSyntaxHighlighter : SyntaxHighlighter { Log.d("updateHighlight took ", elapsed, "ms"); } } - From 8bcf603b27626a9883400e50b189a2f57b638079 Mon Sep 17 00:00:00 2001 From: Vadim Lopatin Date: Thu, 12 Feb 2015 13:19:11 +0300 Subject: [PATCH 03/13] fix tokenizer; initial implementation of win32 debugger --- dlangide.visualdproj | 5 + src/ddc/lexer/tokenizer.d | 54 ++++++---- src/ddebug/windows/windebug.d | 190 ++++++++++++++++++++++++++++++++++ 3 files changed, 226 insertions(+), 23 deletions(-) create mode 100644 src/ddebug/windows/windebug.d diff --git a/dlangide.visualdproj b/dlangide.visualdproj index b7a89b5..b7abcc4 100644 --- a/dlangide.visualdproj +++ b/dlangide.visualdproj @@ -216,6 +216,11 @@ + + + + + diff --git a/src/ddc/lexer/tokenizer.d b/src/ddc/lexer/tokenizer.d index c0d5268..5e951f2 100644 --- a/src/ddc/lexer/tokenizer.d +++ b/src/ddc/lexer/tokenizer.d @@ -1483,11 +1483,15 @@ class Tokenizer protected dchar nextChar() { if (_pos >= _len) { if (!nextLine()) { + _pos = _prevLineLength + 1; return EOF_CHAR; } return EOL_CHAR; } - return _lineText[_pos++]; + dchar res = _lineText[_pos++]; + if (_pos >= _len) + nextLine(); + return res; } protected dchar peekChar() { @@ -1503,16 +1507,12 @@ class Tokenizer protected Token emitEof() { // TODO: check for current state - return 
new EofToken(_lineStream.file, _line, _pos); + return new EofToken(_lineStream.file, _startLine, _startPos + 2); } protected Token processWhiteSpace(dchar firstChar) { // reuse the same token instance, to avoid extra heap spamming - if (_pos == 0) { - _sharedWhiteSpaceToken.setPos(_line - 1, _prevLineLength); - } else { - _sharedWhiteSpaceToken.setPos(_line, _pos - 1); - } + _sharedWhiteSpaceToken.setPos(_startLine, _startPos); for (;;) { int i = _pos; for (; i < _len; i++) { @@ -1531,18 +1531,19 @@ class Tokenizer } protected Token processOneLineComment() { - _sharedCommentToken.setPos(_line, _pos - 1); + _sharedCommentToken.setPos(_startLine, _startPos); _sharedCommentToken.isDocumentationComment = _pos + 1 < _lineText.length && _lineText[_pos + 1] == '/'; _sharedCommentToken.isMultilineComment = false; if (_enableCommentText) { _sharedCommentToken.text = _lineText[_pos + 1 .. $]; } _pos = _len; + nextChar(); return _sharedCommentToken; } protected Token processOneLineSharpComment() { - _sharedCommentToken.setPos(_line, _pos - 1); + _sharedCommentToken.setPos(_startLine, _startPos); if (_enableCommentText) { _sharedCommentToken.text = _lineText[_pos .. $]; } @@ -1552,7 +1553,7 @@ class Tokenizer // Comment /* */ protected Token processMultilineComment() { - _sharedCommentToken.setPos(_line, _pos - 1); + _sharedCommentToken.setPos(_startLine, _startPos); _sharedCommentToken.isDocumentationComment = _pos + 1 < _lineText.length && _lineText[_pos + 1] == '*'; _sharedCommentToken.isMultilineComment = true; _commentAppender.reset(); @@ -1587,7 +1588,7 @@ class Tokenizer // Comment /+ +/ protected Token processNestedComment() { - _sharedCommentToken.setPos(_line, _pos - 1); + _sharedCommentToken.setPos(_startLine, _startPos); _sharedCommentToken.isDocumentationComment = _pos + 1 < _lineText.length && _lineText[_pos + 1] == '+'; _sharedCommentToken.isMultilineComment = true; _commentAppender.reset(); @@ -1649,7 +1650,7 @@ class Tokenizer } protected Token processIdent() { - _sharedIdentToken.setPos(_line, _pos - 1); + _sharedIdentToken.setPos(_startLine, _startPos); _identAppender.reset(); int startPos = _pos - 1; int endPos = _len; @@ -1695,7 +1696,7 @@ class Tokenizer } protected Token processBinaryNumber() { - _sharedIntegerToken.setPos(_line, _pos - 1); + _sharedIntegerToken.setPos(_startLine, _startPos); _pos++; if (_pos >= _len) return parserError("Unexpected end of line in binary number", _sharedIntegerToken); @@ -1717,8 +1718,8 @@ class Tokenizer } protected Token processHexNumber() { - _sharedIntegerToken.setPos(_line, _pos - 1); - _sharedRealToken.setPos(_line, _pos - 1); + _sharedIntegerToken.setPos(_startLine, _startPos); + _sharedRealToken.setPos(_startLine, _startPos); _pos++; if (_pos >= _len) return parserError("Unexpected end of line in hex number", _sharedIntegerToken); @@ -1749,7 +1750,7 @@ class Tokenizer } protected Token processOctNumber() { - _sharedIntegerToken.setPos(_line, _pos - 1); + _sharedIntegerToken.setPos(_startLine, _startPos); if (_pos >= _len) return parserError("Unexpected end of line in octal number", _sharedIntegerToken); int digits = 0; @@ -1872,8 +1873,8 @@ class Tokenizer protected Token processDecNumber(dchar c) { _pos--; - _sharedIntegerToken.setPos(_line, _pos); - _sharedRealToken.setPos(_line, _pos); + _sharedIntegerToken.setPos(_startLine, _startPos); + _sharedRealToken.setPos(_startLine, _startPos); if (_pos >= _len) return parserError("Unexpected end of line in number", _sharedIntegerToken); int digits = 0; @@ -2375,7 +2376,7 @@ class Tokenizer } 
protected Token processCharacterLiteral() { - _sharedCharacterLiteralToken.setPos(_line, _pos - 1); + _sharedCharacterLiteralToken.setPos(_startLine, _startPos); if (_pos + 2 > _len) return parserError("Invalid character literal", _sharedCharacterLiteralToken); dchar ch = _lineText[_pos++]; @@ -2424,7 +2425,7 @@ class Tokenizer protected Token processDoubleQuotedOrWysiwygString(dchar delimiter) { bool wysiwyg = (delimiter == 'r' || delimiter == '`'); //writeln("processDoubleQuotedString()"); - _sharedStringLiteralToken.setPos(_line, _pos - 1); + _sharedStringLiteralToken.setPos(_startLine, _startPos); _stringLiteralAppender.reset(); if (delimiter == 'r') { _pos++; @@ -2501,7 +2502,7 @@ class Tokenizer static immutable dstring VENDOR = "coolreader.org"; protected Token makeSpecialTokenString(dstring str, int pos) { - _sharedStringLiteralToken.setPos(_line, pos); + _sharedStringLiteralToken.setPos(_startLine, _startPos); _sharedStringLiteralToken.setText(cast(dchar[])str, 0); return _sharedStringLiteralToken; } @@ -2525,8 +2526,13 @@ class Tokenizer return null; } + protected int _startLine; + protected int _startPos; + // returns next token (clone it if you want to store for future usage, otherwise it may be overwritten by further nextToken() calls). Token nextToken() { + _startLine = _line; + _startPos = _pos; dchar ch = nextChar(); if (ch == EOF_CHAR) { return emitEof(); @@ -2587,7 +2593,7 @@ class Tokenizer case Keyword.VERSION_: // Compiler version as an integer, such as 2001 return processSpecialToken(keyword, oldPos); default: - _sharedKeywordToken.setPos(_line, oldPos); + _sharedKeywordToken.setPos(_startLine, _startPos); _sharedKeywordToken.keyword = keyword; return _sharedKeywordToken; } @@ -2596,10 +2602,12 @@ class Tokenizer } OpCode op = detectOp(ch); if (op != OpCode.NONE) { - _sharedOpToken.setPos(_line, oldPos); + _sharedOpToken.setPos(_startLine, _startPos); _sharedOpToken.opCode = op; return _sharedOpToken; } + + // TODO: for tolerant parsing, return error token return null; } diff --git a/src/ddebug/windows/windebug.d b/src/ddebug/windows/windebug.d new file mode 100644 index 0000000..d7622fe --- /dev/null +++ b/src/ddebug/windows/windebug.d @@ -0,0 +1,190 @@ +// just an attempt to implement D debugger for win32 +module ddebug.windows.windebug; + +import win32.windows; + +import std.utf; + +version(Windows): + + +class WinDebugger { + this() { + } + + STARTUPINFOW _si; + PROCESS_INFORMATION _pi; + + bool startDebugging(string exefile, string args) { + _stopRequested = false; + _si = STARTUPINFOW.init; + _si.cb = _si.sizeof; + _pi = PROCESS_INFORMATION.init; + + string cmdline = "\"" ~ exefile ~ "\""; + if (args.length > 0) + cmdline = cmdline ~ " " ~ args; + wchar[] exefilew = cast(wchar[])toUTF16(exefile); + exefilew ~= cast(dchar)0; + wchar[] cmdlinew = cast(wchar[])toUTF16(cmdline); + cmdlinew ~= cast(dchar)0; + if (!CreateProcessW(cast(const wchar*)exefilew.ptr, + cmdlinew.ptr, + cast(SECURITY_ATTRIBUTES*)NULL, cast(SECURITY_ATTRIBUTES*)NULL, + FALSE, + DEBUG_ONLY_THIS_PROCESS, + NULL, + cast(const wchar*)NULL, &_si, &_pi)) { + return false; + } + return true; + } + + uint onCreateThreadDebugEvent(ref DEBUG_EVENT debug_event) { + return DBG_CONTINUE; + } + uint onCreateProcessDebugEvent(ref DEBUG_EVENT debug_event) { + return DBG_CONTINUE; + } + uint onExitThreadDebugEvent(ref DEBUG_EVENT debug_event) { + return DBG_CONTINUE; + } + uint onExitProcessDebugEvent(ref DEBUG_EVENT debug_event) { + return DBG_CONTINUE; + } + uint onLoadDllDebugEvent(ref DEBUG_EVENT 
debug_event) { + return DBG_CONTINUE; + } + uint onUnloadDllDebugEvent(ref DEBUG_EVENT debug_event) { + return DBG_CONTINUE; + } + uint onOutputDebugStringEvent(ref DEBUG_EVENT debug_event) { + return DBG_CONTINUE; + } + uint onRipEvent(ref DEBUG_EVENT debug_event) { + return DBG_TERMINATE_PROCESS; + } + + void processDebugEvent(ref DEBUG_EVENT debug_event) { + switch (debug_event.dwDebugEventCode) + { + case EXCEPTION_DEBUG_EVENT: + // Process the exception code. When handling + // exceptions, remember to set the continuation + // status parameter (dwContinueStatus). This value + // is used by the ContinueDebugEvent function. + + switch(debug_event.Exception.ExceptionRecord.ExceptionCode) + { + case EXCEPTION_ACCESS_VIOLATION: + // First chance: Pass this on to the system. + // Last chance: Display an appropriate error. + break; + + case EXCEPTION_BREAKPOINT: + // First chance: Display the current + // instruction and register values. + break; + + case EXCEPTION_DATATYPE_MISALIGNMENT: + // First chance: Pass this on to the system. + // Last chance: Display an appropriate error. + break; + + case EXCEPTION_SINGLE_STEP: + // First chance: Update the display of the + // current instruction and register values. + break; + + case DBG_CONTROL_C: + // First chance: Pass this on to the system. + // Last chance: Display an appropriate error. + break; + + default: + // Handle other exceptions. + break; + } + + break; + + case CREATE_THREAD_DEBUG_EVENT: + // As needed, examine or change the thread's registers + // with the GetThreadContext and SetThreadContext functions; + // and suspend and resume thread execution with the + // SuspendThread and ResumeThread functions. + + _continueStatus = onCreateThreadDebugEvent(debug_event); + break; + + case CREATE_PROCESS_DEBUG_EVENT: + // As needed, examine or change the registers of the + // process's initial thread with the GetThreadContext and + // SetThreadContext functions; read from and write to the + // process's virtual memory with the ReadProcessMemory and + // WriteProcessMemory functions; and suspend and resume + // thread execution with the SuspendThread and ResumeThread + // functions. Be sure to close the handle to the process image + // file with CloseHandle. + + _continueStatus = onCreateProcessDebugEvent(debug_event); + break; + + case EXIT_THREAD_DEBUG_EVENT: + // Display the thread's exit code. + + _continueStatus = onExitThreadDebugEvent(debug_event); + break; + + case EXIT_PROCESS_DEBUG_EVENT: + // Display the process's exit code. + + _continueStatus = onExitProcessDebugEvent(debug_event); + break; + + case LOAD_DLL_DEBUG_EVENT: + // Read the debugging information included in the newly + // loaded DLL. Be sure to close the handle to the loaded DLL + // with CloseHandle. + + _continueStatus = onLoadDllDebugEvent(debug_event); + break; + + case UNLOAD_DLL_DEBUG_EVENT: + // Display a message that the DLL has been unloaded. + + _continueStatus = onUnloadDllDebugEvent(debug_event); + break; + + case OUTPUT_DEBUG_STRING_EVENT: + // Display the output debugging string. 
+ + _continueStatus = onOutputDebugStringEvent(debug_event); + break; + + case RIP_EVENT: + _continueStatus = onRipEvent(debug_event); + break; + default: + // UNKNOWN EVENT + break; + } + } + + uint _continueStatus; + bool _stopRequested; + + bool enterDebugLoop() { + _continueStatus = DBG_CONTINUE; + DEBUG_EVENT debug_event; + for(;;) + { + if (!WaitForDebugEvent(&debug_event, INFINITE)) + return false; + processDebugEvent(debug_event); + ContinueDebugEvent(debug_event.dwProcessId, + debug_event.dwThreadId, + _continueStatus); + } + } +} From e185c2a935240304ffe9e20958773f07d61ed311 Mon Sep 17 00:00:00 2001 From: Vadim Lopatin Date: Thu, 12 Feb 2015 15:24:47 +0300 Subject: [PATCH 04/13] tooltips for toolbar buttons --- src/dlangide/ui/commands.d | 2 ++ src/dlangide/ui/frame.d | 2 +- views/res/i18n/en.ini | 1 + 3 files changed, 4 insertions(+), 1 deletion(-) diff --git a/src/dlangide/ui/commands.d b/src/dlangide/ui/commands.d index 9a92f09..f3c1d40 100644 --- a/src/dlangide/ui/commands.d +++ b/src/dlangide/ui/commands.d @@ -17,6 +17,7 @@ enum IDEActions : int { FileClose, FileExit, EditPreferences, + BuildConfigurations, BuildWorkspace, RebuildWorkspace, CleanWorkspace, @@ -59,6 +60,7 @@ const Action ACTION_FILE_EXIT = new Action(IDEActions.FileExit, "MENU_FILE_EXIT" const Action ACTION_WORKSPACE_BUILD = new Action(IDEActions.BuildWorkspace, "MENU_BUILD_WORKSPACE_BUILD"c); const Action ACTION_WORKSPACE_REBUILD = new Action(IDEActions.RebuildWorkspace, "MENU_BUILD_WORKSPACE_REBUILD"c); const Action ACTION_WORKSPACE_CLEAN = new Action(IDEActions.CleanWorkspace, "MENU_BUILD_WORKSPACE_CLEAN"c); +const Action ACTION_BUILD_CONFIGURATIONS = new Action(IDEActions.BuildConfigurations, "MENU_BUILD_CONFIGURATIONS"c); const Action ACTION_PROJECT_BUILD = new Action(IDEActions.BuildProject, "MENU_BUILD_PROJECT_BUILD"c, "run-build", KeyCode.F7, 0); const Action ACTION_PROJECT_REBUILD = new Action(IDEActions.RebuildProject, "MENU_BUILD_PROJECT_REBUILD"c, "run-build-clean", KeyCode.F7, KeyFlag.Control); const Action ACTION_PROJECT_CLEAN = new Action(IDEActions.CleanProject, "MENU_BUILD_PROJECT_CLEAN"c, null); diff --git a/src/dlangide/ui/frame.d b/src/dlangide/ui/frame.d index 93be601..e94e9ee 100644 --- a/src/dlangide/ui/frame.d +++ b/src/dlangide/ui/frame.d @@ -374,7 +374,7 @@ class IDEFrame : AppFrame { } return true; }; - cbBuildConfiguration.action = ACTION_PROJECT_BUILD; + cbBuildConfiguration.action = ACTION_BUILD_CONFIGURATIONS; tb.addControl(cbBuildConfiguration); tb.addButtons(ACTION_PROJECT_BUILD); diff --git a/views/res/i18n/en.ini b/views/res/i18n/en.ini index 91529fd..a069f2f 100644 --- a/views/res/i18n/en.ini +++ b/views/res/i18n/en.ini @@ -23,6 +23,7 @@ MENU_EDIT_TOGGLE_LINE_COMMENT=Toggle line comment MENU_EDIT_TOGGLE_BLOCK_COMMENT=Toggle block comment MENU_EDIT_ADVANCED=Advanced... 
MENU_EDIT_PREFERENCES=&Preferences +MENU_BUILD_CONFIGURATIONS=Build configurations MENU_BUILD=&BUILD MENU_BUILD_WORKSPACE_BUILD=Build Workspace MENU_BUILD_WORKSPACE_REBUILD=Rebuild Workspace From 3b12b936ff1dda4f86b2b6a192ac913ef7763110 Mon Sep 17 00:00:00 2001 From: Vadim Lopatin Date: Thu, 12 Feb 2015 16:39:10 +0300 Subject: [PATCH 05/13] fix shortcuts; fix tokenizer bug --- src/ddc/lexer/tokenizer.d | 8 +++----- src/dlangide/ui/commands.d | 4 ++-- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/src/ddc/lexer/tokenizer.d b/src/ddc/lexer/tokenizer.d index 5e951f2..7c0fd4d 100644 --- a/src/ddc/lexer/tokenizer.d +++ b/src/ddc/lexer/tokenizer.d @@ -1652,9 +1652,9 @@ class Tokenizer protected Token processIdent() { _sharedIdentToken.setPos(_startLine, _startPos); _identAppender.reset(); - int startPos = _pos - 1; + int startPos = _startPos; int endPos = _len; - for (int i = _pos; i < _len; i++) { + for (int i = startPos + 1; i < _len; i++) { dchar ch = _lineText[i]; if (!isIdentMiddleChar(ch)) { endPos = i; @@ -2606,9 +2606,7 @@ class Tokenizer _sharedOpToken.opCode = op; return _sharedOpToken; } - - // TODO: for tolerant parsing, return error token - return null; + return parserError("Invalid token", _line, _pos); } diff --git a/src/dlangide/ui/commands.d b/src/dlangide/ui/commands.d index f3c1d40..94de07a 100644 --- a/src/dlangide/ui/commands.d +++ b/src/dlangide/ui/commands.d @@ -77,12 +77,12 @@ const Action ACTION_EDIT_COPY = (new Action(EditorActions.Copy, "MENU_EDIT_COPY" const Action ACTION_EDIT_PASTE = (new Action(EditorActions.Paste, "MENU_EDIT_PASTE"c, "edit-paste"c, KeyCode.KEY_V, KeyFlag.Control)).disableByDefault(); const Action ACTION_EDIT_CUT = (new Action(EditorActions.Cut, "MENU_EDIT_CUT"c, "edit-cut"c, KeyCode.KEY_X, KeyFlag.Control)).disableByDefault(); const Action ACTION_EDIT_UNDO = (new Action(EditorActions.Undo, "MENU_EDIT_UNDO"c, "edit-undo"c, KeyCode.KEY_Z, KeyFlag.Control)).disableByDefault(); -const Action ACTION_EDIT_REDO = (new Action(EditorActions.Redo, "MENU_EDIT_REDO"c, "edit-redo"c, KeyCode.KEY_Z, KeyFlag.Control|KeyFlag.Shift)).disableByDefault(); +const Action ACTION_EDIT_REDO = (new Action(EditorActions.Redo, "MENU_EDIT_REDO"c, "edit-redo"c, KeyCode.KEY_Y, KeyFlag.Control)).addAccelerator(KeyCode.KEY_Z, KeyFlag.Control|KeyFlag.Shift).disableByDefault(); const Action ACTION_EDIT_INDENT = (new Action(EditorActions.Indent, "MENU_EDIT_INDENT"c, "edit-indent"c, KeyCode.TAB, 0)).addAccelerator(KeyCode.KEY_BRACKETCLOSE, KeyFlag.Control).disableByDefault(); const Action ACTION_EDIT_UNINDENT = (new Action(EditorActions.Unindent, "MENU_EDIT_UNINDENT"c, "edit-unindent", KeyCode.TAB, KeyFlag.Shift)).addAccelerator(KeyCode.KEY_BRACKETOPEN, KeyFlag.Control).disableByDefault(); const Action ACTION_EDIT_TOGGLE_LINE_COMMENT = (new Action(EditorActions.ToggleLineComment, "MENU_EDIT_TOGGLE_LINE_COMMENT"c, null, KeyCode.KEY_DIVIDE, KeyFlag.Control)).disableByDefault(); const Action ACTION_EDIT_TOGGLE_BLOCK_COMMENT = (new Action(EditorActions.ToggleBlockComment, "MENU_EDIT_TOGGLE_BLOCK_COMMENT"c, null, KeyCode.KEY_DIVIDE, KeyFlag.Control|KeyFlag.Shift)).disableByDefault(); -const Action ACTION_EDIT_PREFERENCES = (new Action(EditorActions.Redo, "MENU_EDIT_PREFERENCES"c, null)).disableByDefault(); +const Action ACTION_EDIT_PREFERENCES = (new Action(IDEActions.EditPreferences, "MENU_EDIT_PREFERENCES"c, null)).disableByDefault(); const Action ACTION_HELP_ABOUT = new Action(IDEActions.HelpAbout, "MENU_HELP_ABOUT"c); const Action ACTION_WINDOW_CLOSE_ALL_DOCUMENTS = new 
Action(IDEActions.WindowCloseAllDocuments, "MENU_WINDOW_CLOSE_ALL_DOCUMENTS"c); const Action ACTION_CREATE_NEW_WORKSPACE = new Action(IDEActions.CreateNewWorkspace, "Create new workspace"d); From df0450ee8783dc58d132a5e5b5b5a423cfb24d76 Mon Sep 17 00:00:00 2001 From: Vadim Lopatin Date: Thu, 12 Feb 2015 17:07:43 +0300 Subject: [PATCH 06/13] improve tokenizer --- src/ddc/lexer/tokenizer.d | 110 +++++++++++++++++++++++++++++++++++++- 1 file changed, 109 insertions(+), 1 deletion(-) diff --git a/src/ddc/lexer/tokenizer.d b/src/ddc/lexer/tokenizer.d index 7c0fd4d..34c07dc 100644 --- a/src/ddc/lexer/tokenizer.d +++ b/src/ddc/lexer/tokenizer.d @@ -1378,6 +1378,114 @@ struct StringAppender { void reset() { len = 0; } + static int parseHexDigit(dchar ch) { + if (ch >= '0' && ch <='9') + return ch - '0'; + if (ch >= 'a' && ch <='f') + return ch - 'a' + 10; + if (ch >= 'A' && ch <='F') + return ch - 'A' + 10; + return -1; + } + bool errorFlag = false; + dchar decodeHex(ref int pos, int count) { + dchar res = 0; + for (int i = 0; i < count; i++) { + if (pos >= len - 1) { + errorFlag = true; + return res; + } + dchar ch = buf[++pos]; + int digit = parseHexDigit(ch); + if (digit < 0) { + errorFlag = true; + digit = 0; + } + res = (res << 4) | digit; + } + return res; + } + dchar decodeOct(dchar firstChar, ref int pos) { + dchar res = 0; + res = firstChar - '0'; + if (pos < len - 1 && buf[pos + 1] >= '0' && buf[pos + 1] <= '7') { + res = (res << 3) | (buf[++pos] - '0'); + } + if (pos < len - 1 && buf[pos + 1] >= '0' && buf[pos + 1] <= '7') { + res = (res << 3) | (buf[++pos] - '0'); + } + return res; + } + bool processEscapeSequences() { + errorFlag = false; + int dst = 0; + for (int src = 0; src < len; src++) { + dchar ch = buf[src]; + if (ch == '\\') { + if (src == len - 1) + break; // INVALID + ch = buf[++src]; + switch (ch) { + case '\'': + case '\"': + case '?': + case '\\': + buf[dst++] = ch; + break; + case '0': + buf[dst++] = '\0'; + break; + case 'a': + buf[dst++] = '\a'; + break; + case 'b': + buf[dst++] = '\b'; + break; + case 'f': + buf[dst++] = '\f'; + break; + case 'n': + buf[dst++] = '\n'; + break; + case 'r': + buf[dst++] = '\r'; + break; + case 't': + buf[dst++] = '\t'; + break; + case 'v': + buf[dst++] = '\v'; + break; + case 'x': + buf[dst++] = decodeHex(src, 2); + break; + case 'u': + buf[dst++] = decodeHex(src, 4); + break; + case 'U': + buf[dst++] = decodeHex(src, 8); + break; + default: + if (ch >= '0' && ch <= '7') { + // octal X XX or XXX + buf[dst++] = decodeOct(ch, src); // something wrong + } else if (ch == '&') { + // named character entity + buf[dst++] = ch; + // just show it as is + } else { + buf[dst++] = ch; // something wrong + errorFlag = true; + } + break; + } + } else { + buf[dst++] = ch; + } + } + len = dst; + return errorFlag; + } } class Tokenizer @@ -2473,7 +2581,7 @@ class Tokenizer _sharedStringLiteralToken.setText(_stringLiteralAppender.get(), type); return _sharedStringLiteralToken; } - // TODO: process escape sequences + _stringLiteralAppender.processEscapeSequences(); _sharedStringLiteralToken.setText(_stringLiteralAppender.get(), type); return _sharedStringLiteralToken; } From 0fdb79fd1e6e2371830a3328c1261cfa4bd313d7 Mon Sep 17 00:00:00 2001 From: Vadim Lopatin Date: Thu, 12 Feb 2015 18:26:50 +0300 Subject: [PATCH 07/13] tokenizer improvements --- src/ddc/lexer/tokenizer.d | 304 +++++++++++++++++++++++++++++++++++++- 1 file changed, 303 insertions(+), 1 deletion(-) diff --git a/src/ddc/lexer/tokenizer.d b/src/ddc/lexer/tokenizer.d index 34c07dc..099b22c 
100644 --- a/src/ddc/lexer/tokenizer.d +++ b/src/ddc/lexer/tokenizer.d @@ -1416,6 +1416,31 @@ struct StringAppender { } return res; } + + char[] entityNameBuf; + int entityNameLen; + + dchar decodeCharacterEntity(ref int pos) { + entityNameLen = 0; + pos++; + for(; pos < len && buf[pos] != ';'; pos++) { + dchar ch = buf[pos]; + if (ch >= 0x80) + errorFlag = true; + if (entityNameBuf.length < entityNameLen + 4) + entityNameBuf.length += 32; + entityNameBuf[entityNameLen++] = cast(char)ch; + } + if (pos < len && buf[pos] == ';') { + pos++; + dchar ch = entityToChar(cast(string)entityNameBuf[0 .. entityNameLen]); + if (ch) + return ch; + } + errorFlag = true; + return '?'; + } + bool processEscapeSequences() { errorFlag = false; int dst = 0; @@ -1471,7 +1496,7 @@ struct StringAppender { buf[dst++] = decodeOct(ch, src); // something wrong } else if (ch == '&') { // named character entity - buf[dst++] = ch; + buf[dst++] = decodeCharacterEntity(src); // just show it as is } else { buf[dst++] = ch; // something wrong @@ -2999,3 +3024,280 @@ unittest { } } } + +/// converts named entity to character, returns 0 if not found +dchar entityToChar(string name) { + if (auto ch = name in entityToCharMap) { + return *ch; + } + return 0; +} + +/// fings entity name for character, returns null if not found +string charToEntity(dchar ch) { + if (auto name = ch in charToEntityMap) { + return *name; + } + return null; +} + +private __gshared dchar[string]entityToCharMap; +private __gshared string[dchar]charToEntityMap; +private void addEntity(string name, dchar ch) { + entityToCharMap[name] = ch; + charToEntityMap[ch] = name; +} +__gshared static this() { + addEntity("quot", 34); + addEntity("amp", 38); + addEntity("lt", 60); + addEntity("gt", 62); + addEntity("OElig", 338); + addEntity("oelig", 339); + addEntity("Scaron", 352); + addEntity("scaron", 353); + addEntity("Yuml", 376); + addEntity("circ", 710); + addEntity("tilde", 732); + addEntity("ensp", 8194); + addEntity("emsp", 8195); + addEntity("thinsp", 8201); + addEntity("zwnj", 8204); + addEntity("zwj", 8205); + addEntity("lrm", 8206); + addEntity("rlm", 8207); + addEntity("ndash", 8211); + addEntity("mdash", 8212); + addEntity("lsquo", 8216); + addEntity("rsquo", 8217); + addEntity("sbquo", 8218); + addEntity("ldquo", 8220); + addEntity("rdquo", 8221); + addEntity("bdquo", 8222); + addEntity("dagger", 8224); + addEntity("Dagger", 8225); + addEntity("permil", 8240); + addEntity("lsaquo", 8249); + addEntity("rsaquo", 8250); + addEntity("euro", 8364); + addEntity("nbsp", 160); + addEntity("iexcl", 161); + addEntity("cent", 162); + addEntity("pound", 163); + addEntity("curren", 164); + addEntity("yen", 165); + addEntity("brvbar", 166); + addEntity("sect", 167); + addEntity("uml", 168); + addEntity("copy", 169); + addEntity("ordf", 170); + addEntity("laquo", 171); + addEntity("not", 172); + addEntity("shy", 173); + addEntity("reg", 174); + addEntity("macr", 175); + addEntity("deg", 176); + addEntity("plusmn", 177); + addEntity("sup2", 178); + addEntity("sup3", 179); + addEntity("acute", 180); + addEntity("micro", 181); + addEntity("para", 182); + addEntity("middot", 183); + addEntity("cedil", 184); + addEntity("sup1", 185); + addEntity("ordm", 186); + addEntity("raquo", 187); + addEntity("frac14", 188); + addEntity("frac12", 189); + addEntity("frac34", 190); + addEntity("iquest", 191); + addEntity("Agrave", 192); + addEntity("Aacute", 193); + addEntity("Acirc", 194); + addEntity("Atilde", 195); + addEntity("Auml", 196); + addEntity("Aring", 197); + 
addEntity("AElig", 198); + addEntity("Ccedil", 199); + addEntity("Egrave", 200); + addEntity("Eacute", 201); + addEntity("Ecirc", 202); + addEntity("Euml", 203); + addEntity("Igrave", 204); + addEntity("Iacute", 205); + addEntity("Icirc", 206); + addEntity("Iuml", 207); + addEntity("ETH", 208); + addEntity("Ntilde", 209); + addEntity("Ograve", 210); + addEntity("Oacute", 211); + addEntity("Ocirc", 212); + addEntity("Otilde", 213); + addEntity("Ouml", 214); + addEntity("times", 215); + addEntity("Oslash", 216); + addEntity("Ugrave", 217); + addEntity("Uacute", 218); + addEntity("Ucirc", 219); + addEntity("Uuml", 220); + addEntity("Yacute", 221); + addEntity("THORN", 222); + addEntity("szlig", 223); + addEntity("agrave", 224); + addEntity("aacute", 225); + addEntity("acirc", 226); + addEntity("atilde", 227); + addEntity("auml", 228); + addEntity("aring", 229); + addEntity("aelig", 230); + addEntity("ccedil", 231); + addEntity("egrave", 232); + addEntity("eacute", 233); + addEntity("ecirc", 234); + addEntity("euml", 235); + addEntity("igrave", 236); + addEntity("iacute", 237); + addEntity("icirc", 238); + addEntity("iuml", 239); + addEntity("eth", 240); + addEntity("ntilde", 241); + addEntity("ograve", 242); + addEntity("oacute", 243); + addEntity("ocirc", 244); + addEntity("otilde", 245); + addEntity("ouml", 246); + addEntity("divide", 247); + addEntity("oslash", 248); + addEntity("ugrave", 249); + addEntity("uacute", 250); + addEntity("ucirc", 251); + addEntity("uuml", 252); + addEntity("yacute", 253); + addEntity("thorn", 254); + addEntity("yuml", 255); + addEntity("fnof", 402); + addEntity("Alpha", 913); + addEntity("Beta", 914); + addEntity("Gamma", 915); + addEntity("Delta", 916); + addEntity("Epsilon", 917); + addEntity("Zeta", 918); + addEntity("Eta", 919); + addEntity("Theta", 920); + addEntity("Iota", 921); + addEntity("Kappa", 922); + addEntity("Lambda", 923); + addEntity("Mu", 924); + addEntity("Nu", 925); + addEntity("Xi", 926); + addEntity("Omicron", 927); + addEntity("Pi", 928); + addEntity("Rho", 929); + addEntity("Sigma", 931); + addEntity("Tau", 932); + addEntity("Upsilon", 933); + addEntity("Phi", 934); + addEntity("Chi", 935); + addEntity("Psi", 936); + addEntity("Omega", 937); + addEntity("alpha", 945); + addEntity("beta", 946); + addEntity("gamma", 947); + addEntity("delta", 948); + addEntity("epsilon", 949); + addEntity("zeta", 950); + addEntity("eta", 951); + addEntity("theta", 952); + addEntity("iota", 953); + addEntity("kappa", 954); + addEntity("lambda", 955); + addEntity("mu", 956); + addEntity("nu", 957); + addEntity("xi", 958); + addEntity("omicron", 959); + addEntity("pi", 960); + addEntity("rho", 961); + addEntity("sigmaf", 962); + addEntity("sigma", 963); + addEntity("tau", 964); + addEntity("upsilon", 965); + addEntity("phi", 966); + addEntity("chi", 967); + addEntity("psi", 968); + addEntity("omega", 969); + addEntity("thetasym", 977); + addEntity("upsih", 978); + addEntity("piv", 982); + addEntity("bull", 8226); + addEntity("hellip", 8230); + addEntity("prime", 8242); + addEntity("Prime", 8243); + addEntity("oline", 8254); + addEntity("frasl", 8260); + addEntity("weierp", 8472); + addEntity("image", 8465); + addEntity("real", 8476); + addEntity("trade", 8482); + addEntity("alefsym", 8501); + addEntity("larr", 8592); + addEntity("uarr", 8593); + addEntity("rarr", 8594); + addEntity("darr", 8595); + addEntity("harr", 8596); + addEntity("crarr", 8629); + addEntity("lArr", 8656); + addEntity("uArr", 8657); + addEntity("rArr", 8658); + addEntity("dArr", 8659); + 
addEntity("hArr", 8660); + addEntity("forall", 8704); + addEntity("part", 8706); + addEntity("exist", 8707); + addEntity("empty", 8709); + addEntity("nabla", 8711); + addEntity("isin", 8712); + addEntity("notin", 8713); + addEntity("ni", 8715); + addEntity("prod", 8719); + addEntity("sum", 8721); + addEntity("minus", 8722); + addEntity("lowast", 8727); + addEntity("radic", 8730); + addEntity("prop", 8733); + addEntity("infin", 8734); + addEntity("ang", 8736); + addEntity("and", 8743); + addEntity("or", 8744); + addEntity("cap", 8745); + addEntity("cup", 8746); + addEntity("int", 8747); + addEntity("there4", 8756); + addEntity("sim", 8764); + addEntity("cong", 8773); + addEntity("asymp", 8776); + addEntity("ne", 8800); + addEntity("equiv", 8801); + addEntity("le", 8804); + addEntity("ge", 8805); + addEntity("sub", 8834); + addEntity("sup", 8835); + addEntity("nsub", 8836); + addEntity("sube", 8838); + addEntity("supe", 8839); + addEntity("oplus", 8853); + addEntity("otimes", 8855); + addEntity("perp", 8869); + addEntity("sdot", 8901); + addEntity("lceil", 8968); + addEntity("rceil", 8969); + addEntity("lfloor", 8970); + addEntity("rfloor", 8971); + addEntity("loz", 9674); + addEntity("spades", 9824); + addEntity("clubs", 9827); + addEntity("hearts", 9829); + addEntity("diams", 9830); + addEntity("lang", 10216); + addEntity("rang", 10217); +} From fd570d23589b191adbe1ecd5e13ebe6c39e92e28 Mon Sep 17 00:00:00 2001 From: Vadim Lopatin Date: Thu, 12 Feb 2015 20:21:36 +0300 Subject: [PATCH 08/13] improvements in tokenizer --- dlangide.visualdproj | 10 +- src/ddc/lexer/textsource.d | 2 +- src/ddc/lexer/tokenizer.d | 588 +++++++++++++++++++++---------------- src/dlangide.d | 3 + 4 files changed, 343 insertions(+), 260 deletions(-) diff --git a/dlangide.visualdproj b/dlangide.visualdproj index b7abcc4..b4e63e5 100644 --- a/dlangide.visualdproj +++ b/dlangide.visualdproj @@ -198,6 +198,11 @@ + + + + + @@ -216,11 +221,6 @@ - - - - - diff --git a/src/ddc/lexer/textsource.d b/src/ddc/lexer/textsource.d index 3d0239d..3b6b59a 100644 --- a/src/ddc/lexer/textsource.d +++ b/src/ddc/lexer/textsource.d @@ -49,7 +49,7 @@ class ArraySourceLines : SourceLines { protected uint _line; protected uint _firstLine; protected dstring[] _lines; - static protected dchar[] _emptyLine = ""d.dup; + static __gshared protected dchar[] _emptyLine = ""d.dup; this() { } diff --git a/src/ddc/lexer/tokenizer.d b/src/ddc/lexer/tokenizer.d index 099b22c..5df8367 100644 --- a/src/ddc/lexer/tokenizer.d +++ b/src/ddc/lexer/tokenizer.d @@ -1225,7 +1225,7 @@ class StringLiteralToken : Token { return new StringLiteralToken(_file, _line, _pos, _text.dup, _literalType); } public override @property string toString() { - return "String:" ~ to!string(_text); + return toUTF8("String:\"" ~ _text ~ "\"" ~ (_literalType ? _literalType : ' ')); } } @@ -1319,7 +1319,7 @@ class RealLiteralToken : Token { return new RealLiteralToken(_file, _line, _pos, _value, _precision, _imaginary); } public override @property string toString() { - return "Integer:" ~ to!string(_value) ~ (_precision == 0 ? "f" : (_precision == 2 ? "L" : "")) ~ (_imaginary ? "i" : ""); + return "Real:" ~ to!string(_value) ~ (_precision == 0 ? "f" : (_precision == 2 ? "L" : "")) ~ (_imaginary ? "i" : ""); } } @@ -1432,7 +1432,6 @@ struct StringAppender { entityNameBuf[entityNameLen++] = cast(char)ch; } if (pos < len && buf[pos] == ';') { - pos++; dchar ch = entityToChar(cast(string)entityNameBuf[0 .. 
entityNameLen]); if (ch) return ch; @@ -1918,8 +1917,25 @@ class Tokenizer // protected Token processDecFloatSuffix(real value) { - _sharedRealToken.setValue(value); - // TODO + ubyte precision = 1; + bool imaginary = false; + dchar next = _pos < _len ? _lineText[_pos] : 0; + if (next == 'f') { + _pos++; + precision = 0; + } else if (next == 'L') { + _pos++; + precision = 2; + } + next = _pos < _len ? _lineText[_pos] : 0; + if (next == 'i') { + _pos++; + imaginary = true; + } + next = _pos < _len ? _lineText[_pos] : 0; + if (isIdentMiddleChar(next)) + return parserError("invalid suffix for floating point literal", _sharedRealToken); + _sharedRealToken.setValue(value, precision, imaginary); return _sharedRealToken; } @@ -2040,7 +2056,10 @@ class Tokenizer dchar next = _pos < _len ? _lineText[_pos] : 0; if (next == 0) return _sharedIntegerToken; - if (next == '.') { + if (next == 'e' || next == 'E') { + _pos++; + return processDecFloatExponent(number); + } else if (next == '.') { _pos++; return processDecFloatSecondPart(number); } @@ -2591,9 +2610,15 @@ class Tokenizer dchar t = 0; if (_pos < _len) { dchar ch = _lineText[_pos]; - if (ch == 'c' || ch == 'w' || ch == 'd') + if (ch == 'c' || ch == 'w' || ch == 'd') { t = ch; - else if (isIdentMiddleChar(ch)) + _pos++; + if (_pos < _len) { + ch = _lineText[_pos]; + if (isIdentMiddleChar(ch)) + return parserError("Unexpected character after string literal", _sharedStringLiteralToken); + } + } else if (isIdentMiddleChar(ch)) return parserError("Unexpected character after string literal", _sharedStringLiteralToken); } if (t != 0) { @@ -2601,7 +2626,7 @@ class Tokenizer return parserError("Cannot concatenate strings of different type", _sharedStringLiteralToken); type = t; } - if (!wysiwyg) { + if (wysiwyg) { // no escape processing _sharedStringLiteralToken.setText(_stringLiteralAppender.get(), type); return _sharedStringLiteralToken; @@ -2745,251 +2770,6 @@ class Tokenizer } -unittest { - import std.algorithm; - class TokenTest { - int _line; - string _file; - this(string file, int line) { - _file = file; - _line = line; - } - bool doTest(Token token) { - return true; - } - void execute(Tokenizer tokenizer) { - Token token = tokenizer.nextToken(); - if (!doTest(token)) { - assert(false, " token doesn not match at " ~ _file ~ ":" ~ to!string(_line) ~ " foundToken: " ~ token.toString ~ " expected: " ~ toString); - } - } - public override @property string toString() { - return "TokenTest"; - } - } - void testTokenizer(string code, TokenTest[] tokens, string file = __FILE__, uint line = __LINE__) { - Tokenizer tokenizer = new Tokenizer(code, "tokenizerTest:" ~ file ~ ":" ~ to!string(line)); - for (int i = 0; i < tokens.length; i++) { - tokens[i].execute(tokenizer); - } - } - class KeywordTest : TokenTest { - Keyword _code; - this(Keyword code, string file = __FILE__, uint line = __LINE__) { - super(file, line); - _code = code; - } - override bool doTest(Token token) { - if (token.type != TokenType.KEYWORD) - return false; - if (token.keyword != _code) - return false; - return true; - } - public override @property string toString() { - return "Keyword:" ~ to!string(_code); - } - } - class OpTest : TokenTest { - OpCode _code; - this(OpCode code, string file = __FILE__, uint line = __LINE__) { - super(file, line); - _code = code; - } - override bool doTest(Token token) { - if (token.type != TokenType.OP) - return false; - if (token.opCode != _code) - return false; - return true; - } - public override @property string toString() { - return "Op:" ~ 
to!string(_code); - } - } - class StringTest : TokenTest { - string _value; - this(string value, string file = __FILE__, uint line = __LINE__) { - super(file, line); - _value = value; - } - override bool doTest(Token token) { - if (token.type != TokenType.STRING) - return false; - if (to!string(token.text).equal(_value)) - return false; - return true; - } - public override @property string toString() { - return "String:" ~ _value; - } - } - class IntegerTest : TokenTest { - ulong _value; - bool _unsigned; - bool _long; - this(ulong value, bool unsignedFlag = false, bool longFlag = false, string file = __FILE__, uint line = __LINE__) { - super(file, line); - _value = value; - _unsigned = unsignedFlag; - _long = longFlag; - } - override bool doTest(Token token) { - if (token.type != TokenType.INTEGER) - return false; - if (token.intValue != _value) - return false; - if (token.isUnsigned != _unsigned) - return false; - if (token.isLong != _long) - return false; - return true; - } - public override @property string toString() { - return "Integer:" ~ to!string(_value); - } - } - class RealTest : TokenTest { - real _value; - ubyte _precision; - bool _imaginary; - this(real value, ubyte precision = 1, bool imaginary = false, string file = __FILE__, uint line = __LINE__) { - super(file, line); - _value = value; - _precision = precision; - _imaginary = imaginary; - } - override bool doTest(Token token) { - if (token.type != TokenType.FLOAT) - return false; - if (token.realValue != _value) - return false; - if (token.precision != _precision) - return false; - if (token.isImaginary != _imaginary) - return false; - return true; - } - public override @property string toString() { - return "Real:" ~ to!string(_value); - } - } - class IdentTest : TokenTest { - string _value; - this(string value, string file = __FILE__, uint line = __LINE__) { - super(file, line); - _value = value; - } - override bool doTest(Token token) { - if (token.type != TokenType.IDENTIFIER) - return false; - if (! 
to!string(token.text).equal(_value)) - return false; - return true; - } - public override @property string toString() { - return "Ident:" ~ _value; - } - } - class CommentTest : TokenTest { - this(string file = __FILE__, uint line = __LINE__) { - super(file, line); - } - override bool doTest(Token token) { - if (token.type != TokenType.COMMENT) - return false; - return true; - } - public override @property string toString() { - return "Comment"; - } - } - class EOFTest : TokenTest { - this(string file = __FILE__, uint line = __LINE__) { - super(file, line); - } - override bool doTest(Token token) { - if (token.type != TokenType.EOF) - return false; - return true; - } - public override @property string toString() { - return "EOF"; - } - } - class WhiteSpaceTest : TokenTest { - this(string file = __FILE__, uint line = __LINE__) { - super(file, line); - } - override bool doTest(Token token) { - if (token.type != TokenType.WHITESPACE) - return false; - return true; - } - public override @property string toString() { - return "whiteSpace"; - } - } - TokenTest checkString(string value, string file = __FILE__, uint line = __LINE__) { - return new StringTest(value, file, line); - } - TokenTest checkInteger(ulong value, bool unsignedFlag = false, bool longFlag = false, string file = __FILE__, uint line = __LINE__) { - return new IntegerTest(value, unsignedFlag, longFlag, file, line); - } - TokenTest checkReal(real value, byte precision = 0, bool imaginary = false, string file = __FILE__, uint line = __LINE__) { - return new RealTest(value, precision, imaginary, file, line); - } - TokenTest checkIdent(string value, string file = __FILE__, uint line = __LINE__) { - return new IdentTest(value, file, line); - } - TokenTest checkKeyword(Keyword value, string file = __FILE__, uint line = __LINE__) { - return new KeywordTest(value, file, line); - } - TokenTest checkOp(OpCode value, string file = __FILE__, uint line = __LINE__) { - return new OpTest(value, file, line); - } - TokenTest checkSpace(string file = __FILE__, uint line = __LINE__) { - return new WhiteSpaceTest(file, line); - } - TokenTest checkComment(string file = __FILE__, uint line = __LINE__) { - return new CommentTest(file, line); - } - TokenTest checkEOF(string file = __FILE__, uint line = __LINE__) { - return new EOFTest(file, line); - } - - testTokenizer(q"TEST -int i; -TEST" - , [ - checkKeyword(Keyword.INT), - checkSpace(), - checkIdent("i"), - checkOp(OpCode.SEMICOLON), - checkEOF() - ]); - testTokenizer("0b1101 0x123abcdU 0xABCL 0743 192837465 0 192_837_465 5.25" - , [ - checkInteger(13), - checkSpace(), - checkInteger(0x123abcd, true, false), - checkSpace(), - checkInteger(0xabc, false, true), - checkSpace(), - checkInteger(std.conv.octal!743), - checkSpace(), - checkInteger(192_837_465), - checkSpace(), - checkInteger(0), - checkSpace(), - checkInteger(192837465), - checkSpace(), - checkReal(5.25), - checkEOF() - ]); -} - - unittest { version(DisableLexerTest) { import std.stdio; @@ -3301,3 +3081,303 @@ __gshared static this() { addEntity("lang", 10216); addEntity("rang", 10217); } + + + +//void runTokenizerTest() +unittest +{ + import std.algorithm; + class TokenTest { + int _line; + string _file; + this(string file, int line) { + _file = file; + _line = line; + } + bool doTest(Token token) { + return true; + } + void execute(Tokenizer tokenizer) { + Token token = tokenizer.nextToken(); + if (!doTest(token)) { + assert(false, " token doesn not match at " ~ _file ~ ":" ~ to!string(_line) ~ " foundToken: " ~ token.toString ~ " 
expected: " ~ toString); + } + } + public override @property string toString() { + return "TokenTest"; + } + } + void testTokenizer(string code, TokenTest[] tokens, string file = __FILE__, uint line = __LINE__) { + Tokenizer tokenizer = new Tokenizer(code, "tokenizerTest:" ~ file ~ ":" ~ to!string(line)); + for (int i = 0; i < tokens.length; i++) { + tokens[i].execute(tokenizer); + } + } + class KeywordTest : TokenTest { + Keyword _code; + this(Keyword code, string file = __FILE__, uint line = __LINE__) { + super(file, line); + _code = code; + } + override bool doTest(Token token) { + if (token.type != TokenType.KEYWORD) + return false; + if (token.keyword != _code) + return false; + return true; + } + public override @property string toString() { + return "Keyword:" ~ to!string(_code); + } + } + class OpTest : TokenTest { + OpCode _code; + this(OpCode code, string file = __FILE__, uint line = __LINE__) { + super(file, line); + _code = code; + } + override bool doTest(Token token) { + if (token.type != TokenType.OP) + return false; + if (token.opCode != _code) + return false; + return true; + } + public override @property string toString() { + return "Op:" ~ to!string(_code); + } + } + class StringTest : TokenTest { + dstring _value; + dchar _literalType; + this(dstring value, dchar literalType = 0, string file = __FILE__, uint line = __LINE__) { + super(file, line); + _value = value; + _literalType = literalType; + } + override bool doTest(Token token) { + if (token.type != TokenType.STRING) + return false; + if (!token.text.equal(_value)) + return false; + if (token.literalType != _literalType) + return false; + return true; + } + public override @property string toString() { + return toUTF8("String:\"" ~ _value ~ "\"" ~ (_literalType ? _literalType : ' ')); + } + } + class IntegerTest : TokenTest { + ulong _value; + bool _unsigned; + bool _long; + this(ulong value, bool unsignedFlag = false, bool longFlag = false, string file = __FILE__, uint line = __LINE__) { + super(file, line); + _value = value; + _unsigned = unsignedFlag; + _long = longFlag; + } + override bool doTest(Token token) { + if (token.type != TokenType.INTEGER) + return false; + if (token.intValue != _value) + return false; + if (token.isUnsigned != _unsigned) + return false; + if (token.isLong != _long) + return false; + return true; + } + public override @property string toString() { + return "Integer:" ~ to!string(_value); + } + } + class RealTest : TokenTest { + real _value; + ubyte _precision; + bool _imaginary; + this(real value, ubyte precision = 1, bool imaginary = false, string file = __FILE__, uint line = __LINE__) { + super(file, line); + _value = value; + _precision = precision; + _imaginary = imaginary; + } + override bool doTest(Token token) { + if (token.type != TokenType.FLOAT) + return false; + real diff = token.realValue - _value; + real maxerr = _value / 1000000; + if (diff < 0) diff = -diff; + if (maxerr < 0) maxerr = -maxerr; + if (diff > maxerr) + return false; + if (token.precision != _precision) + return false; + if (token.isImaginary != _imaginary) + return false; + return true; + } + public override @property string toString() { + return "Real:" ~ to!string(_value) ~ (_precision == 0 ? "f" : (_precision == 2 ? "L" : "")) ~ (_imaginary ? 
"i" : ""); + } + } + class IdentTest : TokenTest { + string _value; + this(string value, string file = __FILE__, uint line = __LINE__) { + super(file, line); + _value = value; + } + override bool doTest(Token token) { + if (token.type != TokenType.IDENTIFIER) + return false; + if (! to!string(token.text).equal(_value)) + return false; + return true; + } + public override @property string toString() { + return "Ident:" ~ _value; + } + } + class CommentTest : TokenTest { + this(string file = __FILE__, uint line = __LINE__) { + super(file, line); + } + override bool doTest(Token token) { + if (token.type != TokenType.COMMENT) + return false; + return true; + } + public override @property string toString() { + return "Comment"; + } + } + class EOFTest : TokenTest { + this(string file = __FILE__, uint line = __LINE__) { + super(file, line); + } + override bool doTest(Token token) { + if (token.type != TokenType.EOF) + return false; + return true; + } + public override @property string toString() { + return "EOF"; + } + } + class WhiteSpaceTest : TokenTest { + this(string file = __FILE__, uint line = __LINE__) { + super(file, line); + } + override bool doTest(Token token) { + if (token.type != TokenType.WHITESPACE) + return false; + return true; + } + public override @property string toString() { + return "whiteSpace"; + } + } + TokenTest checkString(dstring value, dchar literalType = 0, string file = __FILE__, uint line = __LINE__) { + return new StringTest(value, literalType, file, line); + } + TokenTest checkInteger(ulong value, bool unsignedFlag = false, bool longFlag = false, string file = __FILE__, uint line = __LINE__) { + return new IntegerTest(value, unsignedFlag, longFlag, file, line); + } + TokenTest checkReal(real value, byte precision = 1, bool imaginary = false, string file = __FILE__, uint line = __LINE__) { + return new RealTest(value, precision, imaginary, file, line); + } + TokenTest checkIdent(string value, string file = __FILE__, uint line = __LINE__) { + return new IdentTest(value, file, line); + } + TokenTest checkKeyword(Keyword value, string file = __FILE__, uint line = __LINE__) { + return new KeywordTest(value, file, line); + } + TokenTest checkOp(OpCode value, string file = __FILE__, uint line = __LINE__) { + return new OpTest(value, file, line); + } + TokenTest checkSpace(string file = __FILE__, uint line = __LINE__) { + return new WhiteSpaceTest(file, line); + } + TokenTest checkComment(string file = __FILE__, uint line = __LINE__) { + return new CommentTest(file, line); + } + TokenTest checkEOF(string file = __FILE__, uint line = __LINE__) { + return new EOFTest(file, line); + } + + // test strings + testTokenizer("r\"simple\\nstring\"", [checkString( r"simple\nstring" )]); + + // test strings + testTokenizer(q"TEST +"simple string" +"simple\nstring" +`simple string` +"simple string"d +"simple string"c +"simple string"w +"simple\"string" +"\r\n\f\t\\\"\'&" +TEST" + , [ + checkString("simple string"), + checkSpace(), + checkString("simple\nstring"), + checkSpace(), + checkString("simple string"), + checkSpace(), + checkString("simple string", 'd'), + checkSpace(), + checkString("simple string", 'c'), + checkSpace(), + checkString("simple string", 'w'), + checkSpace(), + checkString("simple\"string"), + checkSpace(), + checkString("\r\n\f\t\\\"\'&"), + ]); + // basic test + testTokenizer(q"TEST +int i; +TEST" + , [ + checkKeyword(Keyword.INT), + checkSpace(), + checkIdent("i"), + checkOp(OpCode.SEMICOLON), + checkEOF() + ]); + // test numbers + testTokenizer("0b1101 
0x123abcdU 0xABCL 0743 192837465 0 192_837_465 5.25 12.3f 54.1L 67.1i 3e3 25.67e-5f" + , [ + checkInteger(13), + checkSpace(), + checkInteger(0x123abcd, true, false), + checkSpace(), + checkInteger(0xabc, false, true), + checkSpace(), + checkInteger(std.conv.octal!743), + checkSpace(), + checkInteger(192_837_465), + checkSpace(), + checkInteger(0), + checkSpace(), + checkInteger(192837465), + checkSpace(), + checkReal(5.25), + checkSpace(), + checkReal(12.3f, 0), + checkSpace(), + checkReal(54.1L, 2), + checkSpace(), + checkReal(67.1, 1, true), + checkSpace(), + checkReal(3e3), + checkSpace(), + checkReal(25.67e-5f, 0), + checkEOF() + ]); +} + diff --git a/src/dlangide.d b/src/dlangide.d index d9dd669..545ca38 100644 --- a/src/dlangide.d +++ b/src/dlangide.d @@ -40,6 +40,9 @@ extern (C) int UIAppMain(string[] args) { } } + //import ddc.lexer.tokenizer; + //runTokenizerTest(); + // create window Window window = Platform.instance.createWindow("Dlang IDE", null, WindowFlag.Resizable, 900, 700); From b5973263633f4bb85a78d627e409d42144c3b0fc Mon Sep 17 00:00:00 2001 From: Vadim Lopatin Date: Thu, 12 Feb 2015 21:36:17 +0300 Subject: [PATCH 09/13] fix crashes --- src/ddc/lexer/tokenizer.d | 26 +++++++++++++++++--------- src/dlangide/ui/dsourceedit.d | 7 +++++-- 2 files changed, 22 insertions(+), 11 deletions(-) diff --git a/src/ddc/lexer/tokenizer.d b/src/ddc/lexer/tokenizer.d index 5df8367..f7776d2 100644 --- a/src/ddc/lexer/tokenizer.d +++ b/src/ddc/lexer/tokenizer.d @@ -1375,6 +1375,15 @@ struct StringAppender { buf[len .. len + s.length] = s; len += s.length; } + void append(dchar ch) { + if (len + 1 > buf.length) { + uint newsize = cast(uint)(buf.length * 2); + if (newsize < 128) + newsize = 128; + buf.length = newsize; + } + buf[len++] = ch; + } void reset() { len = 0; } @@ -1781,20 +1790,19 @@ class Tokenizer return null; } - protected Token processIdent() { + protected Token processIdent(dchar firstChar) { _sharedIdentToken.setPos(_startLine, _startPos); _identAppender.reset(); - int startPos = _startPos; - int endPos = _len; - for (int i = startPos + 1; i < _len; i++) { - dchar ch = _lineText[i]; + _identAppender.append(firstChar); + for (; _pos < _len; ) { + dchar ch = _lineText[_pos]; if (!isIdentMiddleChar(ch)) { - endPos = i; break; } + _identAppender.append(ch); + _pos++; } - _pos = endPos; - _sharedIdentToken.setText(_lineText[startPos .. endPos]); + _sharedIdentToken.setText(_identAppender.get); return _sharedIdentToken; } @@ -2756,7 +2764,7 @@ class Tokenizer return _sharedKeywordToken; } } - return processIdent(); + return processIdent(ch); } OpCode op = detectOp(ch); if (op != OpCode.NONE) { diff --git a/src/dlangide/ui/dsourceedit.d b/src/dlangide/ui/dsourceedit.d index cf67271..fc732b6 100644 --- a/src/dlangide/ui/dsourceedit.d +++ b/src/dlangide/ui/dsourceedit.d @@ -649,8 +649,11 @@ class SimpleDSyntaxHighlighter : SyntaxHighlighter { for (int i = tokenLine; i <= newLine; i++) { int start = i > tokenLine ? 0 : tokenPos; int end = i < newLine ? 
cast(int)lines[i].length : newPos; - for (int j = start; j < end; j++) - _props[i][j] = category; + for (int j = start; j < end; j++) { + if (j < _props[i].length) { + _props[i][j] = category; + } + } } // handle token - convert to category From 2fe2506ecac1e8bdfbdf68c6948d74dfd6b88baa Mon Sep 17 00:00:00 2001 From: Vadim Lopatin Date: Fri, 13 Feb 2015 10:51:21 +0300 Subject: [PATCH 10/13] fix opening file missing in project - issue #24 --- src/dlangide/ui/frame.d | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/dlangide/ui/frame.d b/src/dlangide/ui/frame.d index e94e9ee..da2fbfd 100644 --- a/src/dlangide/ui/frame.d +++ b/src/dlangide/ui/frame.d @@ -111,13 +111,16 @@ class IDEFrame : AppFrame { } bool openSourceFile(string filename, ProjectSourceFile file = null, bool activate = true) { + if (!file && !filename) + return false; if (!file) file = _wsPanel.findSourceFileItem(filename, false); - if(!file) - return false; + //if(!file) + // return false; - filename = file.filename; + if (file) + filename = file.filename; Log.d("openSourceFile ", filename); int index = _tabs.tabIndex(filename); From f60032cc4a66084144b5bf9ced40c60fea4b3d11 Mon Sep 17 00:00:00 2001 From: Vadim Lopatin Date: Fri, 13 Feb 2015 13:38:07 +0300 Subject: [PATCH 11/13] implement drag&drop files to application window - issue #24 --- src/dlangide/ui/commands.d | 8 +++++++- src/dlangide/ui/frame.d | 8 ++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/src/dlangide/ui/commands.d b/src/dlangide/ui/commands.d index 94de07a..bbe2971 100644 --- a/src/dlangide/ui/commands.d +++ b/src/dlangide/ui/commands.d @@ -4,7 +4,6 @@ public import dlangui.core.events; import dlangui.widgets.editors; enum IDEActions : int { - None = 0, //ProjectOpen = 1010000, FileNew = 1010000, FileNewWorkspace, @@ -43,6 +42,12 @@ enum IDEActions : int { ProjectFolderRenameItem, } +__gshared static this() { + // register editor action names and ids + registerActionEnum!IDEActions(); +} + + const Action ACTION_PROJECT_FOLDER_ADD_ITEM = new Action(IDEActions.ProjectFolderAddItem, "MENU_PROJECT_FOLDER_ADD_ITEM"c); const Action ACTION_PROJECT_FOLDER_OPEN_ITEM = new Action(IDEActions.ProjectFolderOpenItem, "MENU_PROJECT_FOLDER_OPEN_ITEM"c); const Action ACTION_PROJECT_FOLDER_REMOVE_ITEM = new Action(IDEActions.ProjectFolderRemoveItem, "MENU_PROJECT_FOLDER_REMOVE_ITEM"c); @@ -87,3 +92,4 @@ const Action ACTION_HELP_ABOUT = new Action(IDEActions.HelpAbout, "MENU_HELP_ABO const Action ACTION_WINDOW_CLOSE_ALL_DOCUMENTS = new Action(IDEActions.WindowCloseAllDocuments, "MENU_WINDOW_CLOSE_ALL_DOCUMENTS"c); const Action ACTION_CREATE_NEW_WORKSPACE = new Action(IDEActions.CreateNewWorkspace, "Create new workspace"d); const Action ACTION_ADD_TO_CURRENT_WORKSPACE = new Action(IDEActions.AddToCurrentWorkspace, "Add to current workspace"d); + diff --git a/src/dlangide/ui/frame.d b/src/dlangide/ui/frame.d index da2fbfd..fed2eaa 100644 --- a/src/dlangide/ui/frame.d +++ b/src/dlangide/ui/frame.d @@ -66,6 +66,7 @@ class IDEFrame : AppFrame { this(Window window) { super(); window.mainWidget = this; + window.onFilesDropped = &onFilesDropped; } override protected void init() { @@ -615,6 +616,13 @@ class IDEFrame : AppFrame { Builder op = new Builder(this, currentWorkspace.startupProject, _logPanel, currentWorkspace.buildConfiguration, buildOp, false); setBackgroundOperation(op); } + + void onFilesDropped(string[] filenames) { + //Log.d("onFilesDropped(", filenames, ")"); + for (int i = 0; i < filenames.length; i++) { + 
openSourceFile(filenames[i], null, i == 0); + } + } } Widget createAboutWidget() From b874f3e060f42672c1e7112bbc2be986518ed330 Mon Sep 17 00:00:00 2001 From: Vadim Lopatin Date: Fri, 13 Feb 2015 13:40:49 +0300 Subject: [PATCH 12/13] filter dragged files by file type - leave text files only --- src/dlangide/ui/frame.d | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/dlangide/ui/frame.d b/src/dlangide/ui/frame.d index fed2eaa..7dd0fe8 100644 --- a/src/dlangide/ui/frame.d +++ b/src/dlangide/ui/frame.d @@ -617,10 +617,15 @@ class IDEFrame : AppFrame { setBackgroundOperation(op); } + /// handle files dropped to application window void onFilesDropped(string[] filenames) { //Log.d("onFilesDropped(", filenames, ")"); + bool first = true; for (int i = 0; i < filenames.length; i++) { - openSourceFile(filenames[i], null, i == 0); + if (isSupportedSourceTextFileFormat(filenames[i])) { + openSourceFile(filenames[i], null, first); + first = false; + } } } } From b1d4e290573892a18f1e21e9b53a1a89e6456b56 Mon Sep 17 00:00:00 2001 From: Vadim Lopatin Date: Fri, 13 Feb 2015 14:21:53 +0300 Subject: [PATCH 13/13] ask user to save unsaved files when closing IDE window - issue #28 implemented for win32 backend --- src/dlangide/ui/frame.d | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/src/dlangide/ui/frame.d b/src/dlangide/ui/frame.d index 7dd0fe8..d852bcb 100644 --- a/src/dlangide/ui/frame.d +++ b/src/dlangide/ui/frame.d @@ -67,6 +67,7 @@ class IDEFrame : AppFrame { super(); window.mainWidget = this; window.onFilesDropped = &onFilesDropped; + window.onCanClose = &onCanClose; } override protected void init() { @@ -444,7 +445,8 @@ class IDEFrame : AppFrame { if (a) { switch (a.id) { case IDEActions.FileExit: - window.close(); + if (onCanClose()) + window.close(); return true; case IDEActions.HelpAbout: Window wnd = Platform.instance.createWindow("About...", window, WindowFlag.Modal); @@ -628,6 +630,14 @@ class IDEFrame : AppFrame { } } } + + /// return false to prevent closing + bool onCanClose() { + askForUnsavedEdits(delegate() { + window.close(); + }); + return false; + } } Widget createAboutWidget()
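Patch 03 introduces src/ddebug/windows/windebug.d but the series does not yet add a caller for it. Below is a minimal driver sketch showing how the class could be exercised, assuming the module compiles on Windows against the win32 bindings imported in the patch; the main() wrapper, the executable path and the argument string are illustrative placeholders, not part of the patch series.

// minimal, hypothetical driver for WinDebugger from patch 03 (not part of the series)
module ddebug.windows.debugsketch;

version(Windows):

import ddebug.windows.windebug;
import std.stdio;

int main() {
    auto dbg = new WinDebugger();
    // startDebugging() calls CreateProcessW with DEBUG_ONLY_THIS_PROCESS,
    // so only this child process is attached to the debugger
    if (!dbg.startDebugging("bin\\helloworld.exe", "--verbose")) {
        writeln("failed to start process under debugger");
        return 1;
    }
    // enterDebugLoop() blocks in WaitForDebugEvent/ContinueDebugEvent,
    // dispatching events through processDebugEvent(), and returns only
    // when WaitForDebugEvent fails (e.g. after the debuggee has exited)
    dbg.enterDebugLoop();
    return 0;
}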