From d8145233c1109d3992d2cbc11e1538e9d9322f16 Mon Sep 17 00:00:00 2001
From: Vadim Lopatin
Date: Mon, 12 Sep 2016 17:45:45 +0300
Subject: [PATCH] parser experiments

---
 src/ddc/lexer/parser.d    | 62 ++++++++++++++++++++++++++++++---------
 src/ddc/lexer/tokenizer.d | 22 ++++++++++++++
 2 files changed, 70 insertions(+), 14 deletions(-)

diff --git a/src/ddc/lexer/parser.d b/src/ddc/lexer/parser.d
index 1b77798..fea9649 100644
--- a/src/ddc/lexer/parser.d
+++ b/src/ddc/lexer/parser.d
@@ -5,22 +5,56 @@
 import ddc.lexer.ast;
 import dlangui.core.textsource;
 import dlangui.core.logger;
 
+class Parser {
+    SourceLines _lines;
+    SourceFile _file;
+    Token[] _tokens;
+    int[] _pairedBracket;
+    int[] _bracketLevel;
+    void init(SourceLines lines, SourceFile file) {
+        _lines = lines;
+        _file = file;
+    }
+    void init(dstring text, SourceFile file) {
+        import std.array;
+        ArraySourceLines lines = new ArraySourceLines();
+        dstring[] src = text.split('\n');
+        lines.initialize(src, file, 0);
+        init(lines, file);
+    }
+    void init(dstring src, string filename) {
+        init(src, new SourceFile(filename));
+    }
+    bool findBracketPairs() {
+        bool res = true;
+        _pairedBracket.length = _tokens.length;
+        _pairedBracket[0 .. $] = -1;
+        _bracketLevel.length = _tokens.length;
+        _bracketLevel[0 .. $] = -1;
+        return res;
+    }
+    bool tokenize() {
+        bool res = false;
+        Tokenizer tokenizer = new Tokenizer(_lines);
+        //tokenizer.errorTolerant = true;
+        try {
+            _tokens = tokenizer.allTokens();
+            Log.v("tokens: ", _tokens);
+            findBracketPairs();
+            res = true;
+        } catch (Exception e) {
+            // error
+            Log.e("Tokenizer exception", e);
+        }
+        return res;
+    }
+}
+
 ASTNode parseSource(dstring text, SourceFile file) {
     ASTNode res;
-    import std.array;
-    ArraySourceLines lines = new ArraySourceLines();
-    dstring[] src = text.split('\n');
-    lines.initialize(src, file, 0);
-    Tokenizer tokenizer = new Tokenizer(lines);
-    //tokenizer.errorTolerant = true;
-    try {
-        Token[] tokens = tokenizer.allTokens();
-        ulong len = tokens.length;
-        Log.v("tokens: ", tokens);
-    } catch (Exception e) {
-        // error
-        Log.e("Tokenizer exception");
-    }
+    Parser parser = new Parser();
+    parser.init(text, file);
+    parser.tokenize();
     return res;
 }
diff --git a/src/ddc/lexer/tokenizer.d b/src/ddc/lexer/tokenizer.d
index 55605b2..3fbce93 100644
--- a/src/ddc/lexer/tokenizer.d
+++ b/src/ddc/lexer/tokenizer.d
@@ -924,6 +924,28 @@ class Token {
     @property float floatValue() { return 0; }
     @property byte precision() { return 0; }
     @property bool isImaginary() { return false; }
+    @property bool isBracket() {
+        OpCode op = opCode;
+        return op == OpCode.PAR_OPEN
+            || op == OpCode.PAR_CLOSE
+            || op == OpCode.SQ_OPEN
+            || op == OpCode.SQ_CLOSE
+            || op == OpCode.CURL_OPEN
+            || op == OpCode.CURL_CLOSE;
+    }
+    @property bool isOpenBracket() {
+        OpCode op = opCode;
+        return op == OpCode.PAR_OPEN
+            || op == OpCode.SQ_OPEN
+            || op == OpCode.CURL_OPEN;
+    }
+    @property bool isCloseBracket() {
+        OpCode op = opCode;
+        return op == OpCode.PAR_CLOSE
+            || op == OpCode.SQ_CLOSE
+            || op == OpCode.CURL_CLOSE;
+    }
+    @property bool isEof() { return type == TokenType.EOF; }
     /// returns opcode ID - for opcode tokens
     @property OpCode opCode() { return OpCode.NONE; }