Only split server code, no changes

WebFreak001 2017-08-16 07:39:08 +02:00
parent cd4a027eac
commit 6c310d2d5e
5 changed files with 342 additions and 270 deletions


@@ -50,7 +50,8 @@ import common.messages;
  * Returns:
  * the autocompletion response
  */
-public AutocompleteResponse complete(const AutocompleteRequest request, ref ModuleCache moduleCache)
+public AutocompleteResponse complete(const AutocompleteRequest request,
+		ref ModuleCache moduleCache)
 {
 	const(Token)[] tokenArray;
 	auto stringCache = StringCache(StringCache.defaultBucketCount);
@@ -58,8 +59,12 @@ public AutocompleteResponse complete(const AutocompleteRequest request, ref Modu
 		request.cursorPosition, stringCache, tokenArray);
 	if (beforeTokens.length >= 2)
 	{
-		if (beforeTokens[$ - 1] == tok!"(" || beforeTokens[$ - 1] == tok!"["
-				|| beforeTokens[$ - 1] == tok!",")
+		if (beforeTokens[$ - 1] == tok!"(" || beforeTokens[$ - 1] == tok!"[")
+		{
+			return parenCompletion(beforeTokens, tokenArray, request.cursorPosition,
+					moduleCache);
+		}
+		else if (beforeTokens[$ - 1] == tok!",")
 		{
 			immutable size_t end = goBackToOpenParen(beforeTokens);
 			if (end != size_t.max)
@@ -72,8 +77,9 @@ public AutocompleteResponse complete(const AutocompleteRequest request, ref Modu
 		if (kind == ImportKind.neither)
 		{
 			if (beforeTokens.isUdaExpression)
-				beforeTokens = beforeTokens[$ - 1 .. $];
-			return dotCompletion(beforeTokens, tokenArray, request.cursorPosition, moduleCache);
+				beforeTokens = beforeTokens[$-1 .. $];
+			return dotCompletion(beforeTokens, tokenArray, request.cursorPosition,
+					moduleCache);
 		}
 		else
 			return importCompletion(beforeTokens, kind, moduleCache);
@@ -123,7 +129,9 @@ AutocompleteResponse dotCompletion(T)(T beforeTokens, const(Token)[] tokenArray,
 	switch (significantTokenType)
 	{
-	mixin(STRING_LITERAL_CASES);
+	case tok!"stringLiteral":
+	case tok!"wstringLiteral":
+	case tok!"dstringLiteral":
 		foreach (symbol; arraySymbols)
 		{
 			response.completionKinds ~= symbol.kind;
@@ -131,15 +139,39 @@ AutocompleteResponse dotCompletion(T)(T beforeTokens, const(Token)[] tokenArray,
 		}
 		response.completionType = CompletionType.identifiers;
 		break;
-	mixin(TYPE_IDENT_CASES);
+	case tok!"int":
+	case tok!"uint":
+	case tok!"long":
+	case tok!"ulong":
+	case tok!"char":
+	case tok!"wchar":
+	case tok!"dchar":
+	case tok!"bool":
+	case tok!"byte":
+	case tok!"ubyte":
+	case tok!"short":
+	case tok!"ushort":
+	case tok!"cent":
+	case tok!"ucent":
+	case tok!"float":
+	case tok!"ifloat":
+	case tok!"cfloat":
+	case tok!"idouble":
+	case tok!"cdouble":
+	case tok!"double":
+	case tok!"real":
+	case tok!"ireal":
+	case tok!"creal":
+	case tok!"identifier":
 	case tok!")":
 	case tok!"]":
+	case tok!"this":
+	case tok!"super":
 		auto allocator = scoped!(ASTAllocator)();
 		RollbackAllocator rba;
 		ScopeSymbolPair pair = generateAutocompleteTrees(tokenArray, allocator,
 			&rba, cursorPosition, moduleCache);
-		scope (exit)
-			pair.destroy();
+		scope(exit) pair.destroy();
 		response.setCompletions(pair.scope_, getExpression(beforeTokens),
 			cursorPosition, CompletionType.identifiers, false, partial);
 		break;
@@ -195,6 +227,7 @@ AutocompleteResponse parenCompletion(T)(T beforeTokens,
 		break;
 	case tok!"characterLiteral":
 	case tok!"doubleLiteral":
+	case tok!"dstringLiteral":
 	case tok!"floatLiteral":
 	case tok!"identifier":
 	case tok!"idoubleLiteral":
@@ -203,22 +236,22 @@ AutocompleteResponse parenCompletion(T)(T beforeTokens,
 	case tok!"irealLiteral":
 	case tok!"longLiteral":
 	case tok!"realLiteral":
+	case tok!"stringLiteral":
 	case tok!"uintLiteral":
 	case tok!"ulongLiteral":
+	case tok!"wstringLiteral":
 	case tok!"this":
 	case tok!"super":
 	case tok!")":
 	case tok!"]":
-	mixin(STRING_LITERAL_CASES);
 		auto allocator = scoped!(ASTAllocator)();
 		RollbackAllocator rba;
 		ScopeSymbolPair pair = generateAutocompleteTrees(tokenArray, allocator,
 			&rba, cursorPosition, moduleCache);
-		scope (exit)
-			pair.destroy();
+		scope(exit) pair.destroy();
 		auto expression = getExpression(beforeTokens[0 .. $ - 1]);
-		response.setCompletions(pair.scope_, expression, cursorPosition,
-			CompletionType.calltips, beforeTokens[$ - 1] == tok!"[");
+		response.setCompletions(pair.scope_, expression,
+			cursorPosition, CompletionType.calltips, beforeTokens[$ - 1] == tok!"[");
 		break;
 	default:
 		break;
@@ -236,7 +269,7 @@ AutocompleteResponse importCompletion(T)(T beforeTokens, ImportKind kind,
 		ref ModuleCache moduleCache)
 in
 {
-	assert(beforeTokens.length >= 2);
+	assert (beforeTokens.length >= 2);
 }
 body
 {
@@ -249,8 +282,8 @@ body
 	if (kind == ImportKind.normal)
 	{
-		while (beforeTokens[i].type != tok!","
-				&& beforeTokens[i].type != tok!"import" && beforeTokens[i].type != tok!"=")
+		while (beforeTokens[i].type != tok!"," && beforeTokens[i].type != tok!"import"
+				&& beforeTokens[i].type != tok!"=" )
 			i--;
 		setImportCompletions(beforeTokens[i .. $], response, moduleCache);
 		return response;
@@ -276,11 +309,8 @@ body
 		size_t j = i;
 		loop2: while (j <= beforeTokens.length) switch (beforeTokens[j].type)
 		{
-		case tok!":":
-			break loop2;
-		default:
-			j++;
-			break;
+		case tok!":": break loop2;
+		default: j++; break;
 		}

 		if (i >= j)
@@ -289,8 +319,11 @@ body
 			return response;
 		}

-	immutable string path = beforeTokens[i + 1 .. j].filter!(token => token.type == tok!"identifier")
-		.map!(token => cast() token.text).joiner(dirSeparator).text();
+	immutable string path = beforeTokens[i + 1 .. j]
+		.filter!(token => token.type == tok!"identifier")
+		.map!(token => cast() token.text)
+		.joiner(dirSeparator)
+		.text();

 	string resolvedLocation = moduleCache.resolveImportLocation(path);
 	if (resolvedLocation is null)
@@ -301,15 +334,14 @@ body
 	auto symbols = moduleCache.getModuleSymbol(internString(resolvedLocation));

 	import containers.hashset : HashSet;
 	HashSet!string h;

 	void addSymbolToResponses(const(DSymbol)* sy)
 	{
 		auto a = DSymbol(sy.name);
-		if (!builtinSymbols.contains(&a) && sy.name !is null
-				&& !h.contains(sy.name) && !sy.skipOver
-				&& sy.name != CONSTRUCTOR_SYMBOL_NAME && isPublicCompletionKind(sy.kind))
+		if (!builtinSymbols.contains(&a) && sy.name !is null && !h.contains(sy.name)
+			&& !sy.skipOver && sy.name != CONSTRUCTOR_SYMBOL_NAME
+			&& isPublicCompletionKind(sy.kind))
 		{
 			response.completionKinds ~= sy.kind;
 			response.completions ~= sy.name;
@@ -335,7 +367,8 @@ body
  * tokens = the tokens after the "import" keyword and before the cursor
  * response = the response that should be populated
  */
-void setImportCompletions(T)(T tokens, ref AutocompleteResponse response, ref ModuleCache cache)
+void setImportCompletions(T)(T tokens, ref AutocompleteResponse response,
+	ref ModuleCache cache)
 {
 	response.completionType = CompletionType.identifiers;
 	string partial = null;
@@ -359,8 +392,8 @@ void setImportCompletions(T)(T tokens, ref AutocompleteResponse response, ref Mo
 			found = true;
 			auto n = importPath.baseName(".d").baseName(".di");
-			if (isFile(importPath) && (importPath.endsWith(".d")
-					|| importPath.endsWith(".di")) && (partial is null || n.startsWith(partial)))
+			if (isFile(importPath) && (importPath.endsWith(".d") || importPath.endsWith(".di"))
+				&& (partial is null || n.startsWith(partial)))
 			{
 				response.completions ~= n;
 				response.completionKinds ~= CompletionKind.moduleName;
@@ -374,11 +407,9 @@ void setImportCompletions(T)(T tokens, ref AutocompleteResponse response, ref Mo
 			found = true;
-			try
-				foreach (string name; dirEntries(p, SpanMode.shallow))
+			try foreach (string name; dirEntries(p, SpanMode.shallow))
 			{
-				import std.path : baseName;
+				import std.path: baseName;
 				if (name.baseName.startsWith(".#"))
 					continue;
@@ -394,12 +425,13 @@ void setImportCompletions(T)(T tokens, ref AutocompleteResponse response, ref Mo
 					if (n[0] != '.' && (partial is null || n.startsWith(partial)))
 					{
 						response.completions ~= n;
-						response.completionKinds ~= exists(buildPath(name, "package.d")) || exists(buildPath(name,
-							"package.di")) ? CompletionKind.moduleName : CompletionKind.packageName;
+						response.completionKinds ~=
+							exists(buildPath(name, "package.d")) || exists(buildPath(name, "package.di"))
+							? CompletionKind.moduleName : CompletionKind.packageName;
 					}
 				}
 			}
-			catch (FileException)
+			catch(FileException)
 			{
 				warning("Cannot access import path: ", importPath);
 			}
@@ -412,21 +444,21 @@ void setImportCompletions(T)(T tokens, ref AutocompleteResponse response, ref Mo
 /**
  *
  */
-void setCompletions(T)(ref AutocompleteResponse response, Scope* completionScope, T tokens,
-	size_t cursorPosition, CompletionType completionType,
-	bool isBracket = false, string partial = null)
+void setCompletions(T)(ref AutocompleteResponse response,
+	Scope* completionScope, T tokens, size_t cursorPosition,
+	CompletionType completionType, bool isBracket = false, string partial = null)
 {
-	static void addSymToResponse(const(DSymbol)* s, ref AutocompleteResponse r,
-		string p, size_t[] circularGuard = [])
+	static void addSymToResponse(const(DSymbol)* s, ref AutocompleteResponse r, string p,
+		size_t[] circularGuard = [])
 	{
 		if (circularGuard.canFind(cast(size_t) s))
 			return;
 		foreach (sym; s.opSlice())
 		{
-			if (sym.name !is null && sym.name.length > 0
-					&& isPublicCompletionKind(sym.kind) && (p is null ? true
-					: toUpper(sym.name.data).startsWith(toUpper(p)))
-					&& !r.completions.canFind(sym.name) && sym.name[0] != '*')
+			if (sym.name !is null && sym.name.length > 0 && isPublicCompletionKind(sym.kind)
+				&& (p is null ? true : toUpper(sym.name.data).startsWith(toUpper(p)))
+				&& !r.completions.canFind(sym.name)
+				&& sym.name[0] != '*')
 			{
 				r.completionKinds ~= sym.kind;
 				r.completions ~= sym.name.dup;
@@ -467,7 +499,8 @@ void setCompletions(T)(ref AutocompleteResponse response, Scope* completionScope
 			|| symbols[0].kind == CompletionKind.importSymbol
 			|| symbols[0].kind == CompletionKind.aliasName)
 		{
-			symbols = symbols[0].type is null || symbols[0].type is symbols[0] ? [] : [symbols[0].type];
+			symbols = symbols[0].type is null || symbols[0].type is symbols[0] ? []
+				: [symbols[0].type];
 			if (symbols.length == 0)
 				return;
 		}
@@ -477,7 +510,8 @@ void setCompletions(T)(ref AutocompleteResponse response, Scope* completionScope
 	else if (completionType == CompletionType.calltips)
 	{
 		//trace("Showing call tips for ", symbols[0].name, " of kind ", symbols[0].kind);
-		if (symbols[0].kind != CompletionKind.functionName && symbols[0].callTip is null)
+		if (symbols[0].kind != CompletionKind.functionName
+			&& symbols[0].callTip is null)
 		{
 			if (symbols[0].kind == CompletionKind.aliasName)
 			{
@@ -512,8 +546,8 @@ void setCompletions(T)(ref AutocompleteResponse response, Scope* completionScope
 			}
 		}
 	}
-	if (symbols[0].kind == CompletionKind.structName || symbols[0].kind
-		== CompletionKind.className)
+	if (symbols[0].kind == CompletionKind.structName
+		|| symbols[0].kind == CompletionKind.className)
 	{
 		auto constructor = symbols[0].getPartsByName(CONSTRUCTOR_SYMBOL_NAME);
 		if (constructor.length == 0)


@@ -40,7 +40,8 @@ import common.messages;
  * Returns:
  * the autocompletion response.
  */
-public AutocompleteResponse findLocalUse(AutocompleteRequest request, ref ModuleCache moduleCache)
+public AutocompleteResponse findLocalUse(AutocompleteRequest request,
+		ref ModuleCache moduleCache)
 {
 	AutocompleteResponse response;
 	RollbackAllocator rba;
@@ -53,7 +54,8 @@ public AutocompleteResponse findLocalUse(AutocompleteRequest request, ref Module
 	// getSymbolsForCompletion() copy to avoid repetitive parsing
 	LexerConfig config;
 	config.fileName = "";
-	const(Token)[] tokenArray = getTokensForParser(cast(ubyte[]) request.sourceCode, config, &cache);
+	const(Token)[] tokenArray = getTokensForParser(cast(ubyte[]) request.sourceCode,
+		config, &cache);
 	SymbolStuff getSymbolsAtCursor(size_t cursorPosition)
 	{
 		auto sortedTokens = assumeSorted(tokenArray);
@@ -66,9 +68,8 @@ public AutocompleteResponse findLocalUse(AutocompleteRequest request, ref Module
 	}

 	// gets the symbol matching to cursor pos
-	SymbolStuff stuff = getSymbolsAtCursor(cast(size_t) request.cursorPosition);
-	scope (exit)
-		stuff.destroy();
+	SymbolStuff stuff = getSymbolsAtCursor(cast(size_t)request.cursorPosition);
+	scope(exit) stuff.destroy();

 	// starts searching only if no ambiguity with the symbol
 	if (stuff.symbols.length == 1)
@@ -79,11 +80,12 @@ public AutocompleteResponse findLocalUse(AutocompleteRequest request, ref Module
 		// gets the source token to avoid too much getSymbolsAtCursor()
 		const(Token)* sourceToken;
-		foreach (i, t; tokenArray)
+		foreach(i, t; tokenArray)
 		{
 			if (t.type != tok!"identifier")
 				continue;
-			if (request.cursorPosition >= t.index && request.cursorPosition < t.index + t.text.length)
+			if (request.cursorPosition >= t.index &&
+				request.cursorPosition < t.index + t.text.length)
 			{
 				sourceToken = tokenArray.ptr + i;
 				break;
@@ -91,17 +93,16 @@ public AutocompleteResponse findLocalUse(AutocompleteRequest request, ref Module
 		}

 		// finds the tokens that match to the source symbol
-		if (sourceToken != null)
-			foreach (t; tokenArray)
+		if (sourceToken != null) foreach (t; tokenArray)
 		{
 			if (t.type == tok!"identifier" && t.text == sourceToken.text)
 			{
 				size_t pos = cast(size_t) t.index + 1; // place cursor inside the token
 				SymbolStuff candidate = getSymbolsAtCursor(pos);
-				scope (exit)
-					candidate.destroy();
-				if (candidate.symbols.length == 1 && candidate.symbols[0].location == sourceSymbol.location
-					&& candidate.symbols[0].symbolFile == sourceSymbol.symbolFile)
+				scope(exit) candidate.destroy();
+				if (candidate.symbols.length == 1 &&
+					candidate.symbols[0].location == sourceSymbol.location &&
+					candidate.symbols[0].symbolFile == sourceSymbol.symbolFile)
 				{
 					response.locations ~= t.index;
 				}


@@ -49,10 +49,9 @@ public AutocompleteResponse findDeclaration(const AutocompleteRequest request,
 	RollbackAllocator rba;
 	auto allocator = scoped!(ASTAllocator)();
 	auto cache = StringCache(StringCache.defaultBucketCount);
-	SymbolStuff stuff = getSymbolsForCompletion(request, CompletionType.location,
-		allocator, &rba, cache, moduleCache);
-	scope (exit)
-		stuff.destroy();
+	SymbolStuff stuff = getSymbolsForCompletion(request,
+		CompletionType.location, allocator, &rba, cache, moduleCache);
+	scope(exit) stuff.destroy();
 	if (stuff.symbols.length > 0)
 	{
 		response.symbolLocation = stuff.symbols[0].location;
@@ -74,13 +73,13 @@ public AutocompleteResponse symbolSearch(const AutocompleteRequest request,
 	LexerConfig config;
 	config.fileName = "";
 	auto cache = StringCache(StringCache.defaultBucketCount);
-	const(Token)[] tokenArray = getTokensForParser(cast(ubyte[]) request.sourceCode, config, &cache);
+	const(Token)[] tokenArray = getTokensForParser(cast(ubyte[]) request.sourceCode,
+		config, &cache);
 	auto allocator = scoped!(ASTAllocator)();
 	RollbackAllocator rba;
 	ScopeSymbolPair pair = generateAutocompleteTrees(tokenArray, allocator,
 		&rba, request.cursorPosition, moduleCache);
-	scope (exit)
-		pair.destroy();
+	scope(exit) pair.destroy();

 	static struct SearchResults
 	{


@@ -147,15 +147,36 @@ SymbolStuff getSymbolsForCompletion(const AutocompleteRequest request,
 		request.cursorPosition, type), pair.symbol, pair.scope_);
 }

+static void skip(alias O, alias C, T)(T t, ref size_t i)
+{
+	int depth = 1;
+	while (i < t.length) switch (t[i].type)
+	{
+	case O:
+		i++;
+		depth++;
+		break;
+	case C:
+		i++;
+		depth--;
+		if (depth <= 0)
+			return;
+		break;
+	default:
+		i++;
+		break;
+	}
+}
+
 bool isSliceExpression(T)(T tokens, size_t index)
 {
 	while (index < tokens.length) switch (tokens[index].type)
 	{
 	case tok!"[":
-		tokens.skipParen(index, tok!"[", tok!"]");
+		skip!(tok!"[", tok!"]")(tokens, index);
 		break;
 	case tok!"(":
-		tokens.skipParen(index, tok!"(", tok!")");
+		skip!(tok!"(", tok!")")(tokens, index);
 		break;
 	case tok!"]":
 	case tok!"}":
@@ -179,6 +200,22 @@ DSymbol*[] getSymbolsByTokenChain(T)(Scope* completionScope,
 	//dumpTokens(tokens.release);
 	//writeln(">>>");

+	static size_t skipEnd(T tokenSlice, size_t i, IdType open, IdType close)
+	{
+		size_t j = i + 1;
+		for (int depth = 1; depth > 0 && j < tokenSlice.length; j++)
+		{
+			if (tokenSlice[j].type == open)
+				depth++;
+			else if (tokenSlice[j].type == close)
+			{
+				depth--;
+				if (depth == 0) break;
+			}
+		}
+		return j;
+	}
+
 	// Find the symbol corresponding to the beginning of the chain
 	DSymbol*[] symbols;
 	if (tokens.length == 0)
@@ -187,8 +224,7 @@ DSymbol*[] getSymbolsByTokenChain(T)(Scope* completionScope,
 	// e.g. (a.b!c).d
 	if (tokens[0] == tok!"(")
 	{
-		size_t j = 0;
-		tokens.skipParen(j, tok!"(", tok!")");
+		immutable j = skipEnd(tokens, 0, tok!"(", tok!")");
 		symbols = getSymbolsByTokenChain(completionScope, tokens[1 .. j],
 			cursorPosition, completionType);
 		tokens = tokens[j + 1 .. $];
@@ -271,7 +307,7 @@ DSymbol*[] getSymbolsByTokenChain(T)(Scope* completionScope,
 	{
 		void skip(IdType open, IdType close)
 		{
-			tokens.skipParen(i, open, close);
+			i = skipEnd(tokens, i, open, close);
 		}

 		switch (tokens[i].type)
@@ -402,7 +438,7 @@ DSymbol*[] getSymbolsByTokenChain(T)(Scope* completionScope,
 	return symbols;
 }

-enum TYPE_IDENT_CASES = q{
+private enum TYPE_IDENT_AND_LITERAL_CASES = q{
 	case tok!"int":
 	case tok!"uint":
 	case tok!"long":
@@ -429,16 +465,11 @@ enum TYPE_IDENT_CASES = q{
 	case tok!"this":
 	case tok!"super":
 	case tok!"identifier":
-};
-
-enum STRING_LITERAL_CASES = q{
 	case tok!"stringLiteral":
 	case tok!"wstringLiteral":
 	case tok!"dstringLiteral":
 };

-enum TYPE_IDENT_AND_LITERAL_CASES = TYPE_IDENT_CASES ~ STRING_LITERAL_CASES;
-
 /**
  *
  */
@@ -485,7 +516,18 @@ T getExpression(T)(T beforeTokens)
 			skip:
 				mixin (EXPRESSION_LOOP_BREAK);
 				immutable bookmark = i;
-				beforeTokens.skipParenReverse(i, open, close);
+				int depth = 1;
+				do
+				{
+					if (depth == 0 || i == 0)
+						break;
+					else
+						i--;
+					if (beforeTokens[i].type == open)
+						depth++;
+					else if (beforeTokens[i].type == close)
+						depth--;
+				} while (true);

 				skipCount++;
@@ -532,7 +574,7 @@ T getExpression(T)(T beforeTokens)
  */
 ImportKind determineImportKind(T)(T tokens)
 {
-	assert(tokens.length > 1);
+	assert (tokens.length > 1);
 	size_t i = tokens.length - 1;
 	if (!(tokens[i] == tok!":" || tokens[i] == tok!"," || tokens[i] == tok!"."
 		|| tokens[i] == tok!"identifier"))
@@ -580,7 +622,7 @@ bool isUdaExpression(T)(ref T tokens)
 {
 	bool result;
 	ptrdiff_t skip;
-	auto i = cast(ptrdiff_t) tokens.length - 2;
+	ptrdiff_t i = tokens.length - 2;

 	if (i < 1)
 		return result;
@@ -588,13 +630,13 @@ bool isUdaExpression(T)(ref T tokens)
 	// skips the UDA ctor
 	if (tokens[i].type == tok!")")
 	{
-		skip++;
-		i--;
+		++skip;
+		--i;
 		while (i >= 2)
 		{
 			skip += tokens[i].type == tok!")";
 			skip -= tokens[i].type == tok!"(";
-			i--;
+			--i;
 			if (skip == 0)
 			{
 				// @UDA!(TemplateParameters)(FunctionParameters)
@@ -613,20 +655,20 @@ bool isUdaExpression(T)(ref T tokens)
 	{
 		// @UDA!SingleTemplateParameter
 		if (i > 2 && tokens[i].type == tok!"identifier" && tokens[i-1].type == tok!"!")
+		{
 			i -= 2;
+		}
 		// @UDA
 		if (i > 0 && tokens[i].type == tok!"identifier" && tokens[i-1].type == tok!"@")
+		{
 			result = true;
 		}
+	}
 	return result;
 }

-/**
- * Traverses a token slice in reverse to find the opening parentheses or square bracket
- * that begins the block the last token is in.
- */
 size_t goBackToOpenParen(T)(T beforeTokens)
 in
 {
@@ -635,7 +677,8 @@
 body
 {
 	size_t i = beforeTokens.length - 1;
+	IdType open;
+	IdType close;
 	while (true) switch (beforeTokens[i].type)
 	{
 	case tok!",":
@@ -663,69 +706,36 @@ body
 	case tok!"[":
 		return i + 1;
 	case tok!")":
-		beforeTokens.skipParenReverse!true(i, tok!")", tok!"(");
-		break;
+		open = tok!")";
+		close = tok!"(";
+		goto skip;
 	case tok!"}":
-		beforeTokens.skipParenReverse!true(i, tok!"}", tok!"{");
-		break;
+		open = tok!"}";
+		close = tok!"{";
+		goto skip;
 	case tok!"]":
-		beforeTokens.skipParenReverse!true(i, tok!"]", tok!"[");
-		break;
-	default:
-		return size_t.max;
-	}
-}
-
-/**
- * Skips blocks of parentheses until the starting block has been closed
- */
-void skipParen(T)(T tokenSlice, ref size_t i, IdType open, IdType close)
-{
-	if (i >= tokenSlice.length || tokenSlice.length <= 0)
-		return;
-	int depth = 1;
-	while (depth != 0 && i + 1 != tokenSlice.length)
-	{
-		i++;
-		if (tokenSlice[i].type == open)
-			depth++;
-		else if (tokenSlice[i].type == close)
-			depth--;
-	}
-}
-
-/**
- * Skips blocks of parentheses in reverse until the starting block has been opened
- */
-void skipParenReverse(bool before = false, T)(T beforeTokens, ref size_t i, IdType open, IdType close)
-{
-	if (i == 0)
-		return;
-	int depth = 1;
-	while (depth != 0 && i != 0)
-	{
-		i--;
-		if (beforeTokens[i].type == open)
-			depth++;
-		else if (beforeTokens[i].type == close)
-			depth--;
-	}
-	static if (before)
-		if (i != 0)
-			i--;
-}
-
-///
-unittest
-{
-	Token[] t = [
-		Token(tok!"identifier"), Token(tok!"identifier"), Token(tok!"("),
-		Token(tok!"identifier"), Token(tok!"("), Token(tok!")"), Token(tok!",")
-	];
-	size_t i = t.length - 1;
-	skipParenReverse!false(t, i, tok!")", tok!"(");
-	assert(i == 2);
-	i = t.length - 1;
-	skipParenReverse!true(t, i, tok!")", tok!"(");
-	assert(i == 1);
-}
+		open = tok!"]";
+		close = tok!"[";
+	skip:
+		if (i == 0)
+			return size_t.max;
+		else
+			i--;
+		int depth = 1;
+		do
+		{
+			if (depth == 0 || i == 0)
+				break;
+			else
+				i--;
+			if (beforeTokens[i].type == open)
+				depth++;
+			else if (beforeTokens[i].type == close)
+				depth--;
+		} while (true);
+		break;
+	default:
+		return size_t.max;
+	}
+}


@@ -211,8 +211,7 @@ int main(string[] args)
 				s.shutdown(SocketShutdown.BOTH);
 				s.close();
 			}
-
-			auto bytesReceived = s.receive(buffer);
+			ptrdiff_t bytesReceived = s.receive(buffer);

 			auto requestWatch = StopWatch(AutoStart.yes);
@@ -238,7 +237,6 @@ int main(string[] args)
 			AutocompleteRequest request;
 			msgpack.unpack(buffer[size_t.sizeof .. bytesReceived], request);
-
 			if (request.kind & RequestKind.clearCache)
 			{
 				info("Clearing cache.");
@@ -251,39 +249,77 @@
 			}
 			else if (request.kind & RequestKind.query)
 			{
-				s.sendResponse(AutocompleteResponse.ack);
+				AutocompleteResponse response;
+				response.completionType = "ack";
+				ubyte[] responseBytes = msgpack.pack(response);
+				s.send(responseBytes);
 				continue;
 			}
 			if (request.kind & RequestKind.addImport)
 			{
 				cache.addImportPaths(request.importPaths);
 			}
 			if (request.kind & RequestKind.listImports)
 			{
 				AutocompleteResponse response;
 				response.importPaths = cache.getImportPaths().map!(a => cast() a).array();
+				ubyte[] responseBytes = msgpack.pack(response);
 				info("Returning import path list");
-				s.sendResponse(response);
+				s.send(responseBytes);
 			}
 			else if (request.kind & RequestKind.autocomplete)
 			{
 				info("Getting completions");
-				s.sendResponse(complete(request, cache));
+				AutocompleteResponse response = complete(request, cache);
+				ubyte[] responseBytes = msgpack.pack(response);
+				s.send(responseBytes);
 			}
 			else if (request.kind & RequestKind.doc)
 			{
 				info("Getting doc comment");
-				s.trySendResponse(getDoc(request, cache), "Could not get DDoc information");
+				try
+				{
+					AutocompleteResponse response = getDoc(request, cache);
+					ubyte[] responseBytes = msgpack.pack(response);
+					s.send(responseBytes);
+				}
+				catch (Exception e)
+				{
+					warning("Could not get DDoc information", e.msg);
+				}
 			}
 			else if (request.kind & RequestKind.symbolLocation)
-				s.trySendResponse(findDeclaration(request, cache), "Could not get symbol location");
+			{
+				try
+				{
+					AutocompleteResponse response = findDeclaration(request, cache);
+					ubyte[] responseBytes = msgpack.pack(response);
+					s.send(responseBytes);
+				}
+				catch (Exception e)
+				{
+					warning("Could not get symbol location", e.msg);
+				}
+			}
 			else if (request.kind & RequestKind.search)
-				s.sendResponse(symbolSearch(request, cache));
+			{
+				AutocompleteResponse response = symbolSearch(request, cache);
+				ubyte[] responseBytes = msgpack.pack(response);
+				s.send(responseBytes);
+			}
 			else if (request.kind & RequestKind.localUse)
-				s.trySendResponse(findLocalUse(request, cache), "Could not find local usage");
+			{
+				try
+				{
+					AutocompleteResponse response = findLocalUse(request, cache);
+					ubyte[] responseBytes = msgpack.pack(response);
+					s.send(responseBytes);
+				}
+				catch (Exception e)
+				{
+					warning("Could not find local usage", e.msg);
+				}
+			}
 			info("Request processed in ", requestWatch.peek().to!("msecs", float), " milliseconds");
 		}
 		return 0;
@@ -298,24 +334,16 @@ union IPv4Union
 	uint i;
 }

-/// Lazily evaluates a response with an exception handler and sends it to a socket or logs msg if evaluating response fails.
-void trySendResponse(Socket socket, lazy AutocompleteResponse response, string msg)
-{
-	try
-	{
-		sendResponse(socket, response);
-	}
-	catch (Exception e)
-	{
-		warningf("%s: %s", msg, e.msg);
-	}
-}
+import std.regex : ctRegex;
+alias envVarRegex = ctRegex!(`\$\{([_a-zA-Z][_a-zA-Z 0-9]*)\}`);

-/// Packs an AutocompleteResponse and sends it to a socket.
-void sendResponse(Socket socket, AutocompleteResponse response)
+private unittest
 {
-	ubyte[] responseBytes = msgpack.pack(response);
-	socket.send(responseBytes);
+	import std.regex : replaceAll;
+
+	enum input = `${HOME}/aaa/${_bb_b}/ccc`;
+	assert(replaceAll!(m => m[1])(input, envVarRegex) == `HOME/aaa/_bb_b/ccc`);
 }