Add Intellisense to SWIS (#58)
* Added intellisense to SwqlStudio

* Updated intellisense to show only related words

* Added enabling/disabling intellisense

* Move "Enable Intellisense" setting to Settings
nothrow authored and tdanner committed Feb 10, 2017
1 parent 6f7f905 commit 499779b
Showing 16 changed files with 660 additions and 266 deletions.
17 changes: 17 additions & 0 deletions Src/SwqlStudio/Intellisense/ExpectedCaretPosition.cs
@@ -0,0 +1,17 @@
namespace SwqlStudio.Intellisense
{
// Describes what we detect the user is (or could be) typing at the caret.
internal struct ExpectedCaretPosition
{
public ExpectedCaretPosition(ExpectedCaretPositionType type, string proposedEntity)
{
Type = type;
ProposedEntity = proposedEntity;
}

// The kind of element the user is (or could be) typing.
public ExpectedCaretPositionType Type { get; }
// Which entity's columns should be displayed (canonical entity name).
public string ProposedEntity { get; }
}
}
12 changes: 12 additions & 0 deletions Src/SwqlStudio/Intellisense/ExpectedCaretPositionType.cs
@@ -0,0 +1,12 @@
using System;

namespace SwqlStudio.Intellisense
{
[Flags]
internal enum ExpectedCaretPositionType
{
Keyword = 0x1,
Column = 0x2,
Entity = 0x4
}
}
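
ExpectedCaretPositionType is a [Flags] enum, and ParseFor in the provider below can return combinations such as Entity | Keyword, so a consumer has to test each flag individually rather than compare for equality. A minimal consumer sketch, assuming the caller lives in the same assembly as these internal types; the class and method names are purely illustrative:

using System;
using SwqlStudio.Intellisense;

internal static class CompletionListExample
{
    // Illustrative only: decides which completion groups to offer
    // based on the flags set in ExpectedCaretPosition.Type.
    internal static void DescribeCompletions(ExpectedCaretPosition position)
    {
        if ((position.Type & ExpectedCaretPositionType.Keyword) != 0)
            Console.WriteLine("offer SWQL keywords");

        if ((position.Type & ExpectedCaretPositionType.Entity) != 0)
            Console.WriteLine("offer entity names");

        if ((position.Type & ExpectedCaretPositionType.Column) != 0)
            Console.WriteLine("offer columns of " + (position.ProposedEntity ?? "an unknown entity"));
    }
}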
142 changes: 142 additions & 0 deletions Src/SwqlStudio/Intellisense/IntellisenseProvider.cs
@@ -0,0 +1,142 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Web;

namespace SwqlStudio.Intellisense
{
// We do not reuse the full SWIS grammar; a lighter scan lets us make a more educated guess here.
internal class IntellisenseProvider
{
private static readonly HashSet<string> _keyWords;
private readonly string _text;

public IntellisenseProvider(string text)
{
_text = text;
}

static IntellisenseProvider()
{
_keyWords = new HashSet<string>("all any and as asc between class desc distinct exists false full group having in inner into is isa from join left like not null or outer right select set some true union where end when then else case on top return xml raw auto with limitation rows to order by desc totalrows noplancache queryplan querystats".Split(' '), StringComparer.OrdinalIgnoreCase);
}

enum LastInterestingElement
{
Nothing, // nothing interesting seen yet
Dot, // the last token was a dot, so the next identifier is appended to the current one
As // the last token was an identifier (or the AS keyword), so the next identifier is treated as an alias
}

public ExpectedCaretPosition ParseFor(int caretPosition)
{
var aliasList = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);

var rv = DoTheParsing(caretPosition, aliasList);
string possibleAlias;

switch (rv.Item2)
{
case LastInterestingElement.Nothing:
return new ExpectedCaretPosition(ExpectedCaretPositionType.Entity | ExpectedCaretPositionType.Keyword, null);
case LastInterestingElement.Dot:
if (!aliasList.TryGetValue(rv.Item1, out possibleAlias))
possibleAlias = rv.Item1;

return new ExpectedCaretPosition(ExpectedCaretPositionType.Column, possibleAlias);
case LastInterestingElement.As:

if (!aliasList.TryGetValue(rv.Item1, out possibleAlias))
possibleAlias = rv.Item1;

return new ExpectedCaretPosition(ExpectedCaretPositionType.Keyword | ExpectedCaretPositionType.Column, possibleAlias);
default:
throw new ArgumentOutOfRangeException();
}
}

private Tuple<string, LastInterestingElement> DoTheParsing(int caretPosition,
Dictionary<string, string> aliasList)
{
string lastIdentifier = "";
var lastInterestingElement = LastInterestingElement.Nothing;

string underCaretIdentifier = "";
var underCaretInterestingElement = LastInterestingElement.Nothing;

bool detected = false;

foreach (var tok in new IntellisenseTokenizer(_text))
{
if (tok.Item3 == IntellisenseTokenizer.Token.Special)
{
if (_text[tok.Item1] == '.')
{
lastInterestingElement = LastInterestingElement.Dot;
}
else
{
lastInterestingElement = LastInterestingElement.Nothing;
}
}

if (!detected && tok.Item1 <= caretPosition && (tok.Item1 + tok.Item2) >= caretPosition)
{
// The caret is inside this token; remember what we have seen so far.
detected = true;
underCaretIdentifier = lastIdentifier;
underCaretInterestingElement = lastInterestingElement;
}

switch (tok.Item3)
{
case IntellisenseTokenizer.Token.Identifier:
var value = _text.Substring(tok.Item1, tok.Item2);
if (value.Equals("as", StringComparison.OrdinalIgnoreCase))
{
// Alias: the only keyword handled specially. It is ignored, since "Table X" and "Table AS X" are equivalent.
// This means that in "SELECT A B FROM D" both A and B look like aliases, but a full parse would cost far more.
}
else if (_keyWords.Contains(value)) // any other keyword resets the state
{
lastInterestingElement = LastInterestingElement.Nothing;
lastIdentifier = "";
}
else
{
var realIdentifier = value[0] == '[' ? value.Substring(1, value.Length - 2) : value;

switch (lastInterestingElement)
{
case LastInterestingElement.Nothing:
lastIdentifier = realIdentifier;
lastInterestingElement = LastInterestingElement.As;
break;
case LastInterestingElement.Dot:
lastIdentifier += "." + realIdentifier;
lastInterestingElement = LastInterestingElement.As;
break;
case LastInterestingElement.As:
aliasList[realIdentifier] = lastIdentifier;
lastInterestingElement = LastInterestingElement.Nothing;
lastIdentifier = "";
break;
default:
throw new ArgumentOutOfRangeException();
}
}
break;
case IntellisenseTokenizer.Token.Number:
case IntellisenseTokenizer.Token.String:
// numbers and strings are not interesting for completion; reset the state
lastInterestingElement = LastInterestingElement.Nothing;
break;
}
}


return Tuple.Create(underCaretIdentifier, underCaretInterestingElement);
}
}
}
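
A minimal usage sketch for the provider, assuming a caret offset taken from the editor; the class and method names are illustrative. With the caret placed right after the dot in "n.", the alias list built during the scan maps "n" back to Orion.Nodes, so the result asks for that entity's columns:

using System;
using SwqlStudio.Intellisense;

internal static class IntellisenseProviderExample
{
    internal static void Run()
    {
        // Caret offset 9 sits directly after the dot in "n." (the user has just typed it).
        const string query = "SELECT n. FROM Orion.Nodes n";
        var provider = new IntellisenseProvider(query);

        ExpectedCaretPosition expected = provider.ParseFor(9);

        // The alias "n" is resolved through the alias list built during the scan,
        // so a completion popup would list the columns of Orion.Nodes here.
        Console.WriteLine(expected.Type);           // Column
        Console.WriteLine(expected.ProposedEntity); // Orion.Nodes
    }
}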
78 changes: 78 additions & 0 deletions Src/SwqlStudio/Intellisense/IntellisenseTokenizer.cs
@@ -0,0 +1,78 @@
using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;

namespace SwqlStudio.Intellisense
{
internal class IntellisenseTokenizer
{
public enum Token
{
Special,
Number,
Identifier,
String,
EOF
}

private readonly string _input;
private static readonly IEnumerable<Regex> _ignoredRegexes;
private static readonly IEnumerable<Tuple<Regex, Token>> _regexes;
static IntellisenseTokenizer()
{
_ignoredRegexes = new[]
{
new Regex(@"\G--.*\n"), // comment
new Regex(@"\G\s+") // whitespace
};
_regexes = new[]
{

Tuple.Create(new Regex(@"\G[0-9]+.[0-9]*([eE][0-9]+)?"), Token.Number), // number
Tuple.Create(new Regex(@"\G@[a-zA-Z_]\w*"), Token.Identifier), // ident
Tuple.Create(new Regex(@"\G\[[^\]]*\]"), Token.Identifier), // quoted ident
Tuple.Create(new Regex(@"\G""(?:[^""\\]|\\.)*"""), Token.String), // quoted string
Tuple.Create(new Regex(@"\G[a-zA-Z_][a-zA-Z_0-9]*"), Token.Identifier) // ident
};
}

public IntellisenseTokenizer(string input)
{
_input = input;
}

public IEnumerator<Tuple<int, int, Token>> GetEnumerator()
{
int position = 0;
while (position < _input.Length)
{
foreach (var ignored in _ignoredRegexes)
{
var m = ignored.Match(_input, position);
if (m.Success)
{
position += m.Groups[0].Length;
goto end;
}
}

foreach (var rx in _regexes)
{
var m = rx.Item1.Match(_input, position);
if (m.Success)
{
yield return Tuple.Create(position, m.Groups[0].Length, rx.Item2);
position += m.Groups[0].Length;
goto end;
}
}
yield return Tuple.Create(position, 1, Token.Special);
position++;
end:
;
}

yield return Tuple.Create(position, 0, Token.EOF);
}
}
}
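
The tokenizer does not implement IEnumerable&lt;T&gt;; its public GetEnumerator is enough for the foreach pattern, which is how IntellisenseProvider consumes it. A minimal sketch of iterating it directly; the class and method names are illustrative:

using System;
using SwqlStudio.Intellisense;

internal static class TokenizerExample
{
    internal static void Run()
    {
        const string text = "SELECT TOP 5 Caption FROM Orion.Nodes -- trailing comment\n";

        // Each tuple is (start offset, length, token kind). Comments and whitespace are skipped,
        // unrecognized characters come back as single-character Special tokens, and the final
        // token is a zero-length EOF marker.
        foreach (var token in new IntellisenseTokenizer(text))
        {
            string lexeme = text.Substring(token.Item1, token.Item2);
            Console.WriteLine(token.Item3 + ": '" + lexeme + "'");
        }
    }
}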
