-
Notifications
You must be signed in to change notification settings - Fork 0
/
Lexer.cs
101 lines (93 loc) · 3.22 KB
/
Lexer.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;
namespace ORM.Lexicography
{
    /// <summary>
    /// Splits a lambda-style expression string (e.g. the text of an expression tree)
    /// into a flat list of <see cref="Token"/> instances.
    /// </summary>
    class Lexer
    {
        // Fixed lexemes with a 1:1 token mapping. A lookup table replaces the original
        // 13-arm switch statement; lexemes not present here fall through to the
        // pattern checks in MapToToken.
        private static readonly Dictionary<string, Tokens.Token> FixedLexemes = new Dictionary<string, Tokens.Token>
        {
            { ".", Tokens.Token.DOT },
            { "=>", Tokens.Token.ARROW },
            { "==", Tokens.Token.IS_EQUAL },
            { "!=", Tokens.Token.NOT_EQUAL },
            { "(", Tokens.Token.LEFT_PARENTHESIS },
            { ")", Tokens.Token.RIGHT_PARENTHESIS },
            { "=", Tokens.Token.EQUAL },
            { "True", Tokens.Token.TRUE },
            { "False", Tokens.Token.FALSE },
            { "new", Tokens.Token.NEW_LAMBDA_KEYWORD },
            { " ", Tokens.Token.SPACE },
            { "AndAlso", Tokens.Token.AND_ALSO },
            { "OrElse", Tokens.Token.OR_ELSE },
        };

        // Compiled once instead of rebuilding a Regex on every MapToToken call.
        // Patterns are byte-identical to the originals.
        private static readonly Regex ObjectPropertyPattern = new Regex(@"[a-zA-Z]\.[a-zA-Z]", RegexOptions.Compiled);
        private static readonly Regex WordPattern = new Regex(@"^[A-Za-z]+$", RegexOptions.Compiled);
        private static readonly Regex IntegerPattern = new Regex(@"^\d+$", RegexOptions.Compiled);
        private static readonly Regex SpacePattern = new Regex(@"(\w*\s\w*)\s+\w{2}\d\s+\d*", RegexOptions.Compiled);

        /// <summary>
        /// Maps a single lexeme to a <see cref="Token"/>. Fixed lexemes (operators,
        /// keywords, parentheses) are looked up directly; anything else is classified
        /// by pattern: object property access, bare word, integer literal, or a
        /// quoted string. Unrecognized input yields <c>UNDEFINED_TOKEN</c>.
        /// </summary>
        /// <param name="characters">The lexeme to classify.</param>
        /// <returns>A token pairing the classified kind with the lexeme text.</returns>
        private static Token MapToToken(string characters)
        {
            if (!FixedLexemes.TryGetValue(characters, out var token))
            {
                // e.g. "x.Name" — strip stray commas left over from parameter lists.
                if (ObjectPropertyPattern.IsMatch(characters))
                {
                    return new Token(Tokens.Token.OBJECT_PROPERTY, characters.Replace(",", ""));
                }
                if (WordPattern.IsMatch(characters))
                {
                    return new Token(Tokens.Token.WORD, characters);
                }
                if (IntegerPattern.IsMatch(characters))
                {
                    return new Token(Tokens.Token.INTEGER, characters);
                }
                // NOTE(review): this pattern (word pairs + two letters + digit runs)
                // mapping to SPACE predates this change; intent unclear — confirm
                // against the parser before removing.
                if (SpacePattern.IsMatch(characters))
                {
                    return new Token(Tokens.Token.SPACE, characters);
                }
                if (characters.StartsWith('"') && characters.EndsWith('"'))
                {
                    return new Token(Tokens.Token.STRING, characters);
                }
                token = Tokens.Token.UNDEFINED_TOKEN;
            }
            return new Token(token, characters);
        }

        /// <summary>
        /// Tokenizes an expression string by padding parentheses with spaces,
        /// splitting on spaces, and mapping each lexeme to a token.
        /// </summary>
        /// <param name="expr">The expression text to tokenize.</param>
        /// <returns>The tokens in source order.</returns>
        public static List<Token> Tokenize(string expr)
        {
            // RemoveEmptyEntries fixes a bug: the parenthesis padding always produces
            // consecutive spaces (e.g. "(a)" -> " ( a ) "), which previously yielded
            // spurious UNDEFINED_TOKEN("") entries in the output.
            var lexemes = expr
                .Replace("(", " ( ")
                .Replace(")", " ) ")
                .Split(' ', StringSplitOptions.RemoveEmptyEntries);

            var tokens = new List<Token>();
            foreach (var lexeme in lexemes)
            {
                tokens.Add(MapToToken(lexeme));
            }
            return tokens;
        }
    }
}