Skip to content

Commit 32db2c0

Browse files
committed
Fix grammar conflict: it could not be resolved by rewriting the yacc syntax, so the lexer is used instead to eliminate the ambiguity.
1 parent 87292ee commit 32db2c0

File tree

6 files changed

+2309
-2235
lines changed

6 files changed

+2309
-2235
lines changed

lexer.go

Lines changed: 27 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -82,7 +82,7 @@ func AddTokenBetween(left []byte, right []byte, matchEnd bool, action lexmachine
8282
})
8383
}
8484

85-
func AddIdentToken(rs string, action lexmachine.Action) {
85+
func genKeywordIdent(rs string) []byte {
8686
l := strings.ToLower(rs)
8787
u := strings.ToUpper(rs)
8888
var regex bytes.Buffer
@@ -96,7 +96,26 @@ func AddIdentToken(rs string, action lexmachine.Action) {
9696
regex.WriteString("]")
9797
}
9898
}
99-
lexer.Add(regex.Bytes(), action)
99+
return regex.Bytes()
100+
}
101+
102+
func genGroupKeywordIdent(rss ...string) []byte {
103+
var regex bytes.Buffer
104+
for i, rs := range rss {
105+
regex.Write(genKeywordIdent(rs))
106+
if i < len(rss)-1 {
107+
regex.Write([]byte("( |\t|\n|\r)+"))
108+
}
109+
}
110+
return regex.Bytes()
111+
}
112+
113+
// AddIdentToken registers a case-insensitive pattern for the single
// keyword rs on the package-level lexer, firing action when it matches.
func AddIdentToken(rs string, action lexmachine.Action) {
	lexer.Add(genKeywordIdent(rs), action)
}
116+
117+
// AddIdentGroupToken registers a case-insensitive pattern on the
// package-level lexer that matches the whole keyword sequence rss
// (whitespace-separated), firing action on a match. Used to resolve
// grammar ambiguities in the lexer rather than in the yacc syntax.
func AddIdentGroupToken(action lexmachine.Action, rss ...string) {
	lexer.Add(genGroupKeywordIdent(rss...), action)
}
101120

102121
var stdTokenMap = map[string]int{
@@ -346,6 +365,12 @@ var unReservedKeyword = map[string]int{
346365
func init() {
347366
lexer = lexmachine.NewLexer()
348367

368+
AddIdentGroupToken(token(_not_deferrable), "not", "deferrable")
369+
AddIdentGroupToken(token(_no_inmemory), "no", "inmemory")
370+
AddIdentGroupToken(token(_no_duplicate), "no", "duplicate")
371+
AddIdentGroupToken(token(_row_level_locking), "row", "level", "locking")
372+
AddIdentGroupToken(token(_drop_index), "drop", "index")
373+
349374
for keyword, tokenId := range stdTokenMap {
350375
AddIdentToken(keyword, token(tokenId))
351376
}

lexer_test.go

Lines changed: 57 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,57 @@
1+
package parser
2+
3+
import (
4+
"testing"
5+
6+
"github.com/stretchr/testify/assert"
7+
)
8+
9+
func TestGenKeywordIdent(t *testing.T) {
10+
testCases := map[string][]byte{
11+
"a": []byte("[Aa]"),
12+
"B": []byte("[Bb]"),
13+
"case": []byte("[Cc][Aa][Ss][Ee]"),
14+
"CASE": []byte("[Cc][Aa][Ss][Ee]"),
15+
"CaSe": []byte("[Cc][Aa][Ss][Ee]"),
16+
"a_b": []byte("[Aa]_[Bb]"),
17+
"": nil,
18+
}
19+
for input, expect := range testCases {
20+
actual := genKeywordIdent(input)
21+
assert.Equal(t, string(expect), string(actual))
22+
}
23+
}
24+
25+
// genGroupKeywordCase is one table entry for TestGenGroupKeywordIdent:
// the keyword list fed to genGroupKeywordIdent and the regex bytes the
// generator is expected to produce.
type genGroupKeywordCase struct {
	expect []byte   // expected regex output; nil for an empty input list
	input  []string // keywords passed to genGroupKeywordIdent
}
29+
30+
func TestGenGroupKeywordIdent(t *testing.T) {
31+
testCases := []genGroupKeywordCase{
32+
{
33+
input: []string{},
34+
expect: nil,
35+
},
36+
{
37+
input: []string{"a"},
38+
expect: []byte("[Aa]"),
39+
},
40+
{
41+
input: []string{"B"},
42+
expect: []byte("[Bb]"),
43+
},
44+
{
45+
input: []string{"a", "B"},
46+
expect: []byte("[Aa]( |\t|\n|\r)+[Bb]"),
47+
},
48+
{
49+
input: []string{"a", "B", "CasE"},
50+
expect: []byte("[Aa]( |\t|\n|\r)+[Bb]( |\t|\n|\r)+[Cc][Aa][Ss][Ee]"),
51+
},
52+
}
53+
for _, c := range testCases {
54+
actual := genGroupKeywordIdent(c.input...)
55+
assert.Equal(t, string(c.expect), string(actual))
56+
}
57+
}

0 commit comments

Comments
 (0)