lexer_test.go
package main

import (
	"testing"
)
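
// These tests exercise the JSON lexer defined elsewhere in this package.
// From the way the tests below use it, the lexer is assumed to expose
// roughly the following API; the Lex return types and the TokenType
// constant names appear in this file, while the Token field names are an
// assumption for illustration only:
//
//	func Lex(input string) ([]Token, bool)   // token stream plus an ok flag that is false on a lexing error
//	type Token struct{ Type TokenType; Value string }
//	const TokenTypeNumber, TokenTypeString, TokenTypeBoolean, ... TokenType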

// assertLexingFails fails the test if lexing the given input unexpectedly succeeds.
func assertLexingFails(t *testing.T, json string) {
	_, ok := Lex(json)
	if ok {
		t.Fatal("Expected lexing to fail, but it succeeded.")
	}
}

// areTokensEqual reports whether the expected and actual token slices have the
// same length and identical elements.
func areTokensEqual(expected []Token, actual []Token) bool {
	if len(expected) != len(actual) {
		return false
	}
	for i := 0; i < len(expected); i++ {
		if expected[i] != actual[i] {
			return false
		}
	}
	return true
}

// assertTokensEqual lexes json and fails the test unless the resulting tokens
// match the expected slice.
func assertTokensEqual(t *testing.T, json string, expected []Token) {
	actual, ok := Lex(json)
	if !ok {
		t.Fatal("Lexing failed.")
	}
	if !areTokensEqual(expected, actual) {
		t.Error("Token arrays are not equal.")
		t.Error()
		t.Error("Expected:")
		t.Error(expected)
		t.Error()
		t.Error("Actual:")
		t.Error(actual)
		t.FailNow()
	}
}

func TestLexNumber(t *testing.T) {
	assertTokensEqual(t, "1", []Token{{TokenTypeNumber, "1"}})
	assertTokensEqual(t, "12", []Token{{TokenTypeNumber, "12"}})
	assertTokensEqual(t, "123", []Token{{TokenTypeNumber, "123"}})
	assertTokensEqual(t, "123.", []Token{{TokenTypeNumber, "123."}})
	assertTokensEqual(t, "123.4", []Token{{TokenTypeNumber, "123.4"}})
	// These strings are not valid JSON, but they are sequences of valid tokens.
	// The failure will occur during parsing.
	assertTokensEqual(t, "1..3", []Token{{TokenTypeNumber, "1."}, {TokenTypeNumber, ".3"}})
	assertTokensEqual(t, "1.2.3", []Token{{TokenTypeNumber, "1.2"}, {TokenTypeNumber, ".3"}})
	assertLexingFails(t, "1x3")
}

func TestLexBoolean(t *testing.T) {
	assertTokensEqual(t, "false", []Token{{TokenTypeBoolean, "false"}})
	assertTokensEqual(t, "true", []Token{{TokenTypeBoolean, "true"}})
}

func TestLexString(t *testing.T) {
	assertTokensEqual(t, `"abc"`, []Token{{TokenTypeString, `"abc"`}})
}

func TestLexArray(t *testing.T) {
	assertTokensEqual(t, `[0, 1]`, []Token{
		{TokenTypeOpenSquareBracket, "["},
		{TokenTypeNumber, "0"},
		{TokenTypeComma, ","},
		{TokenTypeNumber, "1"},
		{TokenTypeCloseSquareBracket, "]"},
	})
}

func TestLexObject(t *testing.T) {
	assertTokensEqual(t, `{"a": 0, "b": false}`, []Token{
		{TokenTypeOpenCurlyBrace, "{"},
		{TokenTypeString, `"a"`},
		{TokenTypeColon, ":"},
		{TokenTypeNumber, "0"},
		{TokenTypeComma, ","},
		{TokenTypeString, `"b"`},
		{TokenTypeColon, ":"},
		{TokenTypeBoolean, "false"},
		{TokenTypeCloseCurlyBrace, "}"},
	})
}