lex_test.go
// Copyright (c) 2014 Alex Kalyvitis

package mustache

import "testing"

func TestLexer(t *testing.T) {
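	// Each test case pairs a template with the exact token stream the lexer is expected to emit.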
	for _, test := range []struct {
		template string
		expected []token
	}{
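		// First template: raw (triple mustache) interpolation and a comment tag.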
		{
			"foo {{{bar}}}\nbaz {{! this is ignored }}",
			[]token{
				{typ: tokenText, val: "foo "},
				{typ: tokenLeftDelim, val: "{{"},
				{typ: tokenRawStart, val: "{"},
				{typ: tokenIdentifier, val: "bar"},
				{typ: tokenRawEnd, val: "}"},
				{typ: tokenRightDelim, val: "}}"},
				{typ: tokenText, val: "\nbaz "},
				{typ: tokenLeftDelim, val: "{{"},
				{typ: tokenComment, val: "!"},
				{typ: tokenText, val: " this is ignored "},
				{typ: tokenRightDelim, val: "}}"},
				{typ: tokenEOF},
			},
		},
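		// Second template: set-delimiter tags switch to "| |" and back to "{{ }}".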
		{
			"\nfoo {{bar}} baz {{=| |=}}\r\n |foo| |={{ }}=| {{bar}}",
			[]token{
				{typ: tokenText, val: "\nfoo "},
				{typ: tokenLeftDelim, val: "{{"},
				{typ: tokenIdentifier, val: "bar"},
				{typ: tokenRightDelim, val: "}}"},
				{typ: tokenText, val: " baz "},
				{typ: tokenSetDelim},
				{typ: tokenText, val: "\r\n "},
				{typ: tokenLeftDelim, val: "|"},
				{typ: tokenIdentifier, val: "foo"},
				{typ: tokenRightDelim, val: "|"},
				{typ: tokenText, val: " "},
				{typ: tokenSetDelim},
				{typ: tokenText, val: " "},
				{typ: tokenLeftDelim, val: "{{"},
				{typ: tokenIdentifier, val: "bar"},
				{typ: tokenRightDelim, val: "}}"},
				{typ: tokenEOF},
			},
		},
	} {
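		// Drain the lexer, comparing each emitted token's type and value with the expected sequence.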
		var (
			lexer = newLexer(test.template, "{{", "}}")
			token = lexer.token()
			i     = 0
		)
		for token.typ > tokenEOF {
			t.Logf("%s\n", token)
			if i >= len(test.expected) {
				t.Fatalf("token stream exceeded the length of expected tokens.")
			}
			if token.typ != test.expected[i].typ {
				t.Errorf("unexpected token %q, expected %q", token.typ, test.expected[i].typ)
			}
			if token.val != test.expected[i].val {
				t.Errorf("unexpected value %q, expected %q", token.val, test.expected[i].val)
			}
			token = lexer.token()
			i++
		}
	}
}