lexeme.go
package main

import (
    "go/scanner"
    "go/token"
    "io/ioutil"
    "os"
)
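
// Lexeme pairs a scanned token with its literal text and its source
// position, in both resolved (token.Position) and compact (token.Pos) form.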
type Lexeme struct {
    pos token.Position
    p   token.Pos
    tok token.Token
    lit string
}

// LexemeChan lexes the Go source file at srcpath and streams its lexemes,
// including comments, over the returned channel. The channel is closed at EOF.
func LexemeChan(srcpath string) (<-chan *Lexeme, error) {
    fs := token.NewFileSet()
    st, err := os.Stat(srcpath)
    if err != nil {
        return nil, err
    }
    tf := fs.AddFile(srcpath, fs.Base(), int(st.Size()))
    src, err := ioutil.ReadFile(tf.Name())
    if err != nil {
        return nil, err
    }
    s := &scanner.Scanner{}
    s.Init(tf, src, nil, scanner.ScanComments)
    lexc := make(chan *Lexeme)
    go func() {
        defer close(lexc)
        for {
            p, t, lit := s.Scan()
            if t == token.EOF {
                return
            }
            lexeme := &Lexeme{tf.Position(p), p, t, lit}
            lexc <- lexeme
        }
    }()
    return lexc, nil
}
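
// Filter buffers lexemes from lc, runs the growing buffer through the state
// function f, and forwards only the runs that f accepts. A Lexeme with
// tok == token.ILLEGAL is sent after each multi-lexeme match as a separator.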
func Filter(lc <-chan *Lexeme, f func([]*Lexeme) State) <-chan *Lexeme {
    retc := make(chan *Lexeme)
    go func() {
        ls := []*Lexeme{}
        for l := range lc {
            ls = append(ls, l)
            st := f(ls)
            if st == Reject {
                // The buffered run can no longer match; discard it.
                ls = nil
                continue
            }
            if st != Accept {
                // Intermediate state: keep buffering.
                continue
            }
            // Accepted: forward the buffered run downstream.
            for _, v := range ls {
                retc <- v
            }
            if len(ls) > 1 {
                // Separate multi-lexeme matches with an ILLEGAL marker.
                retc <- &Lexeme{tok: token.ILLEGAL}
            }
            ls = nil
        }
        close(retc)
    }()
    return retc
}
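
// CommentFilter accepts a single comment lexeme.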
func CommentFilter(l []*Lexeme) State {
    return dfa([]xfer{
        {token.COMMENT: Accept},
    }, l)
}

// DeclRootCommentFilter matches the comment header that precedes a package,
// type, or function declaration, up to and including the declared identifier.
func DeclRootCommentFilter(l []*Lexeme) State {
    return dfa([]xfer{
        {token.COMMENT: 1},
        {
            token.COMMENT: 1,
            token.TYPE:    2,
            token.FUNC:    3,
            token.IDENT:   5,
            token.PACKAGE: 2,
        },
        {token.IDENT: Accept},
        {token.IDENT: Accept, token.LPAREN: 4},
        {token.RPAREN: 5, token.ILLEGAL: 4},
        {token.IDENT: Accept},
    }, l)
}

// DeclTypeFilter matches a struct or interface type declaration, from the
// type keyword through the closing brace of its body.
func DeclTypeFilter(l []*Lexeme) State {
    return dfa([]xfer{
        {token.TYPE: 1},
        {token.IDENT: 2},
        {token.INTERFACE: 3, token.STRUCT: 3},
        {token.LBRACE: 4},
        {token.RBRACE: Accept, token.ILLEGAL: 4},
    }, l)
}

// DeclIdentCommentFilter captures comments preceding an identifier.
func DeclIdentCommentFilter(l []*Lexeme) State {
    return dfa([]xfer{
        {token.COMMENT: 1},
        {token.COMMENT: 1, token.IDENT: Accept},
    }, l)
}
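
// LexemeMux fans the lexemes from lc out to n buffered channels so that
// several filters can consume the same stream concurrently. Each output
// channel is closed once lc is drained.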
func LexemeMux(lc <-chan *Lexeme, n int) []chan *Lexeme {
    ret := []chan *Lexeme{}
    for i := 0; i < n; i++ {
        ret = append(ret, make(chan *Lexeme, 32))
    }
    go func() {
        // Broadcast every lexeme to each output, then close all outputs.
        for l := range lc {
            for i := 0; i < n; i++ {
                ret[i] <- l
            }
        }
        for i := 0; i < n; i++ {
            close(ret[i])
        }
    }()
    return ret
}
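
A minimal usage sketch, not part of lexeme.go: it leans on the State, Accept, Reject, dfa, and xfer helpers that this file references but defines elsewhere in the package, and the input path, function name, and printing are illustrative only. It wires LexemeChan through LexemeMux so two Filter pipelines can consume the same lexeme stream; each consumer runs in its own goroutine because Filter's output channels are unbuffered.

// example_usage.go (sketch): drives the pipeline defined in lexeme.go.
package main

import (
    "fmt"
    "sync"
)

func exampleUsage() {
    lexc, err := LexemeChan("lexeme.go") // any Go source file
    if err != nil {
        panic(err)
    }
    // Fan the stream out so two filters can read it independently.
    outs := LexemeMux(lexc, 2)
    comments := Filter(outs[0], CommentFilter)
    decls := Filter(outs[1], DeclTypeFilter)

    var wg sync.WaitGroup
    wg.Add(2)
    go func() {
        defer wg.Done()
        for l := range comments {
            fmt.Println(l.pos, l.lit)
        }
    }()
    go func() {
        defer wg.Done()
        for l := range decls {
            fmt.Println(l.tok, l.lit)
        }
    }()
    wg.Wait()
}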