package lexmachine

import (
	"bytes"
	"fmt"

	dfapkg "github.com/timtadh/lexmachine/dfa"
	"github.com/timtadh/lexmachine/frontend"
	"github.com/timtadh/lexmachine/inst"
	"github.com/timtadh/lexmachine/machines"
)

// Token is an optional token representation you could use to represent the
// tokens produced by a lexer built with lexmachine.
//
// Here is an example of constructing a lexer Action which turns a
// machines.Match struct into a token using the scanner's Token helper
// function:
//
//	func token(name string, tokenIds map[string]int) lex.Action {
//		return func(s *lex.Scanner, m *machines.Match) (interface{}, error) {
//			return s.Token(tokenIds[name], string(m.Bytes), m), nil
//		}
//	}
type Token struct {
	Type        int
	Value       interface{}
	Lexeme      []byte
	TC          int
	StartLine   int
	StartColumn int
	EndLine     int
	EndColumn   int
}

// Equals checks the equality of two tokens ignoring the Value field.
func (t *Token) Equals(other *Token) bool {
	if t == nil && other == nil {
		return true
	} else if t == nil || other == nil {
		return false
	}
	return t.TC == other.TC &&
		t.StartLine == other.StartLine &&
		t.StartColumn == other.StartColumn &&
		t.EndLine == other.EndLine &&
		t.EndColumn == other.EndColumn &&
		bytes.Equal(t.Lexeme, other.Lexeme) &&
		t.Type == other.Type
}

// String formats the token in a human readable form.
func (t *Token) String() string {
	return fmt.Sprintf("%d %q %d (%d, %d)-(%d, %d)", t.Type, t.Value, t.TC, t.StartLine, t.StartColumn, t.EndLine, t.EndColumn)
}

// An Action is a function which gets called when the Scanner finds a match
// during the lexing process. It turns a low level machines.Match struct into
// a token for the user's program. As different compilers/interpreters/parsers
// have different needs, Actions merely return an interface{}. This allows you
// to represent a token in any way you wish. An example Token struct is
// provided above.
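//
// Returning a nil token value from an Action causes Next to skip the match
// and keep scanning (see the loop in Next below). As a sketch, an Action
// that discards whitespace could look like this (illustrative, not part of
// the package):
//
//	func skip(scan *Scanner, match *machines.Match) (interface{}, error) {
//		return nil, nil // nil token: Next silently continues
//	}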
type Action func(scan *Scanner, match *machines.Match) (interface{}, error)

type pattern struct {
	regex  []byte
	action Action
}

// Lexer is a "builder" object which lets you construct a Scanner type, which
// does the actual work of tokenizing (splitting up and categorizing) a byte
// string. Get a new Lexer by calling the NewLexer() function. Add patterns to
// match (with their callbacks) by using the Add function. Finally, construct
// a scanner with the Scanner method to tokenize a byte string.
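//
// A minimal sketch of the flow (the regular expressions and the token and
// skip helpers are illustrative, not part of this package):
//
//	lexer := NewLexer()
//	lexer.Add([]byte(`[a-zA-Z_][a-zA-Z0-9_]*`), token("NAME", tokenIds))
//	lexer.Add([]byte(`( |\t|\n)+`), skip)
//	if err := lexer.Compile(); err != nil {
//		return err
//	}
//	scanner, err := lexer.Scanner(text)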
type Lexer struct {
	patterns   []*pattern
	nfaMatches map[int]int // match_idx -> pat_idx
	dfaMatches map[int]int // match_idx -> pat_idx
	program    inst.Slice
	dfa        *dfapkg.DFA
}

// Scanner tokenizes a byte string based on the patterns provided to the lexer
// object which constructed the scanner. This object works as a functional
// iterator using the Next method.
//
// Example
//
//	lexer, err := CreateLexer()
//	if err != nil {
//		return err
//	}
//	scanner, err := lexer.Scanner(someBytes)
//	if err != nil {
//		return err
//	}
//	for tok, err, eos := scanner.Next(); !eos; tok, err, eos = scanner.Next() {
//		if err != nil {
//			return err
//		}
//		fmt.Println(tok)
//	}
type Scanner struct {
	lexer   *Lexer
	matches map[int]int
	scan    machines.Scanner
	Text    []byte
	TC      int
	pTC     int
	sLine   int
	sColumn int
	eLine   int
	eColumn int
}

// Next iterates through the string being scanned, returning one token at a
// time until either an error is encountered or the end of the string is
// reached. The token is returned in the tok value. An error is indicated by
// err. Finally, eos (a bool) indicates the End Of String when it returns as
// true.
//
// Example
//
//	for tok, err, eos := scanner.Next(); !eos; tok, err, eos = scanner.Next() {
//		if err != nil {
//			// handle the error and exit the loop. For example:
//			return err
//		}
//		// do some processing on tok or store it somewhere. eg.
//		fmt.Println(tok)
//	}
//
// One useful error type which could be returned by Next() is
// machines.UnconsumedInput, which provides the position information for where
// in the text the scanning failed.
//
// For more information on functional iterators see:
// http://hackthology.com/functional-iteration-in-go.html
func (s *Scanner) Next() (tok interface{}, err error, eos bool) {
	var token interface{}
	// loop until an action produces a non-nil token; actions may return a
	// nil token to skip a match (eg. for whitespace or comments)
	for token == nil {
		tc, match, err, scan := s.scan(s.TC)
		if scan == nil {
			return nil, nil, true
		} else if err != nil {
			return nil, err, false
		} else if match == nil {
			return nil, fmt.Errorf("No match but no error"), false
		}
		s.scan = scan
		s.pTC = s.TC
		s.TC = tc
		s.sLine = match.StartLine
		s.sColumn = match.StartColumn
		s.eLine = match.EndLine
		s.eColumn = match.EndColumn
		pattern := s.lexer.patterns[s.matches[match.PC]]
		token, err = pattern.action(s, match)
		if err != nil {
			return nil, err, false
		}
	}
	return token, nil, false
}

// Token is a helper function for constructing a Token type inside of an
// Action.
func (s *Scanner) Token(typ int, value interface{}, m *machines.Match) *Token {
	return &Token{
		Type:        typ,
		Value:       value,
		Lexeme:      m.Bytes,
		TC:          m.TC,
		StartLine:   m.StartLine,
		StartColumn: m.StartColumn,
		EndLine:     m.EndLine,
		EndColumn:   m.EndColumn,
	}
}

// NewLexer constructs a new lexer object.
func NewLexer() *Lexer {
	return &Lexer{}
}

// Scanner creates a scanner for a particular byte string from the lexer.
func (l *Lexer) Scanner(text []byte) (*Scanner, error) {
	if l.program == nil && l.dfa == nil {
		err := l.Compile()
		if err != nil {
			return nil, err
		}
	}

	// prevent the user from modifying the text under scan
	textCopy := make([]byte, len(text))
	copy(textCopy, text)

	var s *Scanner
	if l.dfa != nil {
		s = &Scanner{
			lexer:   l,
			matches: l.dfaMatches,
			scan:    machines.DFALexerEngine(l.dfa.Start, l.dfa.Error, l.dfa.Trans, l.dfa.Accepting, textCopy),
			Text:    textCopy,
			TC:      0,
		}
	} else {
		s = &Scanner{
			lexer:   l,
			matches: l.nfaMatches,
			scan:    machines.LexerEngine(l.program, textCopy),
			Text:    textCopy,
			TC:      0,
		}
	}
	return s, nil
}

// Add a pattern to match on. When a match occurs during scanning the action
// function will be called by the Scanner to turn the low level machines.Match
// struct into a token.
func (l *Lexer) Add(regex []byte, action Action) {
	// invalidate any previously compiled machines so the new pattern is
	// included the next time Compile (or Scanner) runs
	l.program = nil
	l.dfa = nil
	l.patterns = append(l.patterns, &pattern{regex, action})
}

// Compile the supplied patterns to a DFA (the default). You don't need to
// call this method (it is called automatically by Scanner). However, you may
// want to call it if you construct a lexer once and then use it many times,
// as it will precompile the lexing program.
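//
// For example, a sketch of reuse across many inputs (inputs and process are
// illustrative):
//
//	if err := lexer.Compile(); err != nil {
//		return err
//	}
//	for _, text := range inputs {
//		scanner, err := lexer.Scanner(text) // no recompilation here
//		if err != nil {
//			return err
//		}
//		process(scanner)
//	}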
func (l *Lexer) Compile() error {
	return l.CompileDFA()
}

// assembleAST parses each pattern and folds the resulting ASTs, right to
// left, into one nested AltMatch tree:
// AltMatch(p0, AltMatch(p1, ... AltMatch(pN-2, pN-1))).
func (l *Lexer) assembleAST() (frontend.AST, error) {
	asts := make([]frontend.AST, 0, len(l.patterns))
	for _, p := range l.patterns {
		ast, err := frontend.Parse(p.regex)
		if err != nil {
			return nil, err
		}
		asts = append(asts, ast)
	}
	lexast := asts[len(asts)-1]
	for i := len(asts) - 2; i >= 0; i-- {
		lexast = frontend.NewAltMatch(asts[i], lexast)
	}
	return lexast, nil
}

// CompileNFA compiles an NFA explicitly. If no DFA has been created (DFAs are
// only created explicitly), the NFA will be used by Scanners when they are
// created.
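//
// For example, to force the NFA simulation engine instead of the default DFA
// (the pattern and the token helper are illustrative):
//
//	lexer := NewLexer()
//	lexer.Add([]byte(`[0-9]+`), token("NUMBER", tokenIds))
//	if err := lexer.CompileNFA(); err != nil {
//		return err
//	}
//	scanner, err := lexer.Scanner(text) // uses the NFA program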
func (l *Lexer) CompileNFA() error {
	if len(l.patterns) == 0 {
		return fmt.Errorf("No patterns added")
	}
	if l.program != nil {
		return nil
	}
	lexast, err := l.assembleAST()
	if err != nil {
		return err
	}
	program, err := frontend.Generate(lexast)
	if err != nil {
		return err
	}

	l.program = program
	l.nfaMatches = make(map[int]int)

	// the i-th MATCH instruction in the program belongs to the i-th
	// pattern added to the lexer
	ast := 0
	for i, instruction := range l.program {
		if instruction.Op == inst.MATCH {
			l.nfaMatches[i] = ast
			ast++
		}
	}

	if mes, err := l.matchesEmptyString(); err != nil {
		return err
	} else if mes {
		l.program = nil
		l.nfaMatches = nil
		return fmt.Errorf("One or more of the supplied patterns match the empty string")
	}
	return nil
}

// CompileDFA compiles a DFA explicitly. The DFA will be used by Scanners when
// they are created.
func (l *Lexer) CompileDFA() error {
	if len(l.patterns) == 0 {
		return fmt.Errorf("No patterns added")
	}
	if l.dfa != nil {
		return nil
	}
	lexast, err := l.assembleAST()
	if err != nil {
		return err
	}

	dfa := dfapkg.Generate(lexast)
	l.dfa = dfa
	l.dfaMatches = make(map[int]int)
	for mid := range dfa.Matches {
		l.dfaMatches[mid] = mid
	}

	if mes, err := l.matchesEmptyString(); err != nil {
		return err
	} else if mes {
		l.dfa = nil
		l.dfaMatches = nil
		return fmt.Errorf("One or more of the supplied patterns match the empty string")
	}
	return nil
}

// matchesEmptyString reports whether any of the supplied patterns match the
// empty string, by running the compiled machine on empty input.
func (l *Lexer) matchesEmptyString() (bool, error) {
	s, err := l.Scanner([]byte(""))
	if err != nil {
		return false, err
	}
	_, err, _ = s.Next()
	// the ese != nil check guards against a typed-nil *EmptyMatchError
	// stored in the error interface
	if ese, is := err.(*machines.EmptyMatchError); ese != nil && is {
		return true, nil
	}
	return false, nil
}