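The tokenize stage below consumes raw lexems from the lexer and turns them into typed tokens: parentheses, separators, numbers, strings, operators, and words, where a word is resolved against the operator and function tables before falling back to a plain identifier. It runs in its own goroutine and communicates over channels, so it stops when the context is cancelled, when the lexem stream is exhausted, or on the first error.
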
package lexpr

import (
	"context"
	"fmt"
	"strconv"
)

// tokenize reads raw lexems and emits typed tokens. Parentheses and
// separators pass through as-is, numbers are parsed, operators are
// looked up for their priority and associativity, and words are
// resolved to operators, functions, or plain identifiers.
func (l *Lexpr) tokenize(ctx context.Context, lexems <-chan lexem) <-chan Token {
	out := make(chan Token)
	go func() {
		defer close(out)
		for {
			select {
			case <-ctx.Done():
				// Evaluation was cancelled upstream.
				return
			case lexem, ok := <-lexems:
				if !ok {
					// The lexer closed its channel: end of input.
					return
				}
				switch lexem.Type {
				case lp:
					out <- Token{typ: lp}
				case rp:
					out <- Token{typ: rp}
				case sep:
					out <- Token{typ: sep}
				case number:
					ivalue, err := strconv.Atoi(lexem.Value)
					if err != nil {
						// Don't silently turn a malformed number into zero:
						// report it and stop the stream.
						out <- Token{
							typ:   tokError,
							value: fmt.Sprintf("invalid number: %s", lexem.Value),
						}
						return
					}
					out <- Token{
						typ:    number,
						ivalue: ivalue,
					}
				case str:
					out <- Token{
						typ:   str,
						value: lexem.Value,
					}
				case op:
					o, isOp := l.operators[lexem.Value]
					if !isOp {
						out <- Token{
							typ:   tokError,
							value: fmt.Sprintf("unknown operator: %s", lexem.Value),
						}
						return
					}
					out <- Token{
						typ:       op,
						value:     lexem.Value,
						priority:  o.priority,
						leftAssoc: o.leftAssoc,
					}
				case word:
					// A word may name an operator (e.g. "and"), a function,
					// or remain a plain identifier.
					o, isOp := l.operators[lexem.Value]
					_, isFunc := l.functions[lexem.Value]
					switch {
					case isOp:
						out <- Token{
							typ:       op,
							value:     lexem.Value,
							priority:  o.priority,
							leftAssoc: o.leftAssoc,
						}
					case isFunc:
						out <- Token{
							typ:   funct,
							value: lexem.Value,
						}
					default:
						out <- Token{
							typ:   word,
							value: lexem.Value,
						}
					}
				case tokError:
					// Propagate a lexer error and stop.
					out <- Token{
						typ:   tokError,
						value: lexem.Value,
					}
					return
				}
			}
		}
	}()
	return out
}
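
A minimal sketch of how this stage might be driven, assuming access to the package internals shown above (the lexem type, the token-type constants, and a Lexpr value with populated operators and functions tables); the exampleTokenize helper and the hand-built lexems are illustrative, not part of the library:

// Sketch only, in the same package.
func exampleTokenize(l *Lexpr) {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	lexems := make(chan lexem)
	go func() {
		defer close(lexems)
		// "2 + x" as raw lexems.
		lexems <- lexem{Type: number, Value: "2"}
		lexems <- lexem{Type: op, Value: "+"}
		lexems <- lexem{Type: word, Value: "x"}
	}()

	for tok := range l.tokenize(ctx, lexems) {
		fmt.Printf("%+v\n", tok)
	}
}

Because close(out) runs via defer, every exit path, whether cancellation, end of input, or an error token, also ends the consumer's range loop, so no stage of the pipeline is left blocked.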