lexer.go

package specfile

import (
	"io"
	"strings"
)

// Tokenizers is a flat list of Tokenizer values.
type Tokenizers []Tokenizer

// NewTokenizers dumps the specfile plain text read from rd into a
// flat list of Tokenizers.
func NewTokenizers(rd io.ReaderAt) (tokenizers Tokenizers, err error) {
	err = walkFile(rd, true, func(reader io.ReaderAt, line *Line) (error, int64) {
		var c Counter
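		// a conditional block (%if ... %endif) is read in full and
		// emitted as a single Conditional token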
		if line.isConditional() {
			tmp := line.Offset
			err1 := readConditionalLine(reader, line, &c, 0)
			if err1 != nil && err1 != io.EOF {
				return err1, tmp
			}
			tokenizers = append(tokenizers, NewTokenizer("Conditional", strings.Join(line.Lines, "")))
			if err1 == io.EOF {
				return err1, line.Offset
			}
			return nil, line.Offset
		}
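		// a section header opens a multi-line block: keep reading
		// lines until the section ends or the next one begins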
		if line.isSection() {
			for {
				tmp := line.Offset
				last := line.Last
				err1 := readLine(reader, line, &c)
				if err1 != nil && err1 != io.EOF {
					return err1, tmp
				}
				if line.Last == "\n" && last == "\n" {
					// two consecutive blank lines mark the end of this section
					break
				}
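				// a fresh section header means the current section is
				// done: drop that header line and rewind to its offset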
				if line.isSection() && line.Last != last {
					line.Lines = line.Lines[:line.Len-1]
					line.Len--
					line.Last = line.Lines[line.Len-1]
					line.Offset = tmp
					break
				}
				if err1 == io.EOF {
					return err1, line.Offset
				}
			}
			// a trailing unclosed %if/%else does not belong to this
			// section: drop it and rewind the offset so it is
			// tokenized on its own
			if strings.HasPrefix(line.Last, "%if") || strings.HasPrefix(line.Last, "%else") {
				n := int64(len(line.Last))
				line.Lines = line.Lines[:line.Len-1]
				line.Len--
				line.Last = line.Lines[line.Len-1]
				line.Offset -= n
			}
			tokenizers = append(tokenizers, NewTokenizer("Section", strings.Join(line.Lines, "")))
			return nil, line.Offset
		}
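		// the remaining constructs are single lines, each becoming one token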
		if line.isMacro() {
			tokenizers = append(tokenizers, NewTokenizer("Macro", strings.Join(line.Lines, "")))
			return nil, line.Offset
		}
		if line.isDependency() {
			tokenizers = append(tokenizers, NewTokenizer("Dependency", strings.Join(line.Lines, "")))
			return nil, line.Offset
		}
		if line.isTag() {
			tokenizers = append(tokenizers, NewTokenizer("Tag", strings.Join(line.Lines, "")))
			return nil, line.Offset
		}
		// empty line
		if strings.Join(line.Lines, "\n") == "\n" {
			tokenizers = append(tokenizers, NewTokenizer("Empty", "\n"))
			return nil, line.Offset
		}
		// anything else that still has content is treated as a comment
		if line.Len > 0 {
			tokenizers = append(tokenizers, NewTokenizer("Comment", strings.Join(line.Lines, "")))
		}
		return nil, line.Offset
	})
	return tokenizers, err
}
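
// A minimal usage sketch (illustrative, not part of the package API):
// hand any io.ReaderAt, such as an *os.File, to NewTokenizers and walk
// the resulting token list. The error handling below assumes walkFile
// maps a clean end-of-file to nil; adjust if it surfaces io.EOF on
// success.
//
//	f, err := os.Open("example.spec") // hypothetical spec file path
//	if err != nil {
//		log.Fatal(err)
//	}
//	defer f.Close()
//
//	tokenizers, err := NewTokenizers(f)
//	if err != nil {
//		log.Fatal(err)
//	}
//	for _, t := range tokenizers {
//		fmt.Printf("%-12s %q\n", t.Type, t.Content)
//	}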

// Tokenizer is a typed piece of specfile text, for example
// Tokenizer{"Macro", "%define fcitx5_version 5.0.1\n"}.
type Tokenizer struct {
	Type    string
	Content string
}

// NewTokenizer returns a new Tokenizer with the given type and content.
func NewTokenizer(typ, content string) Tokenizer {
	return Tokenizer{typ, content}
}

// String returns the raw content.
/*func (token Tokenizer) String() string {
	return token.Content
}*/