-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathtokenizer_comments.go
71 lines (56 loc) · 1.57 KB
/
tokenizer_comments.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
package golex
import "fmt"
// Predefined comment syntaxes, plus a package-level cache used to hand the
// syntax matched by CanTokenize over to the following Tokenize call.
var (
// SlashSingleLineCommentSyntax matches "//" comments terminated by newline.
SlashSingleLineCommentSyntax = CommentSyntax{Opener: "//"}
// SlashMultilineCommentSyntax matches "/* ... */" block comments.
SlashMultilineCommentSyntax = CommentSyntax{Opener: "/*", Closer: "*/"}
// HashtagSingleLineCommentSyntax matches "#" comments terminated by newline.
HashtagSingleLineCommentSyntax = CommentSyntax{Opener: "#"}
// cachedCommentSyntax holds the syntax matched by the most recent
// CanTokenize call; Tokenize consumes and resets it.
// NOTE(review): mutable package-level state — not safe if multiple lexers
// tokenize concurrently; confirm single-threaded use.
cachedCommentSyntax *CommentSyntax
)
// CommentSyntax describes how a comment is delimited. Opener is the character
// sequence that starts the comment. Closer ends it; an empty Closer marks a
// single-line comment that runs to the next newline (see Tokenize).
type CommentSyntax struct {
Opener string
Closer string
}
// CommentTokenizer recognizes and consumes comments according to the lexer's
// configured CommentSyntaxes. It is stateless; the matched syntax is kept in
// the package-level cachedCommentSyntax between CanTokenize and Tokenize.
type CommentTokenizer struct{}
// CanTokenize reports whether the characters at the lexer's cursor begin a
// comment under any of the lexer's configured CommentSyntaxes. On a match it
// caches the matched syntax so the following Tokenize call can reuse it.
func (c CommentTokenizer) CanTokenize(l *Lexer) bool {
	if len(l.CommentSyntaxes) == 0 {
		return false
	}
	for _, syntax := range l.CommentSyntaxes {
		if l.NextCharsAre([]rune(syntax.Opener)) {
			// Copy before taking the address: prior to Go 1.22 the range
			// variable is a single reused variable, so &syntax would alias
			// storage that later iterations (or a later loop) overwrite.
			matched := syntax
			cachedCommentSyntax = &matched
			return true
		}
	}
	return false
}
// Tokenize consumes the comment starting at the lexer's cursor and returns it
// as a TypeComment token (the opener characters are included in the token
// literal). If no syntax was cached by a prior CanTokenize, it retries the
// match itself and returns an invalid-token error when nothing applies. When
// l.IgnoreComments is set, the comment is skipped and the next token is
// returned instead.
func (c CommentTokenizer) Tokenize(l *Lexer) (Token, error) {
	if cachedCommentSyntax == nil {
		// Called without a preceding successful CanTokenize: try to match a
		// syntax now, and fail with an invalid-token error if none applies.
		if !c.CanTokenize(l) {
			return Token{Type: TypeInvalid, Position: l.GetPosition()},
				NewError(fmt.Sprintf("Invalid token '%c' found", l.CharAtCursor()), l.GetPosition(), l.state.Content)
		}
		return c.Tokenize(l)
	}

	// Snapshot the matched syntax and clear the cache immediately, so the
	// closure below reads a local value rather than mutable package state,
	// and re-entrant calls (via NextToken) see a clean cache.
	syntax := *cachedCommentSyntax
	cachedCommentSyntax = nil

	var reachedEndOfComment func(*Lexer) bool
	if syntax.Closer == "" {
		// Single-line comment: runs to the end of the current line.
		reachedEndOfComment = func(l *Lexer) bool {
			return l.CharAtCursor() == '\n'
		}
	} else {
		// Block comment: runs until the closer sequence appears.
		reachedEndOfComment = func(l *Lexer) bool {
			return l.NextCharsAre([]rune(syntax.Closer))
		}
	}

	token := Token{Type: TypeComment, Position: l.GetPosition()}
	// NOTE(review): the `<=` bound assumes CharAtCursor tolerates
	// cursor == ContentLength — confirm against the Lexer's accessors.
	for l.GetCursor() <= l.state.ContentLength && !reachedEndOfComment(l) {
		token.AppendChar(l.CharAtCursor())
		l.IncrementCursor(1)
	}
	// Step past the closer; a no-op for single-line comments (empty Closer),
	// which leave the terminating newline for the caller.
	l.IncrementCursor(len(syntax.Closer))

	if l.IgnoreComments {
		// Comments are suppressed: hand back the next real token instead.
		return l.NextToken()
	}
	return token, nil
}