-
Notifications
You must be signed in to change notification settings - Fork 2
/
lexer_test.go
86 lines (75 loc) · 2.76 KB
/
lexer_test.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
package specfile
import (
"strings"
"testing"
)
// TestNewTokenizer checks that NewTokenizer stores the supplied type and
// content verbatim on the returned tokenizer.
func TestNewTokenizer(t *testing.T) {
	tk := NewTokenizer("Tag", "test")
	ok := tk.Type == "Tag" && tk.Content == "test"
	if !ok {
		t.Error("[lexer]NewTokenizer test failed")
	}
}
/*func TestTokenizerToString(t *testing.T) {
tokenizer := NewTokenizer("Tag", "BuildRequires: xz")
if tokenizer.String() != "BuildRequires: xz" {
t.Error("[lexer]tokenizer to String test failed")
}
}*/
// TestConditionalTokenizer verifies that an %if/%endif span is lexed as a
// single "Conditional" token whose content is the entire original text.
func TestConditionalTokenizer(t *testing.T) {
	str := "%if 0%{?suse_version} > 1550\nBuildRequires: xz\n%endif"
	tokenizers, err := NewTokenizers(strings.NewReader(str))
	if err != nil {
		t.Fatalf("[lexer]conditional tokenizer test failed: %v", err)
	}
	// Guard the index: without this, a lexer that returns an empty slice
	// would panic the test instead of failing it.
	if len(tokenizers) == 0 {
		t.Fatal("[lexer]conditional tokenizer test failed: no tokens produced")
	}
	if tokenizers[0].Type != "Conditional" || tokenizers[0].Content != str {
		t.Error("[lexer]conditional tokenizer test failed")
	}
}
// TestMacroTokenizer verifies that a "%global ..." line is lexed as a single
// "Macro" token whose content is the original line.
func TestMacroTokenizer(t *testing.T) {
	str := "%global suse_version 1550"
	// The error was previously discarded with _; surface it so a failing
	// lexer reports a test failure rather than panicking on the index below.
	tokenizers, err := NewTokenizers(strings.NewReader(str))
	if err != nil {
		t.Fatalf("[lexer]macro tokenizer test failed: %v", err)
	}
	if len(tokenizers) == 0 {
		t.Fatal("[lexer]macro tokenizer test failed: no tokens produced")
	}
	if tokenizers[0].Type != "Macro" || tokenizers[0].Content != str {
		t.Error("[lexer]macro tokenizer test failed")
	}
}
// TestDependencyTokenizer verifies that a "BuildRequires:" line is lexed as a
// single "Dependency" token whose content is the original line.
func TestDependencyTokenizer(t *testing.T) {
	str := "BuildRequires: xz"
	// The error was previously discarded with _; surface it so a failing
	// lexer reports a test failure rather than panicking on the index below.
	tokenizers, err := NewTokenizers(strings.NewReader(str))
	if err != nil {
		t.Fatalf("[lexer]dependency tokenizer test failed: %v", err)
	}
	if len(tokenizers) == 0 {
		t.Fatal("[lexer]dependency tokenizer test failed: no tokens produced")
	}
	if tokenizers[0].Type != "Dependency" || tokenizers[0].Content != str {
		t.Error("[lexer]dependency tokenizer test failed")
	}
}
// TestTagTokenizer verifies that a "Name:" line is lexed as a single "Tag"
// token whose content is the original line.
func TestTagTokenizer(t *testing.T) {
	str := "Name: xz"
	// The error was previously discarded with _; surface it so a failing
	// lexer reports a test failure rather than panicking on the index below.
	tokenizers, err := NewTokenizers(strings.NewReader(str))
	if err != nil {
		t.Fatalf("[lexer]tag tokenizer test failed: %v", err)
	}
	if len(tokenizers) == 0 {
		t.Fatal("[lexer]tag tokenizer test failed: no tokens produced")
	}
	if tokenizers[0].Type != "Tag" || tokenizers[0].Content != str {
		t.Error("[lexer]tag tokenizer test failed")
	}
}
// TestEmptyTokenizer verifies that a bare newline is lexed as a single
// "Empty" token whose content is the newline itself.
func TestEmptyTokenizer(t *testing.T) {
	str := "\n"
	// The error was previously discarded with _; surface it so a failing
	// lexer reports a test failure rather than panicking on the index below.
	tokenizers, err := NewTokenizers(strings.NewReader(str))
	if err != nil {
		t.Fatalf("[lexer]empty tokenizer test failed: %v", err)
	}
	if len(tokenizers) == 0 {
		t.Fatal("[lexer]empty tokenizer test failed: no tokens produced")
	}
	if tokenizers[0].Type != "Empty" || tokenizers[0].Content != str {
		t.Error("[lexer]empty tokenizer test failed")
	}
}
// TestCommentTokenizer verifies that a "#"-prefixed line is lexed as a single
// "Comment" token whose content is the original line.
func TestCommentTokenizer(t *testing.T) {
	str := "# spec file for package gcc10"
	tokenizers, err := NewTokenizers(strings.NewReader(str))
	if err != nil {
		t.Fatalf("[lexer]comment tokenizer test failed: %v", err)
	}
	// Guard the index: without this, a lexer that returns an empty slice
	// would panic the test instead of failing it.
	if len(tokenizers) == 0 {
		t.Fatal("[lexer]comment tokenizer test failed: no tokens produced")
	}
	if tokenizers[0].Type != "Comment" || tokenizers[0].Content != str {
		t.Error("[lexer]comment tokenizer test failed")
	}
}
// TestSectionTokenizer verifies that a %description section is lexed as one
// "Section" token that ends before the following "%package " line, i.e. the
// token's content is exactly the section text and not the next directive.
func TestSectionTokenizer(t *testing.T) {
	str := "%description\nCore package for the GNU Compiler Collection, including the C language frontend.\n\n"
	str1 := "%package "
	tokenizers, err := NewTokenizers(strings.NewReader(str + str1))
	if err != nil {
		t.Fatalf("[lexer]section tokenizer test failed: %v", err)
	}
	// Guard the index: without this, a lexer that returns an empty slice
	// would panic the test instead of failing it.
	if len(tokenizers) == 0 {
		t.Fatal("[lexer]section tokenizer test failed: no tokens produced")
	}
	if tokenizers[0].Type != "Section" || tokenizers[0].Content != str {
		t.Error("[lexer]section tokenizer test failed")
	}
}
// TestSectionTokenizerWithUnclosedIf verifies that a %description section is
// still terminated correctly when the following "%package " directive sits
// inside a not-yet-closed %if block: the first token must cover only the
// section text.
func TestSectionTokenizerWithUnclosedIf(t *testing.T) {
	str := "%description\nCore package for the GNU Compiler Collection, including the C language frontend.\n\n"
	str1 := "%if 0%{?suse_version} > 1550\n%package "
	tokenizers, err := NewTokenizers(strings.NewReader(str + str1))
	if err != nil {
		t.Fatalf("[lexer]section tokenizer test failed: %v", err)
	}
	// Guard the index: without this, a lexer that returns an empty slice
	// would panic the test instead of failing it.
	if len(tokenizers) == 0 {
		t.Fatal("[lexer]section tokenizer test failed: no tokens produced")
	}
	if tokenizers[0].Type != "Section" || tokenizers[0].Content != str {
		t.Error("[lexer]section tokenizer test failed")
	}
}