1
1
import pytest
2
2
import logging
3
3
import sys
4
- from ..commonkafka import create_kafka_connector , delete_kafka_connector
4
+ import time
5
+ from ..commonkafka import create_kafka_connector , delete_kafka_connector , update_kafka_connector , get_kafka_connector_tasks , get_kafka_connector_status , pause_kafka_connector , resume_kafka_connector , restart_kafka_connector
5
6
6
7
logger = logging .getLogger (__name__ )
7
8
logger .setLevel (logging .INFO )
11
12
logger .addHandler (handler )
12
13
13
14
@pytest.mark.parametrize("test_input,expected", [
    ("test_valid_CRUD_tasks", True)
])
def test_valid_CRUD_tasks(setup, test_input, expected):
    '''
    Test that valid kafka connect task can be created, updated, paused, resumed, restarted and deleted
    '''
    logger.info("testing test_valid_CRUD_tasks input={0} expected={1} ".format(
        test_input, expected))

    def build_connector_definition(max_tasks):
        # Assemble the connector definition dict sent to the Kafka Connect
        # REST API; only tasks.max varies between the create and update steps.
        return {
            "name": "kafka-connect-splunk",
            "config": {
                "connector.class": "com.splunk.kafka.connect.SplunkSinkConnector",
                "tasks.max": max_tasks,
                "topics": setup["kafka_topic"],
                "splunk.indexes": setup["splunk_index"],
                "splunk.hec.uri": setup["splunkd_url"],
                "splunk.hec.token": setup["splunk_token"],
                "splunk.hec.raw": "false",
                "splunk.hec.ack.enabled": "false",
                "splunk.hec.ssl.validate.certs": "false"
            }
        }

    # Validate create task
    connector_definition = build_connector_definition("3")
    assert create_kafka_connector(setup, connector_definition) == expected

    # updating the definition to use 5 tasks instead of 3
    connector_definition = build_connector_definition("5")

    # Validate update task
    assert update_kafka_connector(setup, connector_definition) == expected

    # Validate get tasks — the connector should report exactly tasks.max tasks
    tasks = get_kafka_connector_tasks(setup, connector_definition)
    assert tasks == int(connector_definition["config"]["tasks.max"])

    # Validate pause task
    assert pause_kafka_connector(setup, connector_definition) == expected

    # Validate resume task
    assert resume_kafka_connector(setup, connector_definition) == expected

    # Validate restart task
    assert restart_kafka_connector(setup, connector_definition) == expected

    # Validate delete task
    assert delete_kafka_connector(setup, connector_definition) == expected
77
+
78
+
79
@pytest.mark.parametrize("test_input,expected", [
    ("test_invalid_CRUD_tasks", False)
])
def test_invalid_CRUD_tasks(setup, test_input, expected):
    '''
    Test that invalid kafka connect task cannot be created
    '''
    logger.info("testing test_invalid_CRUD_tasks input={0} expected={1} ".format(
        test_input, expected))

    # Known-good baseline connector config; each case below corrupts exactly
    # one field so a rejection can be attributed to that field alone.
    valid_config = {
        "connector.class": "com.splunk.kafka.connect.SplunkSinkConnector",
        "tasks.max": "3",
        "topics": setup["kafka_topic"],
        "splunk.indexes": setup["splunk_index"],
        "splunk.hec.uri": setup["splunkd_url"],
        "splunk.hec.token": setup["splunk_token"],
        "splunk.hec.raw": "false",
        "splunk.hec.ack.enabled": "false",
        "splunk.hec.ssl.validate.certs": "false"
    }

    invalid_overrides = [
        # tasks.max invalid (not number)
        {"tasks.max": "dummy-string"},
        # splunk.hec.raw invalid (not boolean)
        {"splunk.hec.raw": "disable"},
        # topics invalid (empty string)
        {"topics": ""},
        # splunk.hec.json.event.enrichment invalid (non key value pairs)
        {"splunk.hec.json.event.enrichment": "testing-testing non KV"},
        # splunk.hec.json.event.enrichment invalid (key value pairs not separated by commas)
        {"splunk.hec.json.event.enrichment": "key1=value1 key2=value2"},
    ]

    for override in invalid_overrides:
        config = dict(valid_config)
        config.update(override)
        connector_definition_invalid_tasks = {
            "name": "kafka-connect-splunk",
            "config": config
        }
        # Every invalid definition must be rejected by the Connect REST API.
        assert create_kafka_connector(setup, connector_definition_invalid_tasks) == expected
0 commit comments