
Commit eb6db72

Merge pull request #253 from splunk/INGEST-17730
Kafka CI pipeline for multiple versions of Kafka
2 parents febc85c + 961dedc commit eb6db72

File tree: 3 files changed, +96 / -56 lines


.circleci/config.yml

Lines changed: 84 additions & 46 deletions
@@ -1,38 +1,76 @@
 version: 2.1
 
 workflows:
-  version: 2.1
   test:
     jobs:
-      - build_test:
+      # - build_test_kafka:
+      #     kafka_version: 1.0.0
+      #     kafka_package: kafka_2.11-1.0.0.tgz
+      #     filters:
+      #       branches:
+      #         ignore:
+      #           - /^release\/.*/
+      #           - master
+      - build_test_kafka:
+          kafka_version: 1.1.1
+          kafka_package: kafka_2.11-1.1.1.tgz
+          filters:
+            branches:
+              ignore:
+                - /^release\/.*/
+                - master
+      - build_test_kafka:
+          kafka_version: 2.0.0
+          kafka_package: kafka_2.11-2.0.0.tgz
+          filters:
+            branches:
+              ignore:
+                - /^release\/.*/
+                - master
+      - build_test_kafka:
+          kafka_version: 2.1.0
+          kafka_package: kafka_2.12-2.1.0.tgz
+          filters:
+            branches:
+              ignore:
+                - /^release\/.*/
+                - master
+      - build_test_kafka:
+          kafka_version: 2.6.0
+          kafka_package: kafka_2.13-2.6.0.tgz
           filters:
             branches:
               ignore:
                 - /^release\/.*/
                 - master
 
 jobs:
-  build_test:
-    resource_class: large
-    machine:
-      image: ubuntu-1604:201903-01
-    environment:
-      MINIKUBE_HOME: /home/circleci
-      CI_SPLUNK_VERSION: "8.0.2.1"
-      CI_SPLUNK_FILENAME: splunk-8.0.2.1-f002026bad55-linux-2.6-amd64.deb
-      CI_SPLUNK_HOST: 127.0.0.1
-      CI_SPLUNK_PORT: 8089
-      CI_SPLUNK_USERNAME: admin
-      CI_SPLUNK_HEC_TOKEN: a6b5e77f-d5f6-415a-bd43-930cecb12959
-      CI_SPLUNK_HEC_TOKEN_ACK: a6b5e77f-d5f6-415a-bd43-930cecb12950
-      CI_SPLUNK_PASSWORD: helloworld
-      CI_INDEX_EVENTS: main
-      CI_KAFKA_HEADER_INDEX: kafka
-      CI_DATAGEN_IMAGE: rock1017/log-generator:latest
-      CI_KAFKA_VERSION: 2.4.0
-      CI_KAFKA_FILENAME: kafka_2.13-2.4.0.tgz
-      CI_OLD_CONNECTOR_VERSION: v1.2.0
+  build_test_kafka:
     working_directory: ~/repo
+    executor:
+      name: kafka_connect_linux
+    parameters:
+      kafka_version:
+        description: "Kafka version"
+        type: string
+      kafka_package:
+        description: "Kafka package name"
+        type: string
+    steps:
+      - build_test:
+          kafka_version: << parameters.kafka_version >>
+          kafka_package: << parameters.kafka_package >>
+
+
+commands:
+  build_test:
+    parameters:
+      kafka_version:
+        description: "Kafka version"
+        type: string
+      kafka_package:
+        description: "Kafka package name"
+        type: string
     steps:
       - attach_workspace:
           at: /tmp
@@ -59,15 +97,6 @@ jobs:
       - run:
          name: Create file for checksum
          command: echo $CI_SPLUNK_VERSION > /tmp/splunk_version.txt
-      - install_splunk
-      - install_kafka
-      - test_kafka_connect_upgrade
-      - run_functional_tests
-
-commands:
-  install_splunk:
-    description: "Install Splunk"
-    steps:
       - run:
          name: Install Splunk
          command: |
@@ -109,16 +138,12 @@ commands:
            curl -X POST -u $CI_SPLUNK_USERNAME:$CI_SPLUNK_PASSWORD -k -d "name=$CI_INDEX_EVENTS&datatype=event" https://$CI_SPLUNK_HOST:$CI_SPLUNK_PORT/servicesNS/-/search/data/indexes
            curl -X POST -u $CI_SPLUNK_USERNAME:$CI_SPLUNK_PASSWORD -k -d "name=$CI_KAFKA_HEADER_INDEX&datatype=event" https://$CI_SPLUNK_HOST:$CI_SPLUNK_PORT/servicesNS/-/search/data/indexes
            sudo /opt/splunk/bin/splunk restart --accept-license --answer-yes --no-prompt
-
-  install_kafka:
-    description: "Install Kafka"
-    steps:
       - run:
-          name: Install Kafka
+          name: Install Kafka << parameters.kafka_version >>
          command: |
-            cd /tmp && wget http://www-us.apache.org/dist/kafka/$CI_KAFKA_VERSION/$CI_KAFKA_FILENAME
-            sudo tar xzf $CI_KAFKA_FILENAME
-            rm $CI_KAFKA_FILENAME
+            cd /tmp && wget https://archive.apache.org/dist/kafka/<< parameters.kafka_version >>/<< parameters.kafka_package >>
+            sudo tar xzf << parameters.kafka_package >>
+            rm << parameters.kafka_package >>
            sudo mv kafka_* /usr/local/kafka
            sudo tee /etc/systemd/system/zookeeper.service > /dev/null \<< EOF
            [Unit]
@@ -154,9 +179,6 @@ commands:
            sudo systemctl daemon-reload
            sudo systemctl start zookeeper
            sudo systemctl start kafka
-  test_kafka_connect_upgrade:
-    description: "Test kafka connect upgrade"
-    steps:
       - run:
          name: Test kafka connect upgrade
          command: |
@@ -173,10 +195,6 @@ commands:
            export PYTHONWARNINGS="ignore:Unverified HTTPS request"
            echo "Test kafka connect upgrade ..."
            python test/lib/connector_upgrade.py
-
-  run_functional_tests:
-    description: "Run functional tests on current kafka connect version"
-    steps:
       - run:
          name: Install kafka connect
          command: |
@@ -191,3 +209,23 @@ commands:
            sleep 5
            echo "Running functional tests....."
            python -m pytest -p no:warnings -s
+
+executors:
+  kafka_connect_linux:
+    resource_class: large
+    machine:
+      image: ubuntu-1604:201903-01
+    environment:
+      MINIKUBE_HOME: /home/circleci
+      CI_SPLUNK_VERSION: "8.0.2.1"
+      CI_SPLUNK_FILENAME: splunk-8.0.2.1-f002026bad55-linux-2.6-amd64.deb
+      CI_SPLUNK_HOST: 127.0.0.1
+      CI_SPLUNK_PORT: 8089
+      CI_SPLUNK_USERNAME: admin
+      CI_SPLUNK_HEC_TOKEN: a6b5e77f-d5f6-415a-bd43-930cecb12959
+      CI_SPLUNK_HEC_TOKEN_ACK: a6b5e77f-d5f6-415a-bd43-930cecb12950
+      CI_SPLUNK_PASSWORD: helloworld
+      CI_INDEX_EVENTS: main
+      CI_KAFKA_HEADER_INDEX: kafka
+      CI_DATAGEN_IMAGE: rock1017/log-generator:latest
+      CI_OLD_CONNECTOR_VERSION: v1.2.0
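
The workflow now fans out one parameterized build_test_kafka job per Kafka release and downloads each tarball from archive.apache.org, which, unlike the old www-us.apache.org mirror, keeps older releases available. As an illustration only, not part of this change, a small helper script along these lines could sanity-check that every version/package pair in the matrix is still downloadable before a new entry is added:

#!/usr/bin/env python3
# Hypothetical helper, not included in this PR: verify that each Kafka
# version/package pair used in .circleci/config.yml resolves on the archive.
import urllib.request

# Version/package pairs mirrored from the workflow above.
KAFKA_MATRIX = [
    ("1.1.1", "kafka_2.11-1.1.1.tgz"),
    ("2.0.0", "kafka_2.11-2.0.0.tgz"),
    ("2.1.0", "kafka_2.12-2.1.0.tgz"),
    ("2.6.0", "kafka_2.13-2.6.0.tgz"),
]

def archive_url(version, package):
    # Same URL shape the "Install Kafka" step uses.
    return "https://archive.apache.org/dist/kafka/%s/%s" % (version, package)

if __name__ == "__main__":
    for version, package in KAFKA_MATRIX:
        request = urllib.request.Request(archive_url(version, package), method="HEAD")
        with urllib.request.urlopen(request, timeout=30) as response:
            print(version, response.status, archive_url(version, package))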

test/lib/commonkafka.py

Lines changed: 12 additions & 9 deletions
@@ -118,15 +118,18 @@ def get_running_kafka_connector_task_status(setup, params):
     '''
     Get running kafka connect connector tasks status using kafka connect REST API
     '''
-    response = requests.get(url=setup["kafka_connect_url"] + "/connectors/" + params["name"] + "/status",
-                            headers={'Accept': 'application/json', 'Content-Type': 'application/json'})
-    content = response.json()
-
-    if content.get('connector'):
-        if content['connector']['state'] == 'RUNNING':
-            task_status = jsonpath.jsonpath(content, '$.tasks.*.state')
-            return task_status
-
+    t_end = time.time() + 60
+    url = setup["kafka_connect_url"] + "/connectors/" + params["name"] + "/status"
+    header = {'Accept': 'application/json', 'Content-Type': 'application/json'}
+    while time.time() < t_end:
+        content = requests.get(url=url, headers=header).json()
+        if content.get('connector'):
+            if content['connector']['state'] == 'RUNNING' and len(content['tasks']) > 0:
+                time.sleep(2)
+                content = requests.get(url=url, headers=header).json()
+                logger.info(content)
+                task_status = jsonpath.jsonpath(content, '$.tasks.*.state')
+                return task_status
 
 
 
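With this change the helper no longer takes a single snapshot of the connector status: it polls the Kafka Connect REST API for up to 60 seconds until the connector reports RUNNING with at least one task, waits two more seconds for the task states to settle, and only then returns them. A minimal standalone sketch of the same polling pattern, assuming only the requests library, using plain dict access instead of jsonpath, and with placeholder endpoint and connector names:

# Standalone sketch of the polling pattern above; the Connect URL and
# connector name in the usage comment are placeholders.
import time
import requests

def wait_for_connector_tasks(connect_url, connector_name, timeout=60, settle=2):
    """Poll Kafka Connect until the connector is RUNNING with at least one
    task, then return the list of task states (None if the timeout expires)."""
    url = "%s/connectors/%s/status" % (connect_url, connector_name)
    headers = {'Accept': 'application/json', 'Content-Type': 'application/json'}
    deadline = time.time() + timeout
    while time.time() < deadline:
        content = requests.get(url=url, headers=headers).json()
        if content.get('connector', {}).get('state') == 'RUNNING' and content.get('tasks'):
            time.sleep(settle)  # let task states settle before reading them
            content = requests.get(url=url, headers=headers).json()
            return [task['state'] for task in content.get('tasks', [])]
        time.sleep(1)
    return None

# Example usage (placeholder endpoint and connector name):
# wait_for_connector_tasks("http://localhost:8083", "splunk-sink")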
test/testcases/test_crud.py

Lines changed: 0 additions & 1 deletion
@@ -128,5 +128,4 @@ def test_invalid_crud_event_enrichment_tasks(self, setup, test_case, config_inpu
         setup['connectors'].append(test_case)
 
         assert create_kafka_connector(setup, connector_definition_invalid_tasks) is True
-        time.sleep(2)
         assert get_running_kafka_connector_task_status(setup, connector_definition_invalid_tasks) == expected
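
The explicit time.sleep(2) before the status assertion is dropped because get_running_kafka_connector_task_status now polls on its own. Purely as an illustration, with a made-up connector definition and an assumed import path for the helpers in test/lib/commonkafka.py, the resulting test shape is roughly:

# Illustrative only: the import path, connector definition, and expected
# task states are placeholders, not taken from this PR.
from lib.commonkafka import create_kafka_connector, get_running_kafka_connector_task_status

def test_connector_tasks_reach_expected_state(setup):
    connector_definition = {"name": "example-connector", "config": {"tasks.max": "1"}}
    setup['connectors'].append("example-connector")
    assert create_kafka_connector(setup, connector_definition) is True
    # No sleep needed: the status helper polls until the connector is RUNNING.
    assert get_running_kafka_connector_task_status(setup, connector_definition) == ["RUNNING"]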
