Skip to content

Commit 8655c75

Browse files
authored
Increase some integration test timeouts (dpkp#1374)
1 parent 7d8f9a4 commit 8655c75

File tree

5 files changed

+26
-11
lines changed

5 files changed

+26
-11
lines changed

.gitignore

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,4 +13,5 @@ docs/_build
1313
.cache*
1414
.idea/
1515
integration-test/
16-
tests-env/
16+
tests-env/
17+
.pytest_cache/

test/fixtures.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -141,7 +141,7 @@ def open(self):
141141

142142
# Party!
143143
timeout = 5
144-
max_timeout = 30
144+
max_timeout = 120
145145
backoff = 1
146146
end_at = time.time() + max_timeout
147147
tries = 1
@@ -161,6 +161,7 @@ def open(self):
161161
timeout *= 2
162162
time.sleep(backoff)
163163
tries += 1
164+
backoff += 1
164165
else:
165166
raise RuntimeError('Failed to start Zookeeper before max_timeout')
166167
self.out("Done!")
@@ -278,7 +279,7 @@ def open(self):
278279
env = self.kafka_run_class_env()
279280

280281
timeout = 5
281-
max_timeout = 30
282+
max_timeout = 120
282283
backoff = 1
283284
end_at = time.time() + max_timeout
284285
tries = 1
@@ -301,6 +302,7 @@ def open(self):
301302
timeout *= 2
302303
time.sleep(backoff)
303304
tries += 1
305+
backoff += 1
304306
else:
305307
raise RuntimeError('Failed to start KafkaInstance before max_timeout')
306308
self.out("Done!")

test/test_consumer_integration.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -647,13 +647,14 @@ def test_kafka_consumer_offsets_for_time(self):
647647
early_time = late_time - 2000
648648
tp = TopicPartition(self.topic, 0)
649649

650+
timeout = 10
650651
kafka_producer = self.kafka_producer()
651652
early_msg = kafka_producer.send(
652653
self.topic, partition=0, value=b"first",
653-
timestamp_ms=early_time).get(1)
654+
timestamp_ms=early_time).get(timeout)
654655
late_msg = kafka_producer.send(
655656
self.topic, partition=0, value=b"last",
656-
timestamp_ms=late_time).get(1)
657+
timestamp_ms=late_time).get(timeout)
657658

658659
consumer = self.kafka_consumer()
659660
offsets = consumer.offsets_for_times({tp: early_time})
@@ -699,12 +700,13 @@ def test_kafka_consumer_offsets_search_many_partitions(self):
699700

700701
kafka_producer = self.kafka_producer()
701702
send_time = int(time.time() * 1000)
703+
timeout = 10
702704
p0msg = kafka_producer.send(
703705
self.topic, partition=0, value=b"XXX",
704-
timestamp_ms=send_time).get()
706+
timestamp_ms=send_time).get(timeout)
705707
p1msg = kafka_producer.send(
706708
self.topic, partition=1, value=b"XXX",
707-
timestamp_ms=send_time).get()
709+
timestamp_ms=send_time).get(timeout)
708710

709711
consumer = self.kafka_consumer()
710712
offsets = consumer.offsets_for_times({

test/test_producer.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -38,12 +38,12 @@ def test_end_to_end(kafka_broker, compression):
3838
connect_str = ':'.join([kafka_broker.host, str(kafka_broker.port)])
3939
producer = KafkaProducer(bootstrap_servers=connect_str,
4040
retries=5,
41-
max_block_ms=10000,
41+
max_block_ms=30000,
4242
compression_type=compression,
4343
value_serializer=str.encode)
4444
consumer = KafkaConsumer(bootstrap_servers=connect_str,
4545
group_id=None,
46-
consumer_timeout_ms=10000,
46+
consumer_timeout_ms=30000,
4747
auto_offset_reset='earliest',
4848
value_deserializer=bytes.decode)
4949

@@ -87,7 +87,7 @@ def test_kafka_producer_proper_record_metadata(kafka_broker, compression):
8787
connect_str = ':'.join([kafka_broker.host, str(kafka_broker.port)])
8888
producer = KafkaProducer(bootstrap_servers=connect_str,
8989
retries=5,
90-
max_block_ms=10000,
90+
max_block_ms=30000,
9191
compression_type=compression)
9292
magic = producer._max_usable_produce_magic()
9393

test/testutil.py

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@
1212
from . import unittest
1313

1414
from kafka import SimpleClient
15+
from kafka.errors import LeaderNotAvailableError, KafkaTimeoutError
1516
from kafka.structs import OffsetRequestPayload
1617

1718
__all__ = [
@@ -98,7 +99,16 @@ def setUp(self):
9899
if self.create_client:
99100
self.client = SimpleClient('%s:%d' % (self.server.host, self.server.port))
100101

101-
self.client.ensure_topic_exists(self.topic)
102+
timeout = time.time() + 30
103+
while time.time() < timeout:
104+
try:
105+
self.client.load_metadata_for_topics(self.topic, ignore_leadernotavailable=False)
106+
if self.client.has_metadata_for_topic(topic):
107+
break
108+
except LeaderNotAvailableError:
109+
time.sleep(1)
110+
else:
111+
raise KafkaTimeoutError('Timeout loading topic metadata!')
102112

103113
self._messages = {}
104114

0 commit comments

Comments (0)