Skip to content

Commit

Permalink
Fixed src build and docker logs retrieval in case of error (#263)
Browse files Browse the repository at this point in the history
* Added memtier_benchmark-3Mkeys-load-string-with-512B-values: Runs memtier_benchmark, for a keyspace length of 3M keys loading STRINGs in which the value has a data size of 512 Bytes, with 650 clients running sequential SET commands

* Added tests for io-threads setups

* Removed duplicate test

* Fixed missing update tests

* Added way of specifying test-regex via dockerhub cli

* Fixed src build and docker logs retrieval in case of error
  • Loading branch information
fcostaoliveira authored Aug 18, 2024
1 parent ff49e64 commit e9b242f
Show file tree
Hide file tree
Showing 8 changed files with 137 additions and 36 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "redis-benchmarks-specification"
version = "0.1.215"
version = "0.1.216"
description = "The Redis benchmarks specification describes the cross-language/tools requirements and expectations to foster performance and observability standards around redis related technologies. Members from both industry and academia, including organizations and individuals are encouraged to contribute."
authors = ["filipecosta90 <[email protected]>","Redis Performance Group <[email protected]>"]
readme = "Readme.md"
Expand Down
2 changes: 2 additions & 0 deletions redis_benchmarks_specification/__builder__/builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -343,6 +343,8 @@ def builder_process_stream(
"linenoise",
"lua",
]
if "fast_float" in deps_dir:
deps_list.append("fast_float")
if "hdr_histogram" in deps_dir:
deps_list.append("hdr_histogram")
if "fpconv" in deps_dir:
Expand Down
17 changes: 17 additions & 0 deletions redis_benchmarks_specification/__cli__/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,23 @@ def trigger_tests_dockerhub_cli_command_logic(args, project_name, project_versio
args.build_arch,
testDetails,
"n/a",
[],
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
".*",
0,
10000,
args.tests_regexp,
)
build_stream_fields["github_repo"] = args.gh_repo
build_stream_fields["github_org"] = args.gh_org
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ def generate_standalone_redis_server_args(
"{}".format(port),
]
if dbdir != "":
command.extend(["--dbdir", dbdir])
command.extend(["--dir", dbdir])
if configuration_parameters is not None:
for parameter, parameter_value in configuration_parameters.items():
if parameter not in added_params:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -832,39 +832,16 @@ def process_self_contained_coordinator_stream(
db_cpuset_cpus, current_cpu_pos = generate_cpuset_cpus(
ceil_db_cpu_limit, current_cpu_pos
)
logging.info(
"Running redis-server on docker image {} (cpuset={}) with the following args: {}".format(
run_image, db_cpuset_cpus, command_str
)
)
volumes = {}
working_dir = "/"
if mnt_point != "":
volumes = {
temporary_dir: {
"bind": mnt_point,
"mode": "rw",
},
}
working_dir = mnt_point
redis_container = docker_client.containers.run(
image=run_image,
volumes=volumes,
auto_remove=True,
privileged=True,
working_dir=mnt_point,
command=command_str,
network_mode="host",
detach=True,
cpuset_cpus=db_cpuset_cpus,
pid_mode="host",
publish_all_ports=True,
redis_container = start_redis_container(
command_str,
db_cpuset_cpus,
docker_client,
mnt_point,
redis_containers,
run_image,
temporary_dir,
)

time.sleep(5)

redis_containers.append(redis_container)

r = redis.StrictRedis(port=redis_proc_start_port)
r.ping()
redis_conns = [r]
Expand Down Expand Up @@ -1251,9 +1228,10 @@ def process_self_contained_coordinator_stream(
stdout=True, stderr=True
)
)
redis_container.remove()
except docker.errors.NotFound:
logging.info(
"When trying to stop DB container with id {} and image {} it was already stopped".format(
"When trying to fetch logs from DB container with id {} and image {} it was already stopped".format(
redis_container.id,
redis_container.image,
)
Expand All @@ -1269,6 +1247,7 @@ def process_self_contained_coordinator_stream(
for redis_container in redis_containers:
try:
redis_container.stop()
redis_container.remove()
except docker.errors.NotFound:
logging.info(
"When trying to stop DB container with id {} and image {} it was already stopped".format(
Expand All @@ -1282,6 +1261,7 @@ def process_self_contained_coordinator_stream(
if type(redis_container) == Container:
try:
redis_container.stop()
redis_container.remove()
except docker.errors.NotFound:
logging.info(
"When trying to stop Client container with id {} and image {} it was already stopped".format(
Expand All @@ -1295,6 +1275,7 @@ def process_self_contained_coordinator_stream(
temporary_dir, temporary_dir_client
)
)

shutil.rmtree(temporary_dir, ignore_errors=True)
shutil.rmtree(temporary_dir_client, ignore_errors=True)

Expand Down Expand Up @@ -1475,6 +1456,50 @@ def process_self_contained_coordinator_stream(
return stream_id, overall_result, total_test_suite_runs


def start_redis_container(
    command_str,
    db_cpuset_cpus,
    docker_client,
    mnt_point,
    redis_containers,
    run_image,
    temporary_dir,
    auto_remove=False,
):
    """Start a detached redis-server docker container and register it.

    Runs ``run_image`` on the host network with ``command_str`` as the
    container command, pinned to ``db_cpuset_cpus``. When ``mnt_point`` is
    non-empty, ``temporary_dir`` is bind-mounted read-write at ``mnt_point``
    and used as the container working directory; otherwise no volume is
    mounted and the working directory is "/".

    The started container is appended to ``redis_containers`` (caller-owned
    list used for later cleanup) and returned. Sleeps 5 seconds after start
    to give redis-server time to come up — TODO confirm a readiness probe
    would not be preferable.

    :param auto_remove: passed through to docker; when True the daemon
        removes the container on exit (which also discards its logs).
    :return: the started docker Container object.
    """
    logging.info(
        "Running redis-server on docker image {} (cpuset={}) with the following args: {}".format(
            run_image, db_cpuset_cpus, command_str
        )
    )
    volumes = {}
    working_dir = "/"
    if mnt_point != "":
        volumes = {
            temporary_dir: {
                "bind": mnt_point,
                "mode": "rw",
            },
        }
        logging.info(f"setting volume as follow: {volumes}. working_dir={mnt_point}")
        working_dir = mnt_point
    redis_container = docker_client.containers.run(
        image=run_image,
        volumes=volumes,
        auto_remove=auto_remove,
        privileged=True,
        # fix: pass the computed working_dir ("/" when mnt_point is empty);
        # previously working_dir=mnt_point left the computed local unused and
        # passed "" when no mount point was given
        working_dir=working_dir,
        command=command_str,
        network_mode="host",
        detach=True,
        cpuset_cpus=db_cpuset_cpus,
        pid_mode="host",
        publish_all_ports=True,
    )
    time.sleep(5)
    redis_containers.append(redis_container)
    return redis_container


def filter_test_files(
defaults_filename,
priority_lower_limit,
Expand Down
4 changes: 3 additions & 1 deletion tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,9 @@ docker =

deps =
-r{toxinidir}/dev_requirements.txt
# passenv = TST_BUILDER_X,TST_RUNNER_X,GH_TOKEN,TST_REDIS_DIR,DOCKER_HOST,DOCKER_TLS_VERIFY,DOCKER_CERT_PATH


passenv = TST_BUILDER_X,TST_RUNNER_X,GH_TOKEN,TST_REDIS_DIR,DOCKER_HOST,DOCKER_TLS_VERIFY,DOCKER_CERT_PATH,TST_BINARY_REDIS_DIR

stoponfail =
True
Expand Down
55 changes: 55 additions & 0 deletions utils/tests/test_self_contained_coordinator.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@

from redisbench_admin.utils.remote import get_overall_dashboard_keynames
from redisbench_admin.utils.utils import get_ts_metric_name
import logging

from redis_benchmarks_specification.__common__.env import (
STREAM_KEYNAME_NEW_BUILD_EVENTS,
Expand All @@ -18,6 +19,7 @@
)
from redis_benchmarks_specification.__self_contained_coordinator__.self_contained_coordinator import (
self_contained_coordinator_blocking_read,
start_redis_container,
)

from redis_benchmarks_specification.__self_contained_coordinator__.runners import (
Expand All @@ -31,6 +33,11 @@
from utils.tests.test_data.api_builder_common import flow_1_and_2_api_builder_checks


from redis_benchmarks_specification.__self_contained_coordinator__.docker import (
generate_standalone_redis_server_args,
)


def test_extract_client_cpu_limit():
with open(
"./utils/tests/test_data/test-suites/redis-benchmark-full-suite-1Mkeys-100B.yml",
Expand Down Expand Up @@ -263,3 +270,51 @@ def test_self_contained_coordinator_blocking_read():

except redis.exceptions.ConnectionError:
pass


def test_start_redis_container():
    """Integration test for start_redis_container.

    Skipped (returns immediately) unless TST_BINARY_REDIS_DIR points at a
    directory containing a redis-server binary; that directory is mounted
    into a gcc:8.5 container and the server started on port 6379. On ping
    failure the container logs are dumped before re-raising. The container
    is always stopped and removed, even on failure, so a broken run does
    not leak a privileged host-network container.
    """
    temporary_dir = os.getenv("TST_BINARY_REDIS_DIR", "")
    if temporary_dir == "":
        return

    mnt_point = "/mnt/redis/"
    executable = f"{mnt_point}redis-server"
    redis_proc_start_port = 6379
    current_cpu_pos = 0
    ceil_db_cpu_limit = 1
    redis_configuration_parameters = None
    redis_arguments = ""
    docker_client = docker.from_env()
    redis_containers = []

    command = generate_standalone_redis_server_args(
        executable,
        redis_proc_start_port,
        mnt_point,
        redis_configuration_parameters,
        redis_arguments,
    )
    command_str = " ".join(command)
    db_cpuset_cpus, current_cpu_pos = generate_cpuset_cpus(
        ceil_db_cpu_limit, current_cpu_pos
    )
    run_image = "gcc:8.5"
    redis_container = start_redis_container(
        command_str,
        db_cpuset_cpus,
        docker_client,
        mnt_point,
        redis_containers,
        run_image,
        temporary_dir,
    )
    try:
        r = redis.StrictRedis(port=redis_proc_start_port)
        try:
            r.ping()
        except redis.exceptions.ConnectionError:
            # Access and print the logs
            logs = redis_container.logs().decode("utf-8")
            logging.error("Container failed. Here are the logs:")
            logging.error(logs)
            raise
    finally:
        # Clean up on every path (previously the container leaked when
        # ping raised). Stop before remove: remove() errors on a running
        # container, matching the stop()+remove() cleanup used elsewhere.
        try:
            redis_container.stop()
            redis_container.remove()
        except docker.errors.NotFound:
            pass
2 changes: 1 addition & 1 deletion utils/tests/test_self_contained_coordinator_memtier.py
Original file line number Diff line number Diff line change
Expand Up @@ -204,7 +204,7 @@ def test_self_contained_coordinator_blocking_read():
assert len(datasink_conn.smembers(running_platforms_setname)) == 1
assert len(datasink_conn.smembers(testcases_setname)) == 1
assert len(datasink_conn.smembers(project_branches_setname)) == 1
assert len(datasink_conn.smembers(project_versions_setname)) == 0
assert len(datasink_conn.smembers(project_versions_setname)) == 1

except redis.exceptions.ConnectionError:
pass
Expand Down

0 comments on commit e9b242f

Please sign in to comment.