Add airgap option to cli docker triggering
fcostaoliveira committed Aug 8, 2024
1 parent 6b2e772 commit 2961fb5
Showing 4 changed files with 169 additions and 22 deletions.
48 changes: 26 additions & 22 deletions redis_benchmarks_specification/__builder__/builder.py
@@ -312,28 +312,7 @@ def builder_process_stream(
if "run_image" in build_config:
run_image = build_config["run_image"]
if docker_air_gap:
airgap_key = "docker:air-gap:{}".format(run_image)
logging.info(
"DOCKER AIR GAP: storing run image named: {} in redis key {}".format(
run_image, airgap_key
)
)
run_image_binary_stream = io.BytesIO()
run_image_docker = docker_client.images.get(run_image)
for chunk in run_image_docker.save():
run_image_binary_stream.write(chunk)
# expire after 7 days
binary_exp_secs = 24 * 60 * 60 * 7
res_airgap = conn.set(
airgap_key,
run_image_binary_stream.getbuffer(),
ex=binary_exp_secs,
)
logging.info(
"DOCKER AIR GAP: result of set bin data to {}: {}".format(
airgap_key, res_airgap
)
)
store_airgap_image_redis(conn, docker_client, run_image)

compiler = build_config["compiler"]
cpp_compiler = build_config["cpp_compiler"]
@@ -535,6 +514,31 @@ def builder_process_stream(
return previous_id, new_builds_count, build_stream_fields_arr


def store_airgap_image_redis(conn, docker_client, run_image):
airgap_key = "docker:air-gap:{}".format(run_image)
logging.info(
"DOCKER AIR GAP: storing run image named: {} in redis key {}".format(
run_image, airgap_key
)
)
run_image_binary_stream = io.BytesIO()
run_image_docker = docker_client.images.get(run_image)
for chunk in run_image_docker.save():
run_image_binary_stream.write(chunk)
# expire after 7 days
binary_exp_secs = 24 * 60 * 60 * 7
res_airgap = conn.set(
airgap_key,
run_image_binary_stream.getbuffer(),
ex=binary_exp_secs,
)
logging.info(
"DOCKER AIR GAP: result of set bin data to {}: {}".format(
airgap_key, res_airgap
)
)


def generate_benchmark_stream_request(
id,
conn,
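
For context, a consumer can restore an image stored under one of these docker:air-gap:* keys with the inverse operations. A minimal sketch, assuming a hypothetical helper name (load_airgap_image_redis is not part of this commit); conn.get and docker_client.images.load are the standard redis-py and docker-py calls:

import docker
import redis


def load_airgap_image_redis(conn, docker_client, run_image):
    # hypothetical counterpart to store_airgap_image_redis above
    airgap_key = "docker:air-gap:{}".format(run_image)
    image_payload = conn.get(airgap_key)
    if image_payload is None:
        # key expired (7 day TTL) or was never stored
        return None
    # images.load() accepts the binary tarball produced by image.save()
    # and returns the list of loaded images
    loaded_images = docker_client.images.load(image_payload)
    return loaded_images[0] if loaded_images else None
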
6 changes: 6 additions & 0 deletions redis_benchmarks_specification/__cli__/args.py
@@ -136,6 +136,12 @@ def spec_cli_args(parser):
parser.add_argument("--id", type=str, default="dockerhub")
parser.add_argument("--mnt_point", type=str, default="")
parser.add_argument("--trigger-unstable-commits", type=bool, default=True)
parser.add_argument(
"--docker-dont-air-gap",
default=False,
action="store_true",
help="Dont store the docker images in redis keys.",
)
parser.add_argument(
"--use-tags",
default=False,
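
A quick sketch of how the new flag lands on the parsed namespace; argparse maps --docker-dont-air-gap to the attribute docker_dont_air_gap, and this assumes none of the other spec_cli_args flags are required:

import argparse

from redis_benchmarks_specification.__cli__.args import spec_cli_args

parser = argparse.ArgumentParser(
    description="example",
    formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser = spec_cli_args(parser)
# store_true flips the False default, so the air-gap store is skipped
args = parser.parse_args(["--docker-dont-air-gap"])
assert args.docker_dont_air_gap is True
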
7 changes: 7 additions & 0 deletions redis_benchmarks_specification/__cli__/cli.py
@@ -12,6 +12,8 @@
import subprocess
import sys
import tempfile

import docker
import git
import packaging
import redis
@@ -20,6 +22,7 @@

from redis_benchmarks_specification.__builder__.builder import (
generate_benchmark_stream_request,
store_airgap_image_redis,
)
from redis_benchmarks_specification.__common__.github import (
update_comment_if_needed,
@@ -92,6 +95,10 @@ def trigger_tests_dockerhub_cli_command_logic(args, project_name, project_versio
server_name = args.server_name
build_stream_fields["server_name"] = server_name
build_stream_fields["mnt_point"] = args.mnt_point
if args.docker_dont_air_gap is False:
docker_client = docker.from_env()
store_airgap_image_redis(conn, docker_client, args.run_image)

if result is True:
benchmark_stream_id = conn.xadd(
STREAM_KEYNAME_NEW_BUILD_EVENTS, build_stream_fields
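
Note on the gating above: because docker_dont_air_gap defaults to False, the dockerhub cli trigger now air-gaps the run image into redis by default, and --docker-dont-air-gap opts out, e.g. for setups where the workers can pull the image from a registry themselves.
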
130 changes: 130 additions & 0 deletions utils/tests/test_self_contained_coordinator_memtier.py
@@ -502,6 +502,7 @@ def test_dockerhub_via_cli():
)
parser = spec_cli_args(parser)
run_args = [
"--docker-dont-air-gap",
"--server_name",
"valkey",
"--run_image",
@@ -609,6 +610,135 @@ def test_dockerhub_via_cli():
assert redis_version == rts.info(hash_key).labels["version"]


def test_dockerhub_via_cli_airgap():
if run_coordinator_tests_dockerhub():
import argparse

db_port = int(os.getenv("DATASINK_PORT", "6379"))
conn = redis.StrictRedis(port=db_port)
conn.ping()
conn.flushall()
redis_version = "7.2.6"
run_image = f"valkey/valkey:{redis_version}-bookworm"
github_org = "valkey"
github_repo = "valkey"

db_port = os.getenv("DATASINK_PORT", "6379")

        # the cli trigger is expected to exit cleanly via SystemExit (code 0)
parser = argparse.ArgumentParser(
description="test",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser = spec_cli_args(parser)
run_args = [
"--server_name",
"valkey",
"--run_image",
run_image,
"--gh_org",
github_org,
"--gh_repo",
github_repo,
"--redis_port",
"{}".format(db_port),
]
args = parser.parse_args(
args=run_args,
)
try:
trigger_tests_dockerhub_cli_command_logic(args, "tool", "v0")
except SystemExit as e:
assert e.code == 0

# confirm request was made via the cli
assert conn.exists(STREAM_KEYNAME_NEW_BUILD_EVENTS)
assert conn.xlen(STREAM_KEYNAME_NEW_BUILD_EVENTS) > 0
running_platform = "fco-ThinkPad-T490"

build_runners_consumer_group_create(conn, running_platform, "0")
datasink_conn = redis.StrictRedis(port=db_port)
docker_client = docker.from_env()
home = str(Path.home())
stream_id = ">"
topologies_map = get_topologies(
"./redis_benchmarks_specification/setups/topologies/topologies.yml"
)
        # we use a benchmark spec with a smaller CPU limit for the client,
        # given GitHub machines only have 2 cores and we need 1 core for the DB and another for the client
testsuite_spec_files = [
"./utils/tests/test_data/test-suites/test-memtier-dockerhub.yml"
]
defaults_filename = "./utils/tests/test_data/test-suites/defaults.yml"
(
_,
_,
default_metrics,
_,
_,
_,
) = get_defaults(defaults_filename)

(
result,
stream_id,
number_processed_streams,
num_process_test_suites,
) = self_contained_coordinator_blocking_read(
conn,
True,
docker_client,
home,
stream_id,
datasink_conn,
testsuite_spec_files,
topologies_map,
running_platform,
False,
[],
"",
0,
6399,
1,
True,
5,
default_metrics,
"amd64",
None,
0,
10000,
"unstable",
"",
True,
False,
)

assert result == True
assert number_processed_streams == 1
assert num_process_test_suites == 1
by_version_key = f"ci.benchmarks.redislabs/ci/{github_org}/{github_repo}/memtier_benchmark-1Mkeys-load-string-with-10B-values/by.version/{redis_version}/benchmark_end/oss-standalone/memory_maxmemory"
assert datasink_conn.exists(by_version_key)
rts = datasink_conn.ts()
# check we have by version metrics
assert "version" in rts.info(by_version_key).labels
assert redis_version == rts.info(by_version_key).labels["version"]

# get all keys
all_keys = datasink_conn.keys("*")
by_hash_keys = []
for key in all_keys:
if "/by.hash/" in key.decode():
by_hash_keys.append(key)

# ensure we have by hash keys
assert len(by_hash_keys) > 0
for hash_key in by_hash_keys:
# ensure we have both version and hash info on the key
assert "version" in rts.info(hash_key).labels
assert "hash" in rts.info(hash_key).labels
assert redis_version == rts.info(hash_key).labels["version"]


def test_self_contained_coordinator_skip_build_variant():
try:
if run_coordinator_tests():
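
As with the other dockerhub tests, test_dockerhub_via_cli_airgap is guarded by run_coordinator_tests_dockerhub(), so it only executes in environments where the dockerhub coordinator tests are enabled.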
