From 6b2e7720ca2455efd1f2785ef97c42dfb9f96ee7 Mon Sep 17 00:00:00 2001
From: fcosta_oliveira
Date: Thu, 8 Aug 2024 23:09:48 +0100
Subject: [PATCH] Include a way of triggering dockerhub run

---
 .../__cli__/args.py                           |   8 +-
 redis_benchmarks_specification/__cli__/cli.py |  55 ++++++-
 ...test_self_contained_coordinator_memtier.py | 140 +++++++++++++++++-
 3 files changed, 196 insertions(+), 7 deletions(-)

diff --git a/redis_benchmarks_specification/__cli__/args.py b/redis_benchmarks_specification/__cli__/args.py
index a45a8cb..c546c06 100644
--- a/redis_benchmarks_specification/__cli__/args.py
+++ b/redis_benchmarks_specification/__cli__/args.py
@@ -21,6 +21,7 @@
 START_TIME_LAST_YEAR_UTC = START_TIME_NOW_UTC - datetime.timedelta(days=90)
 
 CLI_TOOL_STATS = "stats"
 CLI_TOOL_TRIGGER = "trigger"
+CLI_TOOL_DOCKERHUB = "dockerhub"
 
 PERFORMANCE_GH_TOKEN = os.getenv("PERFORMANCE_GH_TOKEN", None)
@@ -109,7 +110,7 @@ def spec_cli_args(parser):
         type=str,
         default=CLI_TOOL_TRIGGER,
         help="subtool to use. One of '{}' ".format(
-            ",".join([CLI_TOOL_STATS, CLI_TOOL_TRIGGER])
+            ",".join([CLI_TOOL_STATS, CLI_TOOL_TRIGGER, CLI_TOOL_DOCKERHUB])
         ),
     )
     parser.add_argument("--gh_token", type=str, default=GH_TOKEN)
@@ -129,6 +130,11 @@
     parser.add_argument("--redis_repo", type=str, default=None)
     parser.add_argument("--gh_org", type=str, default="redis")
     parser.add_argument("--gh_repo", type=str, default="redis")
+    parser.add_argument("--server_name", type=str, default=None)
+    parser.add_argument("--run_image", type=str, default="redis")
+    parser.add_argument("--build_arch", type=str, default="amd64")
+    parser.add_argument("--id", type=str, default="dockerhub")
+    parser.add_argument("--mnt_point", type=str, default="")
     parser.add_argument("--trigger-unstable-commits", type=bool, default=True)
     parser.add_argument(
         "--use-tags",
diff --git a/redis_benchmarks_specification/__cli__/cli.py b/redis_benchmarks_specification/__cli__/cli.py
index 661d4f8..e71246b 100644
--- a/redis_benchmarks_specification/__cli__/cli.py
+++ b/redis_benchmarks_specification/__cli__/cli.py
@@ -18,7 +18,9 @@
 from packaging import version
 import time
 
-
+from redis_benchmarks_specification.__builder__.builder import (
+    generate_benchmark_stream_request,
+)
 from redis_benchmarks_specification.__common__.github import (
     update_comment_if_needed,
     create_new_pr_comment,
@@ -38,6 +40,7 @@
     REDIS_BINS_EXPIRE_SECS,
     STREAM_KEYNAME_GH_EVENTS_COMMIT,
     STREAM_GH_EVENTS_COMMIT_BUILDERS_CG,
+    STREAM_KEYNAME_NEW_BUILD_EVENTS,
 )
 from redis_benchmarks_specification.__common__.package import (
     get_version_string,
@@ -52,6 +55,54 @@
 )
 
 
+def trigger_tests_dockerhub_cli_command_logic(args, project_name, project_version):
+    logging.info(
+        "Using: {project_name} {project_version}".format(
+            project_name=project_name, project_version=project_version
+        )
+    )
+    logging.info(
+        "Checking connection to redis with user: {}, host: {}, port: {}".format(
+            args.redis_user,
+            args.redis_host,
+            args.redis_port,
+        )
+    )
+    conn = redis.StrictRedis(
+        host=args.redis_host,
+        port=args.redis_port,
+        password=args.redis_pass,
+        username=args.redis_user,
+        decode_responses=False,
+    )
+    conn.ping()
+    testDetails = {}
+    build_stream_fields, result = generate_benchmark_stream_request(
+        args.id,
+        conn,
+        args.run_image,
+        args.build_arch,
+        testDetails,
+        "n/a",
+    )
+    build_stream_fields["github_repo"] = args.gh_repo
+    build_stream_fields["github_org"] = args.gh_org
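+    # default the logical server name to the GitHub repo unless --server_name overrides it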
build_stream_fields["server_name"] = server_name + build_stream_fields["mnt_point"] = args.mnt_point + if result is True: + benchmark_stream_id = conn.xadd( + STREAM_KEYNAME_NEW_BUILD_EVENTS, build_stream_fields + ) + logging.info( + "sucessfully requested a new run {}. Stream id: {}".format( + build_stream_fields, benchmark_stream_id + ) + ) + + def main(): _, _, project_version = populate_with_poetry_data() project_name = "redis-benchmarks-spec-cli" @@ -65,6 +116,8 @@ def main(): trigger_tests_cli_command_logic(args, project_name, project_version) if args.tool == "stats": generate_stats_cli_command_logic(args, project_name, project_version) + if args.tool == "dockerhub": + trigger_tests_dockerhub_cli_command_logic(args, project_name, project_version) def get_commits_by_branch(args, repo): diff --git a/utils/tests/test_self_contained_coordinator_memtier.py b/utils/tests/test_self_contained_coordinator_memtier.py index 865ef01..216145c 100644 --- a/utils/tests/test_self_contained_coordinator_memtier.py +++ b/utils/tests/test_self_contained_coordinator_memtier.py @@ -13,6 +13,10 @@ from redis_benchmarks_specification.__builder__.builder import ( generate_benchmark_stream_request, ) +from redis_benchmarks_specification.__cli__.args import spec_cli_args +from redis_benchmarks_specification.__cli__.cli import ( + trigger_tests_dockerhub_cli_command_logic, +) from redis_benchmarks_specification.__common__.env import ( STREAM_KEYNAME_NEW_BUILD_EVENTS, ) @@ -359,6 +363,8 @@ def test_self_contained_coordinator_dockerhub_valkey(): id = "dockerhub" redis_version = "7.2.6" run_image = f"valkey/valkey:{redis_version}-bookworm" + github_org = "valkey" + github_repo = "valkey" build_arch = "amd64" testDetails = {} build_os = "test_build_os" @@ -370,8 +376,6 @@ def test_self_contained_coordinator_dockerhub_valkey(): testDetails, build_os, ) - github_org = "valkey" - github_repo = "valkey" build_stream_fields["github_repo"] = github_repo build_stream_fields["github_org"] = github_org build_stream_fields["server_name"] = github_repo @@ -386,9 +390,6 @@ def test_self_contained_coordinator_dockerhub_valkey(): ) ) - build_variant_name = "gcc:8.5.0-amd64-debian-buster-default" - expected_datapoint_ts = None - assert conn.exists(STREAM_KEYNAME_NEW_BUILD_EVENTS) assert conn.xlen(STREAM_KEYNAME_NEW_BUILD_EVENTS) > 0 running_platform = "fco-ThinkPad-T490" @@ -479,6 +480,135 @@ def test_self_contained_coordinator_dockerhub_valkey(): pass +def test_dockerhub_via_cli(): + if run_coordinator_tests_dockerhub(): + import argparse + + db_port = int(os.getenv("DATASINK_PORT", "6379")) + conn = redis.StrictRedis(port=db_port) + conn.ping() + conn.flushall() + redis_version = "7.2.6" + run_image = f"valkey/valkey:{redis_version}-bookworm" + github_org = "valkey" + github_repo = "valkey" + + db_port = os.getenv("DATASINK_PORT", "6379") + + # should error due to missing --use-tags or --use-branch + parser = argparse.ArgumentParser( + description="test", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) + parser = spec_cli_args(parser) + run_args = [ + "--server_name", + "valkey", + "--run_image", + run_image, + "--gh_org", + github_org, + "--gh_repo", + github_repo, + "--redis_port", + "{}".format(db_port), + ] + args = parser.parse_args( + args=run_args, + ) + try: + trigger_tests_dockerhub_cli_command_logic(args, "tool", "v0") + except SystemExit as e: + assert e.code == 0 + + # confirm request was made via the cli + assert conn.exists(STREAM_KEYNAME_NEW_BUILD_EVENTS) + assert 
+        running_platform = "fco-ThinkPad-T490"
+
+        build_runners_consumer_group_create(conn, running_platform, "0")
+        datasink_conn = redis.StrictRedis(port=db_port)
+        docker_client = docker.from_env()
+        home = str(Path.home())
+        stream_id = ">"
+        topologies_map = get_topologies(
+            "./redis_benchmarks_specification/setups/topologies/topologies.yml"
+        )
+        # we use a benchmark spec with a smaller CPU limit for the client, given
+        # GitHub runners only have 2 cores: 1 for the DB and 1 for the client
+        testsuite_spec_files = [
+            "./utils/tests/test_data/test-suites/test-memtier-dockerhub.yml"
+        ]
+        defaults_filename = "./utils/tests/test_data/test-suites/defaults.yml"
+        (
+            _,
+            _,
+            default_metrics,
+            _,
+            _,
+            _,
+        ) = get_defaults(defaults_filename)
+
+        (
+            result,
+            stream_id,
+            number_processed_streams,
+            num_process_test_suites,
+        ) = self_contained_coordinator_blocking_read(
+            conn,
+            True,
+            docker_client,
+            home,
+            stream_id,
+            datasink_conn,
+            testsuite_spec_files,
+            topologies_map,
+            running_platform,
+            False,
+            [],
+            "",
+            0,
+            6399,
+            1,
+            False,
+            5,
+            default_metrics,
+            "amd64",
+            None,
+            0,
+            10000,
+            "unstable",
+            "",
+            True,
+            False,
+        )
+
+        assert result == True
+        assert number_processed_streams == 1
+        assert num_process_test_suites == 1
+        by_version_key = f"ci.benchmarks.redislabs/ci/{github_org}/{github_repo}/memtier_benchmark-1Mkeys-load-string-with-10B-values/by.version/{redis_version}/benchmark_end/oss-standalone/memory_maxmemory"
+        assert datasink_conn.exists(by_version_key)
+        rts = datasink_conn.ts()
+        # check we have by version metrics
+        assert "version" in rts.info(by_version_key).labels
+        assert redis_version == rts.info(by_version_key).labels["version"]
+
+        # get all keys
+        all_keys = datasink_conn.keys("*")
+        by_hash_keys = []
+        for key in all_keys:
+            if "/by.hash/" in key.decode():
+                by_hash_keys.append(key)
+
+        # ensure we have by hash keys
+        assert len(by_hash_keys) > 0
+        for hash_key in by_hash_keys:
+            # ensure we have both version and hash info on the key
+            assert "version" in rts.info(hash_key).labels
+            assert "hash" in rts.info(hash_key).labels
+            assert redis_version == rts.info(hash_key).labels["version"]
+
+
 def test_self_contained_coordinator_skip_build_variant():
     try:
         if run_coordinator_tests():