diff --git a/pyproject.toml b/pyproject.toml
index 39e20f8..8e1b547 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "redis-benchmarks-specification"
-version = "0.1.216"
+version = "0.1.217"
 description = "The Redis benchmarks specification describes the cross-language/tools requirements and expectations to foster performance and observability standards around redis related technologies. Members from both industry and academia, including organizations and individuals are encouraged to contribute."
 authors = ["filipecosta90 ","Redis Performance Group "]
 readme = "Readme.md"
diff --git a/redis_benchmarks_specification/__builder__/builder.py b/redis_benchmarks_specification/__builder__/builder.py
index 678a71d..70c0132 100644
--- a/redis_benchmarks_specification/__builder__/builder.py
+++ b/redis_benchmarks_specification/__builder__/builder.py
@@ -234,6 +234,7 @@ def builder_process_stream(
         if b"git_hash" in testDetails:
             git_hash = testDetails[b"git_hash"]
             logging.info("Received commit hash specifier {}.".format(git_hash))
+            logging.info(f"Received the following build stream: {testDetails}.")
             binary_zip_key = testDetails[b"zip_archive_key"]
             logging.info(
                 "Retriving zipped source from key {}.".format(
@@ -271,6 +272,18 @@ def builder_process_stream(
             if b"tests_groups_regexp" in testDetails:
                 tests_groups_regexp = testDetails[b"tests_groups_regexp"].decode()
 
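+            # Org/repo default to the upstream "redis" values; a build stream
+            # may override both (e.g. valkey-io/valkey).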
+            github_org = "redis"
+            if b"github_org" in testDetails:
+                github_org = testDetails[b"github_org"].decode()
+                logging.info(f"detected github_org info on build stream {github_org}")
+
+            github_repo = "redis"
+            if b"github_repo" in testDetails:
+                github_repo = testDetails[b"github_repo"].decode()
+                logging.info(f"detected github_repo info on build stream {github_repo}")
+
             # github updates
             is_actionable_pr = False
             contains_regression_comment = False
@@ -321,6 +334,16 @@ def builder_process_stream(
                     build_artifacts = ["redis-server"]
                     if "build_artifacts" in build_config:
                         build_artifacts = build_config["build_artifacts"]
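+                    # A comma-separated build_artifacts field on the stream
+                    # overrides the artifact list from the build spec.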
+                    if b"build_artifacts" in testDetails:
+                        new_build_artifacts = (
+                            testDetails[b"build_artifacts"].decode().split(",")
+                        )
+                        logging.info(
+                            f"overriding default build artifacts {build_artifacts} by {new_build_artifacts}"
+                        )
+                        build_artifacts = new_build_artifacts
                     build_vars_str = ""
                     if "env" in build_config:
                         if build_config["env"] is not None:
@@ -361,6 +384,14 @@ def builder_process_stream(
                         "redis-server",
                         build_vars_str,
                     )
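+                    # The stream may also override the build command and the
+                    # server name (e.g. valkey-server instead of redis-server).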
+                    if b"build_command" in testDetails:
+                        build_command = testDetails[b"build_command"].decode()
+                    server_name = "redis"
+                    if b"server_name" in testDetails:
+                        server_name = testDetails[b"server_name"].decode()
+
                     build_start_datetime = datetime.datetime.utcnow()
                     logging.info(
                         "Using the following build command {}.".format(build_command)
                     )
@@ -435,6 +466,9 @@ def builder_process_stream(
                         tests_priority_upper_limit,
                         tests_regexp,
                         use_git_timestamp,
+                        server_name,
+                        github_org,
+                        github_repo,
                     )
                     if result is True:
                         benchmark_stream_id = conn.xadd(
@@ -572,6 +606,9 @@ def generate_benchmark_stream_request(
     tests_priority_upper_limit=10000,
     tests_regexp=".*",
     use_git_timestamp=False,
+    server_name="redis",
+    github_org="redis",
+    github_repo="redis",
 ):
     build_stream_fields = {
         "id": id,
@@ -584,6 +621,9 @@ def generate_benchmark_stream_request(
         "tests_priority_upper_limit": tests_priority_upper_limit,
         "tests_priority_lower_limit": tests_priority_lower_limit,
         "tests_groups_regexp": tests_groups_regexp,
+        "server_name": server_name,
+        "github_org": github_org,
+        "github_repo": github_repo,
     }
     if build_config_metadata is not None:
         build_stream_fields["metadata"] = json.dumps(build_config_metadata)
@@ -594,6 +634,7 @@ def generate_benchmark_stream_request(
     if build_vars_str is not None:
         build_stream_fields["build_vars"] = build_vars_str
     if build_command is not None:
+        logging.info(f"adding build_command: {build_command}")
         build_stream_fields["build_command"] = build_command
     if build_image is not None:
         build_stream_fields["build_image"] = build_image
diff --git a/redis_benchmarks_specification/__cli__/args.py b/redis_benchmarks_specification/__cli__/args.py
index 44f78e9..b685455 100644
--- a/redis_benchmarks_specification/__cli__/args.py
+++ b/redis_benchmarks_specification/__cli__/args.py
@@ -166,6 +166,21 @@ def spec_cli_args(parser):
         action="store_true",
         help="Iterate over the git commits.",
     )
+    parser.add_argument(
+        "--build_artifacts",
+        type=str,
+        default="",
+    )
+    parser.add_argument(
+        "--build_command",
+        type=str,
+        default="",
+    )
+    parser.add_argument(
+        "--git_hash",
+        type=str,
+        default="",
+    )
     parser.add_argument(
         "--dry-run",
         default=False,
diff --git a/redis_benchmarks_specification/__cli__/cli.py b/redis_benchmarks_specification/__cli__/cli.py
index 2baddd4..0b6eaf4 100644
--- a/redis_benchmarks_specification/__cli__/cli.py
+++ b/redis_benchmarks_specification/__cli__/cli.py
@@ -355,6 +355,14 @@ def trigger_tests_cli_command_logic(args, project_name, project_version):
     filtered_hash_commits = []
     for cdict in commits:
         commit_hash = cdict["git_hash"]
+        if args.git_hash != "":
+            if args.git_hash != commit_hash:
+                logging.info(
+                    "Skipping {} given it does not match commit hash {}".format(
+                        commit_hash, args.git_hash
+                    )
+                )
+                continue
         commit_summary = cdict["commit_summary"]
         commit_datetime = cdict["commit_datetime"]
         match_obj = re.search(hash_regexp_string, commit_hash)
@@ -412,6 +420,14 @@ def trigger_tests_cli_command_logic(args, project_name, project_version):
         commit_dict["tests_priority_lower_limit"] = tests_priority_lower_limit
         commit_dict["tests_regexp"] = tests_regexp
         commit_dict["tests_groups_regexp"] = tests_groups_regexp
+        commit_dict["github_org"] = args.gh_org
+        commit_dict["github_repo"] = args.gh_repo
+        if args.server_name is not None and args.server_name != "":
+            commit_dict["server_name"] = args.server_name
+        if args.build_artifacts != "":
+            commit_dict["build_artifacts"] = args.build_artifacts
+        if args.build_command != "":
+            commit_dict["build_command"] = args.build_command
         if pull_request is not None:
             logging.info(
                 f"Have a pull request info to include in build request {pull_request}"
diff --git a/redis_benchmarks_specification/__common__/runner.py b/redis_benchmarks_specification/__common__/runner.py
index 39f3046..64667e2 100644
--- a/redis_benchmarks_specification/__common__/runner.py
+++ b/redis_benchmarks_specification/__common__/runner.py
@@ -150,7 +150,7 @@ def exporter_datasink_common(
     git_hash=None,
 ):
     logging.info(
-        f"Using datapoint_time_ms: {datapoint_time_ms}. git_has={git_hash}, git_branch={git_branch}, git_version={git_version}"
+        f"Using datapoint_time_ms: {datapoint_time_ms}. git_hash={git_hash}, git_branch={git_branch}, git_version={git_version}. gh_org={tf_github_org}, gh_repo={tf_github_repo}"
     )
     timeseries_test_sucess_flow(
         datasink_push_results_redistimeseries,
diff --git a/redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py b/redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py
index 42d77bf..50a985b 100644
--- a/redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py
+++ b/redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py
@@ -547,6 +547,13 @@ def process_self_contained_coordinator_stream(
                     logging.info(
                         f"detected a server_name definition on the streamdata: {server_name}."
                     )
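+                    # Derive the server executable from server_name and the
+                    # artifact mount point (e.g. {mnt_point}valkey-server).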
+                    new_executable = f"{mnt_point}{server_name}-server"
+                    logging.info(
+                        f"changing executable from {executable} to {new_executable}"
+                    )
+                    executable = new_executable
 
                 if b"restore_build_artifacts" in testDetails:
                     restore_build_artifacts = bool(
diff --git a/utils/tests/test_builder.py b/utils/tests/test_builder.py
index 9f826fb..4d70035 100644
--- a/utils/tests/test_builder.py
+++ b/utils/tests/test_builder.py
@@ -4,7 +4,13 @@
 # All rights reserved.
 #
 import os
+import logging
+from pathlib import Path
+
+import docker
+
+from redis_benchmarks_specification.__cli__.args import spec_cli_args
+from redis_benchmarks_specification.__cli__.cli import trigger_tests_cli_command_logic
 from redis_benchmarks_specification.__common__.builder_schema import (
     commit_schema_to_stream,
     get_branch_version_from_test_details,
@@ -20,6 +26,17 @@
     STREAM_KEYNAME_GH_EVENTS_COMMIT,
     STREAM_KEYNAME_NEW_BUILD_EVENTS,
 )
+from redis_benchmarks_specification.__common__.timeseries import (
+    get_ts_metric_name,
+    get_overall_dashboard_keynames,
+)
+from redis_benchmarks_specification.__self_contained_coordinator__.runners import (
+    build_runners_consumer_group_create,
+)
+from redis_benchmarks_specification.__self_contained_coordinator__.self_contained_coordinator import (
+    self_contained_coordinator_blocking_read,
+)
+from redis_benchmarks_specification.__setups__.topologies import get_topologies
 
 
 def test_build_spec_image_prefetch():
@@ -133,3 +150,244 @@ def test_get_branch_version_from_test_details():
     testDetails = {b"git_version": "555.555.555"}
     _, git_version = get_branch_version_from_test_details(testDetails)
     assert git_version == "555.555.555"
+
+
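+# Exercises the new CLI flags end to end: trigger a build request for a specific
+# org/repo/hash, run the builder on it, then consume the benchmark stream locally.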
+def test_cli_build():
+    try:
+        # if should_run_builder():
+        if True:
+
+            db_port = int(os.getenv("DATASINK_PORT", "6379"))
+            conn = redis.StrictRedis(port=db_port)
+            conn.ping()
+            conn.flushall()
+
+            builder_consumer_group_create(conn, "0")
+
+            import argparse
+
+            run_image = "debian:buster"
+            github_org = "valkey-io"
+            github_repo = "valkey"
+            git_hash = "7795152fff06f8200f5e4239ff612b240f638e14"
+            git_branch = "unstable"
+
+            # requires --use-branch (or --use-tags); passed via run_args below
+            parser = argparse.ArgumentParser(
+                description="test",
+                formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+            )
+            parser = spec_cli_args(parser)
+            run_args = [
+                "--docker-dont-air-gap",
+                "--server_name",
+                "valkey",
+                "--run_image",
+                run_image,
+                "--gh_org",
+                github_org,
+                "--gh_repo",
+                github_repo,
+                "--redis_port",
+                "{}".format(db_port),
+                "--use-branch",
+                "--build_artifacts",
+                "valkey-server",
+                "--build_command",
+                "sh -c 'make -j'",
+                "--git_hash",
+                git_hash,
+                "--branch",
+                git_branch,
+            ]
+            logging.info("running with args: {}".format(" ".join(run_args)))
+            args = parser.parse_args(
+                args=run_args,
+            )
+            try:
+                trigger_tests_cli_command_logic(args, "tool", "v0")
+            except SystemExit as e:
+                assert e.code == 0
+            assert STREAM_KEYNAME_GH_EVENTS_COMMIT.encode() in conn.keys()
+            events_in_pipe = conn.xlen(STREAM_KEYNAME_GH_EVENTS_COMMIT)
+            assert events_in_pipe > 0
+            builders_folder = "./redis_benchmarks_specification/setups/builders"
+            different_build_specs = ["gcc:8.5.0-amd64-debian-buster-default.yml"]
+            previous_id = ">"
+            previous_id, new_builds_count, _ = builder_process_stream(
+                builders_folder, conn, different_build_specs, previous_id
+            )
+            assert new_builds_count == 1
+            assert conn.exists(STREAM_KEYNAME_NEW_BUILD_EVENTS)
+
+            running_platform = "fco-ThinkPad-T490"
+
+            build_runners_consumer_group_create(conn, running_platform, "0")
+            datasink_conn = redis.StrictRedis(port=db_port)
+            docker_client = docker.from_env()
+            home = str(Path.home())
+            stream_id = ">"
+            topologies_map = get_topologies(
+                "./redis_benchmarks_specification/setups/topologies/topologies.yml"
+            )
+            # we use a benchmark spec with smaller CPU limit for client given github machines only contain 2 cores
+            # and we need 1 core for DB and another for CLIENT
+            testsuite_spec_files = [
+                "./utils/tests/test_data/test-suites/memtier_benchmark-1Mkeys-100B-expire-use-case.yml"
+            ]
+            (
+                result,
+                stream_id,
+                number_processed_streams,
+                _,
+            ) = self_contained_coordinator_blocking_read(
+                conn,
+                True,
+                docker_client,
+                home,
+                stream_id,
+                datasink_conn,
+                testsuite_spec_files,
+                topologies_map,
+                running_platform,
+                False,
+                [],
+                "",
+                0,
+                6399,
+                1,
+                False,
+                1,
+                None,
+                "amd64",
+                None,
+                0,
+                10000,
+                "unstable",
+                "",
+                True,
+            )
+            assert result == True
+            assert number_processed_streams == 1
+
+            test_name = "memtier_benchmark-1Mkeys-100B-expire-use-case"
+            tf_triggering_env = "ci"
+            deployment_name = "oss-standalone"
+            deployment_type = "oss-standalone"
+            use_metric_context_path = False
+            metric_context_path = None
+            gh_org = github_org
+            gh_repo = github_repo
+            build_variant_name = "gcc:8.5.0-amd64-debian-buster-default"
+            for metric_name in ["ALL_STATS.Totals.Latency", "ALL_STATS.Totals.Ops/sec"]:
+                ts_key_name = get_ts_metric_name(
+                    "by.branch",
+                    "unstable",
+                    gh_org,
+                    gh_repo,
+                    deployment_name,
+                    deployment_type,
+                    test_name,
+                    tf_triggering_env,
+                    metric_name,
+                    metric_context_path,
+                    use_metric_context_path,
+                    build_variant_name,
+                    running_platform,
+                )
+                rts = datasink_conn.ts()
+                assert ts_key_name.encode() in conn.keys()
+                assert len(rts.range(ts_key_name, 0, "+")) == 1
+            (
+                prefix,
+                testcases_setname,
+                deployment_name_setname,
+                tsname_project_total_failures,
+                tsname_project_total_success,
+                running_platforms_setname,
+                build_variant_setname,
+                testcases_metric_context_path_setname,
+                testcases_and_metric_context_path_setname,
+                project_archs_setname,
+                project_oss_setname,
+                project_branches_setname,
+                project_versions_setname,
+                project_compilers_setname,
+            ) = get_overall_dashboard_keynames(
+                gh_org,
+                gh_repo,
+                tf_triggering_env,
+                build_variant_name,
+                running_platform,
+                test_name,
+            )
+
+            assert datasink_conn.exists(testcases_setname)
+            assert datasink_conn.exists(running_platforms_setname)
+            assert datasink_conn.exists(build_variant_setname)
+            assert datasink_conn.exists(testcases_and_metric_context_path_setname)
+            assert datasink_conn.exists(testcases_metric_context_path_setname)
+            assert build_variant_name.encode() in datasink_conn.smembers(
+                build_variant_setname
+            )
+            assert test_name.encode() in datasink_conn.smembers(testcases_setname)
+            assert running_platform.encode() in datasink_conn.smembers(
+                running_platforms_setname
+            )
+            testcases_and_metric_context_path_members = [
+                x.decode()
+                for x in datasink_conn.smembers(
+                    testcases_and_metric_context_path_setname
+                )
+            ]
+            metric_context_path_members = [
+                x.decode()
+                for x in datasink_conn.smembers(testcases_metric_context_path_setname)
+            ]
+            assert len(testcases_and_metric_context_path_members) == len(
+                metric_context_path_members
+            )
+
+            assert [x.decode() for x in datasink_conn.smembers(testcases_setname)] == [
+                test_name
+            ]
+
+            assert "amd64".encode() in datasink_conn.smembers(project_archs_setname)
+            assert "debian-buster".encode() in datasink_conn.smembers(
+                project_oss_setname
+            )
+            assert "gcc".encode() in datasink_conn.smembers(project_compilers_setname)
+            assert build_variant_name.encode() in datasink_conn.smembers(
+                build_variant_setname
+            )
+            assert running_platform.encode() in datasink_conn.smembers(
+                running_platforms_setname
+            )
+
+            assert len(datasink_conn.smembers(project_archs_setname)) == 1
+            assert len(datasink_conn.smembers(project_oss_setname)) == 1
+            assert len(datasink_conn.smembers(project_compilers_setname)) == 1
+            assert len(datasink_conn.smembers(build_variant_setname)) == 1
+            assert len(datasink_conn.smembers(running_platforms_setname)) == 1
+            assert len(datasink_conn.smembers(testcases_setname)) == 1
+            assert len(datasink_conn.smembers(project_branches_setname)) == 1
+            assert len(datasink_conn.smembers(project_versions_setname)) == 1
+
+            # get all keys
+            all_keys = datasink_conn.keys("*")
+            by_hash_keys = []
+            for key in all_keys:
+                if "/by.hash/" in key.decode():
+                    by_hash_keys.append(key)
+
+            # ensure we have by hash keys
+            assert len(by_hash_keys) > 0
+            for hash_key in by_hash_keys:
+                # ensure we have both version and hash info on the key
+                assert "branch" in rts.info(hash_key).labels
+                assert "hash" in rts.info(hash_key).labels
+
+    except redis.exceptions.ConnectionError:
+        pass
diff --git a/utils/tests/test_data/api_builder_common.py b/utils/tests/test_data/api_builder_common.py
index 2017930..b43333b 100644
--- a/utils/tests/test_data/api_builder_common.py
+++ b/utils/tests/test_data/api_builder_common.py
@@ -13,19 +13,37 @@
 def flow_1_and_2_api_builder_checks(
-    conn, build_spec_name="gcc:8.5.0-amd64-debian-buster-default"
+    conn,
+    build_spec_name="gcc:8.5.0-amd64-debian-buster-default",
+    gh_org="redis",
+    gh_repo="redis",
+    git_hash="0cf2df84d4b27af4bffd2bf3543838f09e10f874",
+    git_branch="unstable",
+    build_command=None,
+    build_artifacts=None,
+    server_name=None,
 ):
     builder_consumer_group_create(conn)
     assert conn.xlen(STREAM_KEYNAME_GH_EVENTS_COMMIT) == 0
+    fields = {
+        "git_hash": git_hash,
+        "git_branch": git_branch,
+        "github_org": gh_org,
+        "github_repo": gh_repo,
+        "use_git_timestamp": True,
+    }
+    if server_name is not None:
+        fields["server_name"] = server_name
+    if build_command is not None:
+        fields["build_command"] = build_command
+    if build_artifacts is not None:
+        fields["build_artifacts"] = ",".join(build_artifacts)
+
     result, reply_fields, error_msg = commit_schema_to_stream(
-        {
-            "git_hash": "0cf2df84d4b27af4bffd2bf3543838f09e10f874",
-            "git_branch": "unstable",
-            "use_git_timestamp": True,
-        },
+        fields,
         conn,
-        "redis",
-        "redis",
+        gh_org,
+        gh_repo,
         GH_TOKEN,
     )
diff --git a/utils/tests/test_self_contained_coordinator_memtier.py b/utils/tests/test_self_contained_coordinator_memtier.py
index ec4794b..b66b179 100644
--- a/utils/tests/test_self_contained_coordinator_memtier.py
+++ b/utils/tests/test_self_contained_coordinator_memtier.py
@@ -102,8 +102,6 @@ def test_self_contained_coordinator_blocking_read():
         )
         assert result == True
         assert number_processed_streams == 1
-        tf_github_org = "redis"
-        tf_github_repo = "redis"
         test_name = "memtier_benchmark-1Mkeys-100B-expire-use-case"
         tf_triggering_env = "ci"
         deployment_name = "oss-standalone"
@@ -628,11 +626,16 @@ def test_self_contained_coordinator_dockerhub_valkey():
                 build_arch,
                 testDetails,
                 build_os,
+                [],
+                "sudo bash -c 'make -j'",
             )
             build_stream_fields["github_repo"] = github_repo
             build_stream_fields["github_org"] = github_org
             build_stream_fields["server_name"] = github_repo
             build_stream_fields["mnt_point"] = ""
+            logging.info(
+                f"requesting stream with following info: {build_stream_fields}"
+            )
             if result is True:
                 benchmark_stream_id = conn.xadd(
                     STREAM_KEYNAME_NEW_BUILD_EVENTS, build_stream_fields
@@ -1091,3 +1094,194 @@ def test_prepare_memtier_benchmark_parameters():
         benchmark_command_str
         == 'memtier_benchmark --port 12000 --server localhost --json-out-file 1.json --cluster-mode "--data-size" "100" --command "SETEX __key__ 10 __data__" --command-key-pattern="R" --command "SET __key__ __data__" --command-key-pattern="R" --command "GET __key__" --command-key-pattern="R" --command "DEL __key__" --command-key-pattern="R" -c 50 -t 2 --hide-histogram --test-time 300'
     )
+
+
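+# Same flow as test_self_contained_coordinator_blocking_read above, but driving
+# a valkey-io/valkey build with an overridden build command and artifact list.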
+def test_self_contained_coordinator_blocking_read_valkey():
+    try:
+        if run_coordinator_tests():
+            db_port = int(os.getenv("DATASINK_PORT", "6379"))
+            conn = redis.StrictRedis(port=db_port)
+            conn.ping()
+            expected_datapoint_ts = None
+            conn.flushall()
+            gh_org = "valkey-io"
+            gh_repo = "valkey"
+            build_spec_name = "gcc:8.5.0-amd64-debian-buster-default"
+            git_hash = "7795152fff06f8200f5e4239ff612b240f638e14"
+            git_branch = "unstable"
+            build_artifacts = ["valkey-server"]
+
+            build_variant_name, reply_fields = flow_1_and_2_api_builder_checks(
+                conn,
+                build_spec_name,
+                gh_org,
+                gh_repo,
+                git_hash,
+                git_branch,
+                "sh -c 'make -j'",
+                build_artifacts,
+                "valkey",
+            )
+            if b"git_timestamp_ms" in reply_fields:
+                expected_datapoint_ts = int(reply_fields[b"git_timestamp_ms"].decode())
+            if "git_timestamp_ms" in reply_fields:
+                expected_datapoint_ts = int(reply_fields["git_timestamp_ms"])
+
+            assert conn.exists(STREAM_KEYNAME_NEW_BUILD_EVENTS)
+            assert conn.xlen(STREAM_KEYNAME_NEW_BUILD_EVENTS) > 0
+            running_platform = "fco-ThinkPad-T490"
+
+            build_runners_consumer_group_create(conn, running_platform, "0")
+            datasink_conn = redis.StrictRedis(port=db_port)
+            docker_client = docker.from_env()
+            home = str(Path.home())
+            stream_id = ">"
+            topologies_map = get_topologies(
+                "./redis_benchmarks_specification/setups/topologies/topologies.yml"
+            )
+            # we use a benchmark spec with smaller CPU limit for client given github machines only contain 2 cores
+            # and we need 1 core for DB and another for CLIENT
+            testsuite_spec_files = [
+                "./utils/tests/test_data/test-suites/memtier_benchmark-1Mkeys-100B-expire-use-case.yml"
+            ]
+            (
+                result,
+                stream_id,
+                number_processed_streams,
+                _,
+            ) = self_contained_coordinator_blocking_read(
+                conn,
+                True,
+                docker_client,
+                home,
+                stream_id,
+                datasink_conn,
+                testsuite_spec_files,
+                topologies_map,
+                running_platform,
+                False,
+                [],
+                "",
+                0,
+                6399,
+                1,
+                False,
+                1,
+                None,
+                "amd64",
+                None,
+                0,
+                10000,
+                "unstable",
+                "",
+                True,
+            )
+            assert result == True
+            assert number_processed_streams == 1
+
+            test_name = "memtier_benchmark-1Mkeys-100B-expire-use-case"
+            tf_triggering_env = "ci"
+            deployment_name = "oss-standalone"
+            deployment_type = "oss-standalone"
+            use_metric_context_path = False
+            metric_context_path = None
+            for metric_name in ["ALL_STATS.Totals.Latency", "ALL_STATS.Totals.Ops/sec"]:
+                ts_key_name = get_ts_metric_name(
+                    "by.branch",
+                    "unstable",
+                    gh_org,
+                    gh_repo,
+                    deployment_name,
+                    deployment_type,
+                    test_name,
+                    tf_triggering_env,
+                    metric_name,
+                    metric_context_path,
+                    use_metric_context_path,
+                    build_variant_name,
+                    running_platform,
+                )
+                rts = datasink_conn.ts()
+                assert ts_key_name.encode() in conn.keys()
+                assert len(rts.range(ts_key_name, 0, "+")) == 1
+                if expected_datapoint_ts is not None:
+                    assert rts.range(ts_key_name, 0, "+")[0][0] == expected_datapoint_ts
+            (
+                prefix,
+                testcases_setname,
+                deployment_name_setname,
+                tsname_project_total_failures,
+                tsname_project_total_success,
+                running_platforms_setname,
+                build_variant_setname,
+                testcases_metric_context_path_setname,
+                testcases_and_metric_context_path_setname,
+                project_archs_setname,
+                project_oss_setname,
+                project_branches_setname,
+                project_versions_setname,
+                project_compilers_setname,
+            ) = get_overall_dashboard_keynames(
+                gh_org,
+                gh_repo,
+                tf_triggering_env,
+                build_variant_name,
+                running_platform,
+                test_name,
+            )
+
+            assert datasink_conn.exists(testcases_setname)
+            assert datasink_conn.exists(running_platforms_setname)
+            assert datasink_conn.exists(build_variant_setname)
+            assert datasink_conn.exists(testcases_and_metric_context_path_setname)
+            assert datasink_conn.exists(testcases_metric_context_path_setname)
+            assert build_variant_name.encode() in datasink_conn.smembers(
+                build_variant_setname
+            )
+            assert test_name.encode() in datasink_conn.smembers(testcases_setname)
+            assert running_platform.encode() in datasink_conn.smembers(
+                running_platforms_setname
+            )
+            testcases_and_metric_context_path_members = [
+                x.decode()
+                for x in datasink_conn.smembers(
+                    testcases_and_metric_context_path_setname
+                )
+            ]
+            metric_context_path_members = [
+                x.decode()
+                for x in datasink_conn.smembers(testcases_metric_context_path_setname)
+            ]
+            assert len(testcases_and_metric_context_path_members) == len(
+                metric_context_path_members
+            )
+
+            assert [x.decode() for x in datasink_conn.smembers(testcases_setname)] == [
+                test_name
+            ]
+
+            assert "amd64".encode() in datasink_conn.smembers(project_archs_setname)
+            assert "debian-buster".encode() in datasink_conn.smembers(
+                project_oss_setname
+            )
+            assert "gcc".encode() in datasink_conn.smembers(project_compilers_setname)
+            assert build_variant_name.encode() in datasink_conn.smembers(
+                build_variant_setname
+            )
+            assert running_platform.encode() in datasink_conn.smembers(
+                running_platforms_setname
+            )
+
+            assert len(datasink_conn.smembers(project_archs_setname)) == 1
+            assert len(datasink_conn.smembers(project_oss_setname)) == 1
+            assert len(datasink_conn.smembers(project_compilers_setname)) == 1
+            assert len(datasink_conn.smembers(build_variant_setname)) == 1
+            assert len(datasink_conn.smembers(running_platforms_setname)) == 1
+            assert len(datasink_conn.smembers(testcases_setname)) == 1
+            assert len(datasink_conn.smembers(project_branches_setname)) == 1
+            assert len(datasink_conn.smembers(project_versions_setname)) == 1
+
+    except redis.exceptions.ConnectionError:
+        pass