Include a way of triggering dockerhub run
fcostaoliveira committed Aug 8, 2024
1 parent 9d444e4 commit 6b2e772
Showing 3 changed files with 196 additions and 7 deletions.
8 changes: 7 additions & 1 deletion redis_benchmarks_specification/__cli__/args.py
@@ -21,6 +21,7 @@
START_TIME_LAST_YEAR_UTC = START_TIME_NOW_UTC - datetime.timedelta(days=90)
CLI_TOOL_STATS = "stats"
CLI_TOOL_TRIGGER = "trigger"
CLI_TOOL_DOCKERHUB = "dockerhub"
PERFORMANCE_GH_TOKEN = os.getenv("PERFORMANCE_GH_TOKEN", None)


@@ -109,7 +110,7 @@ def spec_cli_args(parser):
        type=str,
        default=CLI_TOOL_TRIGGER,
        help="subtool to use. One of '{}' ".format(
            ",".join([CLI_TOOL_STATS, CLI_TOOL_TRIGGER, CLI_TOOL_DOCKERHUB])
        ),
    )
    parser.add_argument("--gh_token", type=str, default=GH_TOKEN)
@@ -129,6 +130,11 @@
    parser.add_argument("--redis_repo", type=str, default=None)
    parser.add_argument("--gh_org", type=str, default="redis")
    parser.add_argument("--gh_repo", type=str, default="redis")
    parser.add_argument("--server_name", type=str, default=None)
    parser.add_argument("--run_image", type=str, default="redis")
    parser.add_argument("--build_arch", type=str, default="amd64")
    parser.add_argument("--id", type=str, default="dockerhub")
    parser.add_argument("--mnt_point", type=str, default="")
    parser.add_argument("--trigger-unstable-commits", type=bool, default=True)
    parser.add_argument(
        "--use-tags",
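The options added above are plain argparse flags, so they can be exercised in isolation. Below is a minimal sketch (not part of the commit) of how the new dockerhub-related defaults parse; it assumes the redis_benchmarks_specification package is importable:

# Sketch only: exercise the dockerhub-related flags added to spec_cli_args above.
import argparse

from redis_benchmarks_specification.__cli__.args import spec_cli_args

parser = spec_cli_args(argparse.ArgumentParser(description="dockerhub args sketch"))
args = parser.parse_args(["--run_image", "valkey/valkey:7.2.6-bookworm"])

assert args.id == "dockerhub"  # default for --id
assert args.build_arch == "amd64"  # default for --build_arch
assert args.server_name is None  # cli.py falls back to --gh_repo when unset
assert args.mnt_point == ""  # default for --mnt_point
print(args.run_image)  # valkey/valkey:7.2.6-bookworm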
55 changes: 54 additions & 1 deletion redis_benchmarks_specification/__cli__/cli.py
@@ -18,7 +18,9 @@
from packaging import version
import time


from redis_benchmarks_specification.__builder__.builder import (
    generate_benchmark_stream_request,
)
from redis_benchmarks_specification.__common__.github import (
    update_comment_if_needed,
    create_new_pr_comment,
@@ -38,6 +40,7 @@
    REDIS_BINS_EXPIRE_SECS,
    STREAM_KEYNAME_GH_EVENTS_COMMIT,
    STREAM_GH_EVENTS_COMMIT_BUILDERS_CG,
    STREAM_KEYNAME_NEW_BUILD_EVENTS,
)
from redis_benchmarks_specification.__common__.package import (
    get_version_string,
@@ -52,6 +55,54 @@
)


def trigger_tests_dockerhub_cli_command_logic(args, project_name, project_version):
    logging.info(
        "Using: {project_name} {project_version}".format(
            project_name=project_name, project_version=project_version
        )
    )
    logging.info(
        "Checking connection to redis with user: {}, host: {}, port: {}".format(
            args.redis_user,
            args.redis_host,
            args.redis_port,
        )
    )
    conn = redis.StrictRedis(
        host=args.redis_host,
        port=args.redis_port,
        password=args.redis_pass,
        username=args.redis_user,
        decode_responses=False,
    )
    conn.ping()
    testDetails = {}
    build_stream_fields, result = generate_benchmark_stream_request(
        args.id,
        conn,
        args.run_image,
        args.build_arch,
        testDetails,
        "n/a",
    )
    build_stream_fields["github_repo"] = args.gh_repo
    build_stream_fields["github_org"] = args.gh_org
    server_name = args.gh_repo
    if args.server_name is not None:
        server_name = args.server_name
    build_stream_fields["server_name"] = server_name
    build_stream_fields["mnt_point"] = args.mnt_point
    if result is True:
        benchmark_stream_id = conn.xadd(
            STREAM_KEYNAME_NEW_BUILD_EVENTS, build_stream_fields
        )
        logging.info(
            "successfully requested a new run {}. Stream id: {}".format(
                build_stream_fields, benchmark_stream_id
            )
        )


def main():
    _, _, project_version = populate_with_poetry_data()
    project_name = "redis-benchmarks-spec-cli"
@@ -65,6 +116,8 @@ def main():
        trigger_tests_cli_command_logic(args, project_name, project_version)
    if args.tool == "stats":
        generate_stats_cli_command_logic(args, project_name, project_version)
    if args.tool == "dockerhub":
        trigger_tests_dockerhub_cli_command_logic(args, project_name, project_version)


def get_commits_by_branch(args, repo):
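Taken together with the new arguments, the dockerhub subtool can also be driven programmatically, which is what the test added below does. The following sketch (not part of the commit) shows the same flow plus reading back the enqueued entry; it assumes a local Redis on port 6379, the redis-py client, and placeholder project name/version strings that are only used for logging:

# Sketch only: trigger a dockerhub run and inspect the enqueued stream entry.
import argparse

import redis

from redis_benchmarks_specification.__cli__.args import spec_cli_args
from redis_benchmarks_specification.__cli__.cli import (
    trigger_tests_dockerhub_cli_command_logic,
)
from redis_benchmarks_specification.__common__.env import (
    STREAM_KEYNAME_NEW_BUILD_EVENTS,
)

parser = spec_cli_args(argparse.ArgumentParser(description="dockerhub trigger sketch"))
args = parser.parse_args(
    [
        "--server_name",
        "valkey",
        "--run_image",
        "valkey/valkey:7.2.6-bookworm",
        "--gh_org",
        "valkey",
        "--gh_repo",
        "valkey",
        "--redis_port",
        "6379",  # assumes a local Redis on the default port
    ]
)
trigger_tests_dockerhub_cli_command_logic(args, "redis-benchmarks-spec-cli", "0.0.0")

# The request lands on the new-build-events stream consumed by the coordinator.
conn = redis.StrictRedis(port=6379)
for stream_id, fields in conn.xrange(STREAM_KEYNAME_NEW_BUILD_EVENTS):
    print(stream_id, fields[b"github_org"], fields[b"server_name"])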
140 changes: 135 additions & 5 deletions utils/tests/test_self_contained_coordinator_memtier.py
@@ -13,6 +13,10 @@
from redis_benchmarks_specification.__builder__.builder import (
    generate_benchmark_stream_request,
)
from redis_benchmarks_specification.__cli__.args import spec_cli_args
from redis_benchmarks_specification.__cli__.cli import (
    trigger_tests_dockerhub_cli_command_logic,
)
from redis_benchmarks_specification.__common__.env import (
    STREAM_KEYNAME_NEW_BUILD_EVENTS,
)
@@ -359,6 +363,8 @@ def test_self_contained_coordinator_dockerhub_valkey():
            id = "dockerhub"
            redis_version = "7.2.6"
            run_image = f"valkey/valkey:{redis_version}-bookworm"
            github_org = "valkey"
            github_repo = "valkey"
            build_arch = "amd64"
            testDetails = {}
            build_os = "test_build_os"
@@ -370,8 +376,6 @@ def test_self_contained_coordinator_dockerhub_valkey():
                testDetails,
                build_os,
            )
            github_org = "valkey"
            github_repo = "valkey"
            build_stream_fields["github_repo"] = github_repo
            build_stream_fields["github_org"] = github_org
            build_stream_fields["server_name"] = github_repo
@@ -386,9 +390,6 @@ def test_self_contained_coordinator_dockerhub_valkey():
                    )
                )

            build_variant_name = "gcc:8.5.0-amd64-debian-buster-default"
            expected_datapoint_ts = None

            assert conn.exists(STREAM_KEYNAME_NEW_BUILD_EVENTS)
            assert conn.xlen(STREAM_KEYNAME_NEW_BUILD_EVENTS) > 0
            running_platform = "fco-ThinkPad-T490"
@@ -479,6 +480,135 @@ def test_self_contained_coordinator_dockerhub_valkey():
        pass


def test_dockerhub_via_cli():
    if run_coordinator_tests_dockerhub():
        import argparse

        db_port = int(os.getenv("DATASINK_PORT", "6379"))
        conn = redis.StrictRedis(port=db_port)
        conn.ping()
        conn.flushall()
        redis_version = "7.2.6"
        run_image = f"valkey/valkey:{redis_version}-bookworm"
        github_org = "valkey"
        github_repo = "valkey"

        db_port = os.getenv("DATASINK_PORT", "6379")

        # build the dockerhub subtool arguments via the shared CLI parser
        parser = argparse.ArgumentParser(
            description="test",
            formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        )
        parser = spec_cli_args(parser)
        run_args = [
            "--server_name",
            "valkey",
            "--run_image",
            run_image,
            "--gh_org",
            github_org,
            "--gh_repo",
            github_repo,
            "--redis_port",
            "{}".format(db_port),
        ]
        args = parser.parse_args(
            args=run_args,
        )
        try:
            trigger_tests_dockerhub_cli_command_logic(args, "tool", "v0")
        except SystemExit as e:
            assert e.code == 0

        # confirm request was made via the cli
        assert conn.exists(STREAM_KEYNAME_NEW_BUILD_EVENTS)
        assert conn.xlen(STREAM_KEYNAME_NEW_BUILD_EVENTS) > 0
        running_platform = "fco-ThinkPad-T490"

        build_runners_consumer_group_create(conn, running_platform, "0")
        datasink_conn = redis.StrictRedis(port=db_port)
        docker_client = docker.from_env()
        home = str(Path.home())
        stream_id = ">"
        topologies_map = get_topologies(
            "./redis_benchmarks_specification/setups/topologies/topologies.yml"
        )
        # use a benchmark spec with a smaller client CPU limit, since GitHub runners
        # only have 2 cores and we need 1 for the DB and 1 for the client
        testsuite_spec_files = [
            "./utils/tests/test_data/test-suites/test-memtier-dockerhub.yml"
        ]
        defaults_filename = "./utils/tests/test_data/test-suites/defaults.yml"
        (
            _,
            _,
            default_metrics,
            _,
            _,
            _,
        ) = get_defaults(defaults_filename)

        (
            result,
            stream_id,
            number_processed_streams,
            num_process_test_suites,
        ) = self_contained_coordinator_blocking_read(
            conn,
            True,
            docker_client,
            home,
            stream_id,
            datasink_conn,
            testsuite_spec_files,
            topologies_map,
            running_platform,
            False,
            [],
            "",
            0,
            6399,
            1,
            False,
            5,
            default_metrics,
            "amd64",
            None,
            0,
            10000,
            "unstable",
            "",
            True,
            False,
        )

        assert result == True
        assert number_processed_streams == 1
        assert num_process_test_suites == 1
        by_version_key = f"ci.benchmarks.redislabs/ci/{github_org}/{github_repo}/memtier_benchmark-1Mkeys-load-string-with-10B-values/by.version/{redis_version}/benchmark_end/oss-standalone/memory_maxmemory"
        assert datasink_conn.exists(by_version_key)
        rts = datasink_conn.ts()
        # check we have by version metrics
        assert "version" in rts.info(by_version_key).labels
        assert redis_version == rts.info(by_version_key).labels["version"]

        # get all keys
        all_keys = datasink_conn.keys("*")
        by_hash_keys = []
        for key in all_keys:
            if "/by.hash/" in key.decode():
                by_hash_keys.append(key)

        # ensure we have by hash keys
        assert len(by_hash_keys) > 0
        for hash_key in by_hash_keys:
            # ensure we have both version and hash info on the key
            assert "version" in rts.info(hash_key).labels
            assert "hash" in rts.info(hash_key).labels
            assert redis_version == rts.info(hash_key).labels["version"]


def test_self_contained_coordinator_skip_build_variant():
    try:
        if run_coordinator_tests():
