
Commit

Enhance request generation capacity (~2x higher) (#32)
* Use ProcessPoolExecutor to increase traffic generation capacity (see the sketch after the change summary)

* Add a simple stress-testing script

* Add a simple workload config file used for stress testing

* Minor change in the configuration script to ensure the virtual env is created smoothly
engshahrad authored Oct 25, 2024
1 parent bfd3710 commit 1338d6f
Showing 4 changed files with 130 additions and 3 deletions.
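
The core of the change is swapping FuturesSession's default thread pool (previously capped at 16 workers) for a ProcessPoolExecutor sized to the host's CPU count, and serializing the request payload once per workload instead of once per request. Below is a minimal, self-contained sketch of that pattern, not code from this repository; the endpoint URL, payload, and the fire_requests helper are illustrative placeholders:

from concurrent.futures import ProcessPoolExecutor
import json
import os

from requests_futures.sessions import FuturesSession


def fire_requests(url, payload, count):
    # Process-backed session, sized to the machine's core count (as in this commit).
    session = FuturesSession(executor=ProcessPoolExecutor(max_workers=os.cpu_count()))
    body = json.dumps(payload)  # serialize once, reuse for every request
    futures = [
        session.post(
            url,
            headers={"Content-Type": "application/json"},
            data=body,
            verify=False,
        )
        for _ in range(count)
    ]
    # Block until every response arrives and report the status codes.
    return [f.result().status_code for f in futures]


if __name__ == "__main__":
    # Hypothetical local endpoint; the __main__ guard matters because
    # ProcessPoolExecutor re-imports this module in its worker processes.
    print(fire_requests("https://127.0.0.1", {}, 10))

How much a process pool gains over a thread pool depends on how much CPU time serialization and connection handling consume per request; the stress-test script added in this commit is what quantifies the ~2x improvement cited in the title.
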
1 change: 1 addition & 0 deletions configure.sh
@@ -15,6 +15,7 @@ then
     sudo apt install python3.12-venv
 else
     echo "Python 3.12 is already installed."
+    sudo apt install python3.12-venv
 fi
 
 # Check if pip is installed
8 changes: 5 additions & 3 deletions synthetic_workload_invoker/WorkloadInvoker.py
@@ -7,6 +7,7 @@
 
 # Standard imports
 import argparse
+from concurrent.futures import ProcessPoolExecutor
 import json
 import os
 from requests_futures.sessions import FuturesSession
@@ -154,11 +155,12 @@ def HTTPInstanceGeneratorGeneric(instance_times, blocking_cli, url, data):
         logger.error("Invalid URL: " + url)
         return False
 
-    session = FuturesSession(max_workers=16)
+    session = FuturesSession(executor=ProcessPoolExecutor(max_workers=os.cpu_count()))
     parameters = {"blocking": blocking_cli, "result": RESULT}
     after_time, before_time = 0, 0
 
     st = 0
+    json_data = json.dumps(data)
     for t in instance_times:
         # st: sleep time
         st = st + t - (after_time - before_time)
@@ -168,7 +170,7 @@ def HTTPInstanceGeneratorGeneric(instance_times, blocking_cli, url, data):
         future = session.post(
             url,
             headers={"Content-Type": "application/json"},
-            data=json.dumps(data),
+            data=json_data,
             verify=False,
         )
         after_time = time.time()
@@ -178,7 +180,7 @@ def HTTPInstanceGeneratorGeneric(instance_times, blocking_cli, url, data):

 def CreateActionInvocationThreads(workload, all_events):
     threads = []
-    for (instance, instance_times) in all_events.items():
+    for instance, instance_times in all_events.items():
         blocking_cli = workload["blocking_cli"]
         if workload["endpoint"] == "local_openwhisk":
             action = workload["instances"][instance]["application"]
103 changes: 103 additions & 0 deletions tests/stress_tests/ReqStressTest.py
@@ -0,0 +1,103 @@
import json
import matplotlib.pyplot as plt
import os
import sys
import time

test_rates = [50, 100, 200, 500, 1000, 2000, 5000]


def ParseLog(log_file):
    with open(log_file, "r") as f:
        lines = f.readlines()

    start_time = 0
    end_time = 0
    for line in lines:
        if "Test started" in line:
            start_time = line.split(" - w")[0]
            ms = int(start_time.split(",")[1])
            start_time = time.mktime(time.strptime(start_time, "%Y-%m-%d %H:%M:%S,%f"))
            start_time += ms / 1000
        if "Test ended" in line:
            end_time = line.split(" - w")[0]
            ms = int(end_time.split(",")[1])
            end_time = time.mktime(time.strptime(end_time, "%Y-%m-%d %H:%M:%S,%f"))
            end_time += ms / 1000

    return end_time - start_time


def main():

    results = {"test_rates": [], "durations": [], "logged_durations": []}

    with open(
        os.path.join(
            os.path.dirname(__file__), "..", "test_data", "stress_test_workload.json"
        )
    ) as f:
        workload = json.load(f)

    for test_rate in test_rates:
        if os.path.exists(
            os.path.join(os.path.dirname(__file__), "..", "..", "logs", "SWI.log")
        ):
            os.remove(
                os.path.join(os.path.dirname(__file__), "..", "..", "logs", "SWI.log")
            )

        workload["instances"]["instance1"]["rate"] = test_rate
        file_description = "stress_test_workload_" + str(test_rate) + ".json"

        with open(
            os.path.join(
                os.path.dirname(__file__), "..", "test_data", file_description
            ),
            "w",
        ) as f:
            json.dump(workload, f, indent=4)

        start = time.time()
        os.system(
            " cd ../../; ./WorkloadInvoker -c ./tests/test_data/" + file_description
        )
        end = time.time()

        time.sleep(0.5)

        log_duration = ParseLog(
            os.path.join(os.path.dirname(__file__), "..", "..", "logs", "SWI.log")
        )

        results["test_rates"].append(test_rate)
        results["durations"].append(end - start)
        results["logged_durations"].append(log_duration)

    with open(
        os.path.join(os.path.dirname(__file__), "stress_test_results.json"),
        "w",
    ) as f:
        json.dump(results, f, indent=4)


def plot_results():
    with open(
        os.path.join(os.path.dirname(__file__), "stress_test_results.json"),
        "r",
    ) as f:
        results = json.load(f)

    plt.plot(
        results["test_rates"], [15.0 / x for x in results["durations"]], label="Now"
    )
    plt.xlabel("Traffic Rate (rps)")
    plt.ylabel("Testing Capacity")
    plt.legend()
    plt.xscale("log")
    plt.ylim(0, 1.1)
    plt.savefig(os.path.join(os.path.dirname(__file__), "stress_test_results.png"))


if __name__ == "__main__":
    main()
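
Note that the script's __main__ block only runs main(); plot_results() reads the stress_test_results.json written by main() and has to be invoked separately, for example (a hypothetical invocation, not part of the commit, run from tests/stress_tests with matplotlib installed):

python3 -c "import ReqStressTest; ReqStressTest.plot_results()"
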
21 changes: 21 additions & 0 deletions tests/test_data/stress_test_workload.json
@@ -0,0 +1,21 @@
{
    "test_name": "example_test",
    "test_duration_in_seconds": 15,
    "random_seed": 100,
    "blocking_cli": false,
    "endpoint": "generic",
    "instances":{
        "instance1":{
            "application": "example_app1",
            "url": "https://127.0.0.1",
            "data": {},
            "distribution": "Uniform",
            "rate": 2,
            "activity_window": [5, 15]
        }
    },
    "perf_monitoring":{
        "runtime_script": null,
        "post_script": null
    }
}
