Stats and UI Support for Multiple Hosts #2394

Closed · wants to merge 7 commits
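The feature targets runs where different user classes hit different hosts; before this change, stats for the same path on two hosts were merged into a single entry. A minimal, hypothetical locustfile of that shape (class names and hosts are illustrative):

```python
from locust import HttpUser, task


class ApiUser(HttpUser):
    host = "https://api.example.com"  # illustrative host

    @task
    def users(self):
        self.client.get("/users")


class WebUser(HttpUser):
    host = "https://www.example.com"  # a second, different host

    @task
    def users(self):
        self.client.get("/users")
```

With the changes below, `/users` is tracked and reported once per host instead of once overall.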
locust/clients.py: 4 additions & 0 deletions
@@ -129,6 +129,9 @@ def request(self, method, url, name=None, catch_response=False, context={}, **kw

# prepend url with hostname unless it's already an absolute URL
url = self._build_url(url)
+ parsed_url = urlparse(url)
+ # absolute URLs may use a different host
+ actual_host = f"{parsed_url.scheme}://{parsed_url.netloc}"

start_time = time.time()
start_perf_counter = time.perf_counter()
@@ -154,6 +157,7 @@ def request(self, method, url, name=None, catch_response=False, context={}, **kw
"exception": None,
"start_time": start_time,
"url": url,
"host": actual_host,
}

# get the length of the content, but if the argument stream is set to True, we take
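For reference, `actual_host` is simply the scheme plus network location of the final URL, so absolute URLs passed to `client.get(...)` are attributed to the host they actually target. A standalone sketch of the derivation (function name and URLs are illustrative):

```python
from urllib.parse import urlparse


def host_of(url: str) -> str:
    # scheme + netloc, mirroring the actual_host expression in the diff
    parsed = urlparse(url)
    return f"{parsed.scheme}://{parsed.netloc}"


print(host_of("https://api.example.com/v1/users?id=1"))  # https://api.example.com
print(host_of("http://localhost:8089/health"))  # http://localhost:8089
```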
locust/contrib/fasthttp.py: 4 additions & 0 deletions
@@ -188,6 +188,9 @@ def request(
"""
# prepend url with hostname unless it's already an absolute URL
built_url = self._build_url(url)
+ parsed_url = urlparse(built_url)
+ # absolute URLs may use a different host
+ actual_host = f"{parsed_url.scheme}://{parsed_url.netloc}"

start_time = time.time() # seconds since epoch

@@ -224,6 +227,7 @@ def request(
"exception": None,
"start_time": start_time,
"url": built_url, # this is a small deviation from HttpSession, which gets the final (possibly redirected) URL
"host": actual_host,
}

if not allow_redirects:
locust/html.py: 9 additions & 7 deletions
@@ -21,12 +21,7 @@ def render_template(file, template_path, **kwargs):
return template.render(**kwargs)


- def get_html_report(
-     environment,
-     show_download_link=True,
-     use_modern_ui=False,
-     theme="",
- ):
+ def get_html_report(environment, show_download_link=True, use_modern_ui=False, theme="", **kwargs):
root_path = os.path.dirname(os.path.abspath(__file__))
if use_modern_ui:
static_path = os.path.join(root_path, "webui", "dist", "assets")
@@ -53,6 +48,8 @@ def get_html_report(
all_hosts = {l.host for l in environment.runner.user_classes}
if len(all_hosts) == 1:
host = list(all_hosts)[0]
+ elif use_modern_ui:
+     host = list(all_hosts)

requests_statistics = list(chain(sort_stats(stats.entries), [stats.total]))
failures_statistics = sort_stats(stats.errors)
@@ -111,6 +108,7 @@ def get_html_report(
{
"name": escape(stat.name),
"method": escape(stat.method or ""),
"host": escape(stat.host or ""),
**{
str(percentile): stat.get_response_time_percentile(percentile)
for percentile in PERCENTILES_FOR_HTML_REPORT
Expand All @@ -120,13 +118,17 @@ def get_html_report(
],
"start_time": start_time,
"end_time": end_time,
"host": escape(str(host)),
"has_multiple_hosts": isinstance(host, list)
and len(host) > 1
and any(stat.host for stat in requests_statistics),
"host": escape(host) if isinstance(host, str) else [escape(h) for h in host],
"history": history,
"show_download_link": show_download_link,
"locustfile": escape(str(environment.locustfile)),
"tasks": task_data,
"percentile1": stats_module.PERCENTILES_TO_CHART[0],
"percentile2": stats_module.PERCENTILES_TO_CHART[1],
+ **kwargs,
},
theme=theme,
static_js="\n".join(static_js),
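To summarize the report plumbing: `host` stays a single string when every user class agrees on one host, and (under the modern UI) becomes a list otherwise, with `has_multiple_hosts` gating per-host rendering. A sketch of the resulting template values under assumed inputs; the `requests_statistics` check is noted in a comment:

```python
# Assumed: two user classes with different hosts, modern UI enabled.
all_hosts = {"https://api.example.com", "https://www.example.com"}

host = list(all_hosts)[0] if len(all_hosts) == 1 else list(all_hosts)
# The real check also requires any(stat.host for stat in requests_statistics).
has_multiple_hosts = isinstance(host, list) and len(host) > 1

print(host)                # both hosts, in set order
print(has_multiple_hosts)  # True
```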
locust/runners.py: 3 additions & 3 deletions
@@ -124,10 +124,10 @@ def __init__(self, environment: "Environment") -> None:
self._users_dispatcher: Optional[UsersDispatcher] = None

# set up event listeners for recording requests
- def on_request(request_type, name, response_time, response_length, exception=None, **_kwargs):
-     self.stats.log_request(request_type, name, response_time, response_length)
+ def on_request(request_type, name, host, response_time, response_length, exception=None, **_kwargs):
+     self.stats.log_request(request_type, name, host, response_time, response_length)
if exception:
-     self.stats.log_error(request_type, name, exception)
+     self.stats.log_error(request_type, name, host, exception)

self.environment.events.request.add_listener(on_request)

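Since the request event is fired with keyword arguments, existing listeners that accept `**kwargs` keep working unchanged; listeners that want the new field can declare `host` explicitly. A sketch of a user-side listener under the PR's payload (threshold and message are illustrative):

```python
from locust import events


@events.request.add_listener
def log_slow_requests(request_type, name, host, response_time, **kwargs):
    # "host" is the request_meta field this PR adds to the event payload
    if response_time > 1000:
        print(f"slow: {request_type} {host}{name} took {response_time:.0f} ms")
```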
locust/stats.py: 100 additions & 49 deletions
@@ -63,6 +63,7 @@ def writerow(self, columns: Iterable[str | int | float]) -> None:
class StatsBaseDict(TypedDict):
name: str
method: str
+ host: str


class StatsEntryDict(StatsBaseDict):
@@ -88,6 +89,7 @@ class StatsErrorDict(StatsBaseDict):
class StatsHolder(Protocol):
name: str
method: str
+ host: str


S = TypeVar("S", bound=StatsHolder)
@@ -193,7 +195,11 @@ def __init__(self, request_stats):

def __missing__(self, key):
self[key] = StatsEntry(
- self.request_stats, key[0], key[1], use_response_times_cache=self.request_stats.use_response_times_cache
+ self.request_stats,
+ key[0],
+ key[1],
+ key[2],
+ use_response_times_cache=self.request_stats.use_response_times_cache,
)
return self[key]

@@ -211,7 +217,7 @@ def __init__(self, use_response_times_cache=True):
is not needed.
"""
self.use_response_times_cache = use_response_times_cache
- self.entries: Dict[Tuple[str, str], StatsEntry] = EntriesDict(self)
+ self.entries: Dict[Tuple[str, str, str], StatsEntry] = EntriesDict(self)
self.errors: Dict[str, StatsError] = {}
self.total = StatsEntry(self, "Aggregated", None, use_response_times_cache=self.use_response_times_cache)
self.history = []
@@ -236,27 +242,27 @@ def last_request_timestamp(self):
def start_time(self):
return self.total.start_time

- def log_request(self, method: str, name: str, response_time: int, content_length: int) -> None:
+ def log_request(self, method: str, name: str, host: str, response_time: int, content_length: int) -> None:
self.total.log(response_time, content_length)
- self.entries[(name, method)].log(response_time, content_length)
+ self.entries[(name, method, host)].log(response_time, content_length)

- def log_error(self, method: str, name: str, error: Exception | str | None) -> None:
+ def log_error(self, method: str, name: str, host: str, error: Exception | str | None) -> None:
self.total.log_error(error)
- self.entries[(name, method)].log_error(error)
+ self.entries[(name, method, host)].log_error(error)

# store error in errors dict
- key = StatsError.create_key(method, name, error)
+ key = StatsError.create_key(method, name, host, error)
entry = self.errors.get(key)
if not entry:
- entry = StatsError(method, name, error)
+ entry = StatsError(method, name, host, error)
self.errors[key] = entry
entry.occurred()

- def get(self, name: str, method: str) -> "StatsEntry":
+ def get(self, name: str, method: str, host: str) -> "StatsEntry":
"""
- Retrieve a StatsEntry instance by name and method
+ Retrieve a StatsEntry instance by name, method, and host
"""
- return self.entries[(name, method)]
+ return self.entries[(name, method, host)]

def reset_all(self) -> None:
"""
@@ -291,12 +297,21 @@ class StatsEntry:
Represents a single stats entry (name and method)
"""

- def __init__(self, stats: Optional[RequestStats], name: str, method: str, use_response_times_cache: bool = False):
+ def __init__(
+     self,
+     stats: Optional[RequestStats],
+     name: str,
+     method: str,
+     host: str = "",
+     use_response_times_cache: bool = False,
+ ):
self.stats = stats
self.name = name
""" Name (URL) of this stats entry """
self.method = method
""" Method (GET, POST, PUT, etc.) """
+ self.host = host
+ """ Host (BASE_URL) of the request """
self.use_response_times_cache = use_response_times_cache
"""
If set to True, the copy of the response_time dict will be stored in response_times_cache
@@ -546,11 +561,11 @@ def serialize(self) -> StatsEntryDict:
@classmethod
def unserialize(cls, data: StatsEntryDict) -> "StatsEntry":
"""Return the unserialzed version of the specified dict"""
obj = cls(None, data["name"], data["method"])
obj = cls(None, data["name"], data["method"], data.get("host", ""))
valid_keys = StatsEntryDict.__annotations__.keys()

for key, value in data.items():
if key in ["name", "method"] or key not in valid_keys:
if key in ["name", "method", "host"] or key not in valid_keys:
continue

setattr(obj, key, value)
@@ -699,13 +714,15 @@ def to_dict(self, escape_string_values=False):
"ninetieth_response_time": self.get_response_time_percentile(0.9),
"ninety_ninth_response_time": self.get_response_time_percentile(0.99),
"avg_content_length": self.avg_content_length,
"host": self.host,
}


class StatsError:
- def __init__(self, method: str, name: str, error: Exception | str | None, occurrences: int = 0):
+ def __init__(self, method: str, name: str, host: str, error: Exception | str | None, occurrences: int = 0):
self.method = method
self.name = name
+ self.host = host
self.error = error
self.occurrences = occurrences

@@ -724,8 +741,8 @@ def parse_error(cls, error: Exception | str | None) -> str:
return string_error.replace(hex_address, "0x....")

@classmethod
- def create_key(cls, method: str, name: str, error: Exception | str | None) -> str:
-     key = f"{method}.{name}.{StatsError.parse_error(error)!r}"
+ def create_key(cls, method: str, name: str, host: str, error: Exception | str | None) -> str:
+     key = f"{method}.{name}.{host}.{StatsError.parse_error(error)!r}"
return hashlib.sha256(key.encode("utf-8")).hexdigest()

def occurred(self) -> None:
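Because `host` is folded into the hashed key, identical failures against different hosts are now tracked as distinct errors. A sketch (values illustrative):

```python
from locust.stats import StatsError

k1 = StatsError.create_key("GET", "/users", "https://api.example.com", "ConnectionError")
k2 = StatsError.create_key("GET", "/users", "https://www.example.com", "ConnectionError")
assert k1 != k2  # same method, name, and error, but a different host
```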
@@ -759,14 +776,15 @@ def _getattr(obj: "StatsError", key: str, default: Optional[Any]) -> Optional[An

@classmethod
def unserialize(cls, data: StatsErrorDict) -> "StatsError":
return cls(data["method"], data["name"], data["error"], data["occurrences"])
return cls(data["method"], data["name"], data.get("host", ""), data["error"], data["occurrences"])

def to_dict(self, escape_string_values=False):
return {
"method": escape(self.method),
"name": escape(self.name),
"error": escape(self.parse_error(self.error)),
"occurrences": self.occurrences,
"host": self.host,
}


@@ -798,9 +816,11 @@ def on_report_to_master(client_id: str, data: Dict[str, Any]) -> None:
def on_worker_report(client_id: str, data: Dict[str, Any]) -> None:
for stats_data in data["stats"]:
entry = StatsEntry.unserialize(stats_data)
- request_key = (entry.name, entry.method)
+ request_key = (entry.name, entry.method, entry.host)
if request_key not in stats.entries:
- stats.entries[request_key] = StatsEntry(stats, entry.name, entry.method, use_response_times_cache=True)
+ stats.entries[request_key] = StatsEntry(
+     stats, entry.name, entry.method, entry.host, use_response_times_cache=True
+ )
stats.entries[request_key].extend(entry)

for error_key, error in data["errors"].items():
@@ -910,6 +930,13 @@ def sort_stats(stats: Dict[Any, S]) -> List[S]:
return [stats[key] for key in sorted(stats.keys())]


+ def group_stats_by(group_by: str, stats: List[S]) -> Dict[str, List[S]]:
+     return {
+         group_name: [stat for stat in stats if getattr(stat, group_by) == group_name]
+         for group_name in set(getattr(stat, group_by) for stat in stats)
+     }
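`group_stats_by` is a plain bucketing helper over any attribute the entries share. Continuing the `RequestStats` sketch from above (importing the helpers this PR exposes in `locust.stats`):

```python
from locust.stats import group_stats_by, sort_stats

by_host = group_stats_by("host", sort_stats(stats.entries))
# {"https://api.example.com": [<StatsEntry GET /users>],
#  "https://www.example.com": [<StatsEntry GET /users>]}
for host_name, entries in by_host.items():
    print(host_name, sum(e.num_requests for e in entries))
```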


def stats_history(runner: "Runner") -> None:
"""Save current stats info to history for charts of report."""
while True:
@@ -976,40 +1003,64 @@ def _percentile_fields(self, stats_entry: StatsEntry, use_current: bool = False)
else:
return [int(stats_entry.get_response_time_percentile(x) or 0) for x in self.percentiles_to_report]

- def requests_csv(self, csv_writer: CSVWriter) -> None:
+ def requests_csv(self, csv_writer: CSVWriter, group_by: Optional[str] = "") -> None:
"""Write requests csv with header and data rows."""
-     csv_writer.writerow(self.requests_csv_columns)
-     self._requests_data_rows(csv_writer)
+     sorted_stats = sort_stats(self.environment.stats.entries)
+     if group_by and any(getattr(stat, group_by) for stat in sorted_stats):
+         grouped_stats = group_stats_by(group_by, sorted_stats)
+
+         for group_name, stats in grouped_stats.items():
+             csv_writer.writerow([group_name])
+             csv_writer.writerow(self.requests_csv_columns)
+             self._requests_data_rows(csv_writer, stats)
+             csv_writer.writerow([])
+     else:
+         csv_writer.writerow(self.requests_csv_columns)
+         self._requests_data_rows(csv_writer, chain(sorted_stats, [self.environment.stats.total]))

- def _requests_data_rows(self, csv_writer: CSVWriter) -> None:
+ def _requests_data_rows(self, csv_writer: CSVWriter, stats_entries=None) -> None:
"""Write requests csv data row, excluding header."""
-     stats = self.environment.stats
-     for stats_entry in chain(sort_stats(stats.entries), [stats.total]):
-         csv_writer.writerow(
-             chain(
-                 [
-                     stats_entry.method,
-                     stats_entry.name,
-                     stats_entry.num_requests,
-                     stats_entry.num_failures,
-                     stats_entry.median_response_time,
-                     stats_entry.avg_response_time,
-                     stats_entry.min_response_time or 0,
-                     stats_entry.max_response_time,
-                     stats_entry.avg_content_length,
-                     stats_entry.total_rps,
-                     stats_entry.total_fail_per_sec,
-                 ],
-                 self._percentile_fields(stats_entry),
-             )
-         )
+     stats_entries = (
+         stats_entries
+         if stats_entries
+         else chain(sort_stats(self.environment.stats.entries), [self.environment.stats.total])
+     )
+
+     for stats_entry in stats_entries:
+         rows = [
+             stats_entry.method,
+             stats_entry.name,
+             stats_entry.num_requests,
+             stats_entry.num_failures,
+             stats_entry.median_response_time,
+             stats_entry.avg_response_time,
+             stats_entry.min_response_time or 0,
+             stats_entry.max_response_time,
+             stats_entry.avg_content_length,
+             stats_entry.total_rps,
+             stats_entry.total_fail_per_sec,
+         ]
+
+         csv_writer.writerow(chain(rows, self._percentile_fields(stats_entry)))

+ def failures_csv(self, csv_writer: CSVWriter, group_by: Optional[str] = "") -> None:
+     sorted_errors = sort_stats(self.environment.stats.errors)
+     if group_by and any(getattr(error, group_by) for error in sorted_errors):
+         grouped_errors = group_stats_by(group_by, sorted_errors)
+
+         for group_name, failures in grouped_errors.items():
+             csv_writer.writerow([group_name])
+             csv_writer.writerow(self.failures_columns)
+             self._failures_data_rows(csv_writer, failures)
+             csv_writer.writerow([])
+     else:
+         csv_writer.writerow(self.failures_columns)
+         self._failures_data_rows(csv_writer, sorted_errors)

- def failures_csv(self, csv_writer: CSVWriter) -> None:
-     csv_writer.writerow(self.failures_columns)
-     self._failures_data_rows(csv_writer)
+ def _failures_data_rows(self, csv_writer: CSVWriter, failures_entries=None) -> None:
+     failures_entries = failures_entries if failures_entries else sort_stats(self.environment.stats.errors)

- def _failures_data_rows(self, csv_writer: CSVWriter) -> None:
-     for stats_error in sort_stats(self.environment.stats.errors):
+     for stats_error in failures_entries:
csv_writer.writerow(
[
stats_error.method,
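With `group_by="host"`, the requests (and failures) CSVs are emitted as one labeled block per host: a group-label row, the usual header row, the data rows, then a blank row. A usage sketch; `environment` is assumed to be a populated `locust.env.Environment`:

```python
import csv
import sys

from locust.stats import PERCENTILES_TO_REPORT, StatsCSV


def dump_grouped_requests_csv(environment) -> None:
    stats_csv = StatsCSV(environment, PERCENTILES_TO_REPORT)
    stats_csv.requests_csv(csv.writer(sys.stdout), group_by="host")

# Expected output shape (values hypothetical):
#   https://api.example.com
#   Type,Name,Request Count,...
#   GET,/users,120,...
#
#   https://www.example.com
#   ...
```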