Fixing the metric increment for cache hit/miss and cache_hit_simple (#6018)
Fixes the metric increments for cache_hit, cache_miss and cache_hit_simple when the simple readthrough cache is enabled, so that cache_hit_simple is recorded alongside the hit/miss counters instead of replacing them.
Co-authored-by: Nachiappan Veerappan Nachiappan <[email protected]>
nachivrn authored Jun 11, 2024
1 parent f398ff9 commit 3e81923
Showing 2 changed files with 59 additions and 2 deletions.
4 changes: 2 additions & 2 deletions snuba/web/db_query.py
@@ -797,10 +797,10 @@ def db_query(
         metrics.increment("cache_hit", tags={"dataset": dataset_name})
     elif stats.get("is_duplicate"):
         metrics.increment("cache_stampede", tags={"dataset": dataset_name})
-    elif stats.get("cache_hit_simple"):
-        metrics.increment("cache_hit_simple", tags={"dataset": dataset_name})
     else:
         metrics.increment("cache_miss", tags={"dataset": dataset_name})
+    if stats.get("cache_hit_simple"):
+        metrics.increment("cache_hit_simple", tags={"dataset": dataset_name})
     if result:
         return result
     raise error or Exception(
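The behavioural difference is easiest to see side by side. Below is a minimal, hypothetical sketch (plain Python, not Snuba code; a collections.Counter stands in for the real metrics backend, and the stats dict mirrors the flags db_query inspects). It contrasts the old elif chain, where cache_hit_simple and cache_hit/cache_miss were mutually exclusive for a given query, with the new logic, where cache_hit_simple is recorded independently.

# Minimal sketch of the behaviour change; Counter stands in for the metrics backend.
from collections import Counter


def record_before(stats: dict, metrics: Counter) -> None:
    # Old logic: cache_hit_simple sat on the elif chain, so whenever it fired
    # the cache_hit / cache_miss counters were skipped for that query.
    if stats.get("cache_hit"):
        metrics["cache_hit"] += 1
    elif stats.get("is_duplicate"):
        metrics["cache_stampede"] += 1
    elif stats.get("cache_hit_simple"):
        metrics["cache_hit_simple"] += 1
    else:
        metrics["cache_miss"] += 1


def record_after(stats: dict, metrics: Counter) -> None:
    # New logic: cache_hit_simple is incremented independently, so cache_hit
    # and cache_miss keep their meaning when the simple cache is in use.
    if stats.get("cache_hit"):
        metrics["cache_hit"] += 1
    elif stats.get("is_duplicate"):
        metrics["cache_stampede"] += 1
    else:
        metrics["cache_miss"] += 1
    if stats.get("cache_hit_simple"):
        metrics["cache_hit_simple"] += 1


# First query through the simple readthrough cache (a miss), as in the test below.
stats = {"cache_hit_simple": True}
before, after = Counter(), Counter()
record_before(stats, before)
record_after(stats, after)
print(dict(before))  # {'cache_hit_simple': 1} -- the miss was never counted
print(dict(after))   # {'cache_miss': 1, 'cache_hit_simple': 1}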
57 changes: 57 additions & 0 deletions tests/web/test_db_query.py
@@ -765,3 +765,60 @@ def test_clickhouse_settings_applied_to_query_id(
     assert ("cache_hit_simple" in stats) == test_cache_hit_simple
     assert clickhouse_query_settings["query_id"].startswith(expected_startswith)
     assert _get_cache_partition(reader).get("test_query_id") is not None
+
+
+@pytest.mark.clickhouse_db
+@pytest.mark.redis_db
+def test_cache_metrics_with_simple_readthrough() -> None:
+    query, storage, attribution_info = _build_test_query("count(distinct(project_id))")
+    state.set_config("disable_lua_randomize_query_id", 1)
+    state.set_config("read_through_cache.disable_lua_scripts_sample_rate", 1)
+
+    formatted_query = format_query(query)
+    reader = storage.get_cluster().get_reader()
+
+    with mock.patch("snuba.web.db_query.metrics", new=mock.Mock()) as metrics_mock:
+        result = db_query(
+            clickhouse_query=query,
+            query_settings=HTTPQuerySettings(),
+            attribution_info=attribution_info,
+            dataset_name="events",
+            query_metadata_list=[],
+            formatted_query=formatted_query,
+            reader=reader,
+            timer=Timer("foo"),
+            stats={},
+            trace_id="trace_id",
+            robust=False,
+        )
+        assert "cache_hit_simple" in result.extra["stats"]
+        # Assert on first call cache_miss is incremented
+        metrics_mock.assert_has_calls(
+            [
+                mock.call.increment("cache_miss", tags={"dataset": "events"}),
+                mock.call.increment("cache_hit_simple", tags={"dataset": "events"}),
+            ]
+        )
+
+        metrics_mock.reset_mock()
+        result = db_query(
+            clickhouse_query=query,
+            query_settings=HTTPQuerySettings(),
+            attribution_info=attribution_info,
+            dataset_name="events",
+            query_metadata_list=[],
+            formatted_query=formatted_query,
+            reader=reader,
+            timer=Timer("foo"),
+            stats={},
+            trace_id="trace_id",
+            robust=False,
+        )
+        assert "cache_hit_simple" in result.extra["stats"]
+        # Assert on second call cache_hit is incremented
+        metrics_mock.assert_has_calls(
+            [
+                mock.call.increment("cache_hit", tags={"dataset": "events"}),
+                mock.call.increment("cache_hit_simple", tags={"dataset": "events"}),
+            ]
+        )
