From 5b2c89d6b505917313eb55d6295a1eba053317ed Mon Sep 17 00:00:00 2001
From: Nachiappan Veerappan Nachiappan
Date: Mon, 10 Jun 2024 19:02:16 -0700
Subject: [PATCH] Fixing the metric increment for cache hit/miss and
 cache_hit_simple.

---
 snuba/web/db_query.py      |  4 +--
 tests/web/test_db_query.py | 57 ++++++++++++++++++++++++++++++++++++++
 2 files changed, 59 insertions(+), 2 deletions(-)

diff --git a/snuba/web/db_query.py b/snuba/web/db_query.py
index b64fde43f4..0015e6a474 100644
--- a/snuba/web/db_query.py
+++ b/snuba/web/db_query.py
@@ -797,10 +797,10 @@ def db_query(
         metrics.increment("cache_hit", tags={"dataset": dataset_name})
     elif stats.get("is_duplicate"):
         metrics.increment("cache_stampede", tags={"dataset": dataset_name})
-    elif stats.get("cache_hit_simple"):
-        metrics.increment("cache_hit_simple", tags={"dataset": dataset_name})
     else:
         metrics.increment("cache_miss", tags={"dataset": dataset_name})
+    if stats.get("cache_hit_simple"):
+        metrics.increment("cache_hit_simple", tags={"dataset": dataset_name})
     if result:
         return result
     raise error or Exception(
diff --git a/tests/web/test_db_query.py b/tests/web/test_db_query.py
index ee981ccc85..d7ac157be3 100644
--- a/tests/web/test_db_query.py
+++ b/tests/web/test_db_query.py
@@ -765,3 +765,60 @@ def test_clickhouse_settings_applied_to_query_id(
     assert ("cache_hit_simple" in stats) == test_cache_hit_simple
     assert clickhouse_query_settings["query_id"].startswith(expected_startswith)
     assert _get_cache_partition(reader).get("test_query_id") is not None
+
+
+@pytest.mark.clickhouse_db
+@pytest.mark.redis_db
+def test_cache_metrics_with_simple_readthrough() -> None:
+    query, storage, attribution_info = _build_test_query("count(distinct(project_id))")
+    state.set_config("disable_lua_randomize_query_id", 1)
+    state.set_config("read_through_cache.disable_lua_scripts_sample_rate", 1)
+
+    formatted_query = format_query(query)
+    reader = storage.get_cluster().get_reader()
+
+    with mock.patch("snuba.web.db_query.metrics", new=mock.Mock()) as metrics_mock:
+        result = db_query(
+            clickhouse_query=query,
+            query_settings=HTTPQuerySettings(),
+            attribution_info=attribution_info,
+            dataset_name="events",
+            query_metadata_list=[],
+            formatted_query=formatted_query,
+            reader=reader,
+            timer=Timer("foo"),
+            stats={},
+            trace_id="trace_id",
+            robust=False,
+        )
+        assert "cache_hit_simple" in result.extra["stats"]
+        # Assert on first call cache_miss is incremented
+        metrics_mock.assert_has_calls(
+            [
+                mock.call.increment("cache_miss", tags={"dataset": "events"}),
+                mock.call.increment("cache_hit_simple", tags={"dataset": "events"}),
+            ]
+        )
+
+        metrics_mock.reset_mock()
+        result = db_query(
+            clickhouse_query=query,
+            query_settings=HTTPQuerySettings(),
+            attribution_info=attribution_info,
+            dataset_name="events",
+            query_metadata_list=[],
+            formatted_query=formatted_query,
+            reader=reader,
+            timer=Timer("foo"),
+            stats={},
+            trace_id="trace_id",
+            robust=False,
+        )
+        assert "cache_hit_simple" in result.extra["stats"]
+        # Assert on second call cache_hit is incremented
+        metrics_mock.assert_has_calls(
+            [
+                mock.call.increment("cache_hit", tags={"dataset": "events"}),
+                mock.call.increment("cache_hit_simple", tags={"dataset": "events"}),
+            ]
+        )