[V1][Bugfix] Do not reset prefix caching metrics (#14235)
comaniac authored Mar 5, 2025
1 parent 0df2510 commit ade3f7d
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions vllm/v1/metrics/loggers.py
@@ -34,6 +34,9 @@ class LoggingStatLogger(StatLoggerBase):
     def __init__(self):
         self._reset(time.monotonic())
         self.last_scheduler_stats = SchedulerStats()
+        # Prefix cache metrics. This cannot be reset.
+        # TODO: Make the interval configurable.
+        self.prefix_caching_metrics = PrefixCachingMetrics()
 
     def _reset(self, now):
         self.last_log_time = now
@@ -42,9 +45,6 @@ def _reset(self, now):
         self.num_prompt_tokens: list[int] = []
         self.num_generation_tokens: list[int] = []
 
-        # Prefix cache metrics. TODO: Make the interval configurable.
-        self.prefix_caching_metrics = PrefixCachingMetrics()
-
     def _track_iteration_stats(self, iteration_stats: IterationStats):
         # Save tracked stats for token counters.
         self.num_prompt_tokens.append(iteration_stats.num_prompt_tokens)
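For illustration, here is a minimal, self-contained sketch of the pattern this fix establishes: cumulative prefix-cache metrics are constructed once in __init__ and survive logging intervals, while per-interval counters are rebuilt by _reset(). The _CumulativePrefixCacheMetrics class and its observe()/hit_rate interface below are hypothetical stand-ins, not vLLM's actual PrefixCachingMetrics API.

```python
import time


class _CumulativePrefixCacheMetrics:
    """Hypothetical stand-in: accumulates prefix-cache queries and hits."""

    def __init__(self):
        self.queries = 0
        self.hits = 0

    def observe(self, queries: int, hits: int) -> None:
        self.queries += queries
        self.hits += hits

    @property
    def hit_rate(self) -> float:
        return self.hits / self.queries if self.queries else 0.0


class LoggingStatLoggerSketch:
    """Sketch of the structure after this commit."""

    def __init__(self):
        self._reset(time.monotonic())
        # Created once and never re-created: it survives every logging
        # interval, so the reported hit rate stays cumulative.
        self.prefix_caching_metrics = _CumulativePrefixCacheMetrics()

    def _reset(self, now: float) -> None:
        # Per-interval state only; rebuilt after each log line.
        self.last_log_time = now
        self.num_prompt_tokens: list[int] = []
        self.num_generation_tokens: list[int] = []

    def log(self) -> None:
        print(f"prefix cache hit rate: {self.prefix_caching_metrics.hit_rate:.1%}")
        self._reset(time.monotonic())


if __name__ == "__main__":
    logger = LoggingStatLoggerSketch()
    logger.prefix_caching_metrics.observe(queries=100, hits=30)
    logger.log()  # 30.0%
    logger.prefix_caching_metrics.observe(queries=100, hits=90)
    logger.log()  # 60.0%: cumulative over both intervals, not 90.0%
```

Before this change, the metrics object was created inside _reset(), so it was wiped on every logging interval and each log line reported only that interval's hit rate rather than the cumulative one.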
