@@ -1,6 +1,8 @@
 import logging
+from collections import deque
 
 import pytest
+from src import performance_tracker
 from src.performance_tracker import (
     PerformanceMetrics,
     track_phase,
@@ -33,6 +35,18 @@ def end_phase(self) -> None:
         self.ended += 1
 
 
+def _time_sequence(*values: float):
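+    """Return a time.time() stand-in that pops the given values in order."""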
+    queue = deque(values)
+
+    def _next_time() -> float:
+        if not queue:
+            raise AssertionError("No more time values available")
+        return queue.popleft()
+
+    return _next_time
+
+
 def test_performance_metrics_phase_tracking_and_finalize(
     monkeypatch: pytest.MonkeyPatch,
 ) -> None:
@@ -107,3 +120,82 @@ def test_track_phase_context_manager_ensures_end_called_on_exception() -> None:
 
     assert dummy.started == ["phase-one"]
     assert dummy.ended == 1
+
+
+def test_track_phase_wraps_start_and_end(monkeypatch: pytest.MonkeyPatch) -> None:
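+    """track_phase should call start_phase before the body and end_phase after it."""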
+    metrics = PerformanceMetrics()
+    events: list[tuple[str, str | None]] = []
+
+    def fake_start(phase_name: str) -> None:
+        events.append(("start", phase_name))
+
+    def fake_end() -> None:
+        events.append(("end", None))
+
+    monkeypatch.setattr(metrics, "start_phase", fake_start)
+    monkeypatch.setattr(metrics, "end_phase", fake_end)
+
+    with track_phase(metrics, "backend_call"):
+        events.append(("inside", None))
+
+    assert events == [
+        ("start", "backend_call"),
+        ("inside", None),
+        ("end", None),
+    ]
+
+
+def test_finalize_completes_active_phase(monkeypatch: pytest.MonkeyPatch) -> None:
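+    """finalize() closes the open phase (12.5 - 10.0 = 2.5s) and totals 15.0 - 5.0 = 10.0s."""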
+    time_values = _time_sequence(10.0, 12.5, 15.0)
+    monkeypatch.setattr(performance_tracker.time, "time", time_values)
+
+    metrics = PerformanceMetrics(request_start=5.0)
+    metrics.start_phase("backend_call")
+
+    metrics.finalize()
+
+    assert metrics.backend_call_time == 2.5
+    assert metrics.total_time == 10.0
+
+
+def test_summary_helpers_include_defaults() -> None:
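+    """Unset metadata falls back to "unknown"; durations format to three decimals."""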
+    metrics = PerformanceMetrics()
+    metrics.total_time = 2.3456
+    metrics.command_processing_time = 0.123
+    metrics.response_processing_time = 0.456
+
+    summary_prefix = metrics._format_summary_prefix()
+    assert summary_prefix == [
+        "PERF_SUMMARY session=unknown",
+        "total=2.346s",
+        "backend=unknown",
+        "model=unknown",
+        "streaming=False",
+        "commands=False",
+    ]
+
+    timing_parts = metrics._format_timing_parts()
+    assert timing_parts == [
+        "cmd_proc=0.123s",
+        "resp_proc=0.456s",
+    ]
+
+
+def test_track_phase_ends_on_exception(monkeypatch: pytest.MonkeyPatch) -> None:
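+    """end_phase must run even when the managed block raises."""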
+    metrics = PerformanceMetrics()
+    called: list[str] = []
+
+    def fake_end_phase() -> None:
+        called.append("end")
+
+    monkeypatch.setattr(metrics, "end_phase", fake_end_phase)
+
+    with pytest.raises(RuntimeError):
+        with track_phase(metrics, "response_processing"):
+            raise RuntimeError("boom")
+
+    assert called == ["end"]