From 2849e7f549a164d46f9627aaa594a2568d82aa0a Mon Sep 17 00:00:00 2001 From: Matt Quinn Date: Tue, 9 Aug 2022 17:23:10 -0400 Subject: [PATCH 1/3] ref(test): Support setting data when building spans with SpanBuilder --- tests/sentry/spans/grouping/test_strategy.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/sentry/spans/grouping/test_strategy.py b/tests/sentry/spans/grouping/test_strategy.py index c1849f1452a011..99123d689beb4a 100644 --- a/tests/sentry/spans/grouping/test_strategy.py +++ b/tests/sentry/spans/grouping/test_strategy.py @@ -60,6 +60,10 @@ def with_hash(self, hash: str) -> "SpanBuilder": self.hash = hash return self + def with_data(self, data: Any) -> "SpanBuilder": + self.data = data + return self + def build(self) -> Span: span = { "trace_id": self.trace_id, From c20017ddd4e86636b7f45f9208435e923aed546a Mon Sep 17 00:00:00 2001 From: Matt Quinn Date: Tue, 9 Aug 2022 17:25:15 -0400 Subject: [PATCH 2/3] feat(perf): Detect "Uncompressed Asset" performance issues Large assets can affect First Contentful Paint (FCP) and overall page load time. One easy way to make text assets smaller is to apply HTTP compression to them. Mark transactions containing uncompressed script or CSS resource loads (over a configurable minimum size, currently 500 KiB) as having an Uncompressed Asset performance issue. --- src/sentry/tasks/performance_detection.py | 66 ++++++++++++++++ .../tasks/test_performance_detection.py | 76 +++++++++++++++++++ 2 files changed, 142 insertions(+) diff --git a/src/sentry/tasks/performance_detection.py b/src/sentry/tasks/performance_detection.py index 76f9bfb57b971e..20d3000cc85b5d 100644 --- a/src/sentry/tasks/performance_detection.py +++ b/src/sentry/tasks/performance_detection.py @@ -20,6 +20,7 @@ class DetectorType(Enum): SLOW_SPAN = "slow_span" DUPLICATE_SPANS = "duplicate_spans" SEQUENTIAL_SLOW_SPANS = "sequential_slow_spans" + UNCOMPRESSED_ASSET_SPAN = "uncompressed_asset" # Facade in front of performance detection to limit impact of detection on our events ingestion @@ -55,6 +56,10 @@ def get_detection_settings(): "duration_threshold": 500.0, # ms "allowed_span_ops": ["db", "http"], }, + DetectorType.UNCOMPRESSED_ASSET_SPAN: { + "size_threshold_bytes": 500 * 1024, + "allowed_span_ops": ["resource.css", "resource.script"], + }, } @@ -67,6 +72,7 @@ def _detect_performance_issue(data: Event, sdk_span: Any): DetectorType.DUPLICATE_SPANS: DuplicateSpanDetector(detection_settings), DetectorType.SLOW_SPAN: SlowSpanDetector(detection_settings), DetectorType.SEQUENTIAL_SLOW_SPANS: SequentialSlowSpanDetector(detection_settings), + DetectorType.UNCOMPRESSED_ASSET_SPAN: UncompressedAssetSpanDetector(detection_settings), } for span in spans: @@ -126,6 +132,18 @@ def _detect_performance_issue(data: Event, sdk_span: Any): }, ) + uncompressed_asset_performance_issues = detectors[ + DetectorType.UNCOMPRESSED_ASSET_SPAN + ].stored_issues + uncompressed_asset_fingerprints = list(uncompressed_asset_performance_issues.keys()) + if uncompressed_asset_fingerprints: + first_uncompressed_asset_span = uncompressed_asset_performance_issues[ + uncompressed_asset_fingerprints[0] + ] + sdk_span.containing_transaction.set_tag( + "_pi_uncompressed_asset", first_uncompressed_asset_span["span_id"] + ) + # Creates a stable fingerprint given the same span details using sha1. 
def fingerprint_span(span: Span): @@ -331,3 +349,51 @@ def visit_span(self, span: Span): fingerprint ] >= timedelta(milliseconds=duration_threshold): self.stored_issues[fingerprint] = {"span_id": span_id} + + +class UncompressedAssetSpanDetector(PerformanceDetector): + """ + Checks for large assets that are affecting load time. + """ + + __slots__ = "stored_issues" + + settings_key = DetectorType.UNCOMPRESSED_ASSET_SPAN + + def init(self): + self.stored_issues = {} + + def visit_span(self, span: Span) -> None: + op = span.get("op", None) + if not op: + return + + allowed_span_ops = self.settings.get("allowed_span_ops") + if op not in allowed_span_ops: + return + + data = span.get("data", None) + transfer_size = data and data.get("Transfer Size", None) + encoded_body_size = data and data.get("Encoded Body Size", None) + decoded_body_size = data and data.get("Decoded Body Size", None) + if not (encoded_body_size and decoded_body_size and transfer_size): + return + + # Ignore assets from cache, either directly (nothing transferred) or via + # a 304 Not Modified response (transfer is smaller than asset size). + if transfer_size <= 0 or transfer_size < encoded_body_size: + return + + # Ignore assets that are already compressed. + if encoded_body_size != decoded_body_size: + return + + # Ignore assets that aren't big enough to worry about. + size_threshold_bytes = self.settings.get("size_threshold_bytes") + if encoded_body_size < size_threshold_bytes: + return + + fingerprint = fingerprint_span_op(span) + span_id = span.get("span_id", None) + if fingerprint and span_id and not self.stored_issues.get(fingerprint, False): + self.stored_issues[fingerprint] = {"span_id": span_id} diff --git a/tests/sentry/tasks/test_performance_detection.py b/tests/sentry/tasks/test_performance_detection.py index 5ba083221b9f3c..e0e1e929923410 100644 --- a/tests/sentry/tasks/test_performance_detection.py +++ b/tests/sentry/tasks/test_performance_detection.py @@ -253,3 +253,79 @@ def test_calls_detect_sequential(self): ), ] ) + + def test_calls_detect_uncompressed_assets(self): + def asset_event(transfer_size=0, encoded_body_size=0, decoded_body_size=0): + return { + "event_id": "a" * 16, + "spans": [ + modify_span_duration( + SpanBuilder() + .with_op("resource.script") + .with_description("https://example.com/app.js") + .with_data( + { + "Transfer Size": transfer_size, + "Encoded Body Size": encoded_body_size, + "Decoded Body Size": decoded_body_size, + } + ) + .build(), + 100.0, + ) + ], + } + + insignificant_size_bytes = 500 + significant_size_bytes = 5 * 1024**2 + + compressed_asset_event = asset_event( + transfer_size=significant_size_bytes, + encoded_body_size=significant_size_bytes, + decoded_body_size=2 * significant_size_bytes, + ) + small_uncompressed_asset_event = asset_event( + transfer_size=insignificant_size_bytes, + encoded_body_size=insignificant_size_bytes, + decoded_body_size=insignificant_size_bytes, + ) + cached_uncompressed_asset_event = asset_event( + transfer_size=0, + encoded_body_size=significant_size_bytes, + decoded_body_size=significant_size_bytes, + ) + uncompressed_asset_event = asset_event( + transfer_size=significant_size_bytes, + encoded_body_size=significant_size_bytes, + decoded_body_size=significant_size_bytes, + ) + + sdk_span_mock = Mock() + + _detect_performance_issue(compressed_asset_event, sdk_span_mock) + assert sdk_span_mock.containing_transaction.set_tag.call_count == 0 + + _detect_performance_issue(small_uncompressed_asset_event, sdk_span_mock) + assert 
sdk_span_mock.containing_transaction.set_tag.call_count == 0 + + _detect_performance_issue(cached_uncompressed_asset_event, sdk_span_mock) + assert sdk_span_mock.containing_transaction.set_tag.call_count == 0 + + _detect_performance_issue(uncompressed_asset_event, sdk_span_mock) + assert sdk_span_mock.containing_transaction.set_tag.call_count == 3 + sdk_span_mock.containing_transaction.set_tag.assert_has_calls( + [ + call( + "_pi_all_issue_count", + 1, + ), + call( + "_pi_transaction", + "aaaaaaaaaaaaaaaa", + ), + call( + "_pi_uncompressed_asset", + "bbbbbbbbbbbbbbbb", + ), + ] + ) From 5973528fb8b96341c267f0a3c6b9624f6bf3c9b6 Mon Sep 17 00:00:00 2001 From: Matt Quinn Date: Wed, 10 Aug 2022 09:30:05 -0400 Subject: [PATCH 3/3] fix(feat): add uncompressed_asset tag to performance issue detected metric --- src/sentry/tasks/performance_detection.py | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/src/sentry/tasks/performance_detection.py b/src/sentry/tasks/performance_detection.py index 20d3000cc85b5d..0801f4d5e9d220 100644 --- a/src/sentry/tasks/performance_detection.py +++ b/src/sentry/tasks/performance_detection.py @@ -122,16 +122,6 @@ def _detect_performance_issue(data: Event, sdk_span: Any): len(sequential_performance_fingerprints), ) - metrics.incr( - "performance.performance_issue.detected", - instance=str(bool(all_fingerprints)), - tags={ - "duplicates": bool(len(duplicate_performance_fingerprints)), - "slow_span": bool(len(slow_performance_fingerprints)), - "sequential": bool(len(sequential_performance_fingerprints)), - }, - ) - uncompressed_asset_performance_issues = detectors[ DetectorType.UNCOMPRESSED_ASSET_SPAN ].stored_issues @@ -144,6 +134,17 @@ def _detect_performance_issue(data: Event, sdk_span: Any): "_pi_uncompressed_asset", first_uncompressed_asset_span["span_id"] ) + metrics.incr( + "performance.performance_issue.detected", + instance=str(bool(all_fingerprints)), + tags={ + "duplicates": bool(len(duplicate_performance_fingerprints)), + "slow_span": bool(len(slow_performance_fingerprints)), + "sequential": bool(len(sequential_performance_fingerprints)), + "uncompressed_asset": bool(len(uncompressed_asset_fingerprints)), + }, + ) + # Creates a stable fingerprint given the same span details using sha1. def fingerprint_span(span: Span):
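
Note for reviewers: the heuristic that UncompressedAssetSpanDetector applies to each resource span condenses to four checks against the resource-timing fields carried in span data. The sketch below is illustrative only and is not part of the patch: the is_uncompressed_asset helper and the example spans are invented for this note, while the field names ("Transfer Size", "Encoded Body Size", "Decoded Body Size"), the allowed span ops, and the 500 KiB threshold are taken from the detector and its settings above.

    from typing import Any, Mapping

    SIZE_THRESHOLD_BYTES = 500 * 1024
    ALLOWED_SPAN_OPS = ("resource.css", "resource.script")


    def is_uncompressed_asset(span: Mapping[str, Any]) -> bool:
        # Only script and stylesheet resource spans are considered.
        if span.get("op") not in ALLOWED_SPAN_OPS:
            return False

        data = span.get("data") or {}
        transfer_size = data.get("Transfer Size")
        encoded_body_size = data.get("Encoded Body Size")
        decoded_body_size = data.get("Decoded Body Size")
        if not (transfer_size and encoded_body_size and decoded_body_size):
            return False

        # Served from cache: nothing was transferred, or a 304 Not Modified
        # response whose transfer is smaller than the asset itself.
        if transfer_size <= 0 or transfer_size < encoded_body_size:
            return False

        # Already compressed: the wire (encoded) size differs from the decoded size.
        if encoded_body_size != decoded_body_size:
            return False

        # Too small to be worth flagging.
        return encoded_body_size >= SIZE_THRESHOLD_BYTES


    # A 5 MiB script served without compression trips the heuristic;
    # the same asset served from cache (nothing transferred) does not.
    big = 5 * 1024**2
    assert is_uncompressed_asset(
        {
            "op": "resource.script",
            "data": {"Transfer Size": big, "Encoded Body Size": big, "Decoded Body Size": big},
        }
    )
    assert not is_uncompressed_asset(
        {
            "op": "resource.script",
            "data": {"Transfer Size": 0, "Encoded Body Size": big, "Decoded Body Size": big},
        }
    )

The production detector differs from this sketch only in bookkeeping: it records one stored issue per span-op fingerprint and tags the containing transaction with the first matching span's id, as exercised by test_calls_detect_uncompressed_assets above.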