Skip to content

Commit e7211bb

Browse files
committed
use timeseries
1 parent 9f2851b commit e7211bb

File tree

2 files changed

+200
-76
lines changed

src/sentry/integrations/slack/unfurl/explore.py

Lines changed: 104 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,18 @@
11
from __future__ import annotations
22

33
import html
4+
import itertools
45
import logging
56
import re
67
from collections.abc import Mapping
7-
from datetime import timedelta
8+
from datetime import datetime, timedelta, timezone
89
from typing import Any
910
from urllib.parse import urlparse
1011

1112
from django.http.request import QueryDict
1213

1314
from sentry import analytics, features
14-
from sentry.api import client
15+
from sentry.api.serializers.snuba import calculate_time_frame, zerofill
1516
from sentry.charts import backend as charts
1617
from sentry.charts.types import ChartType
1718
from sentry.integrations.messaging.metrics import (
@@ -24,13 +25,17 @@
2425
from sentry.integrations.slack.message_builder.discover import SlackDiscoverMessageBuilder
2526
from sentry.integrations.slack.spec import SlackMessagingSpec
2627
from sentry.integrations.slack.unfurl.types import Handler, UnfurlableUrl, UnfurledUrl
27-
from sentry.models.apikey import ApiKey
2828
from sentry.models.organization import Organization
29+
from sentry.models.project import Project
30+
from sentry.search.eap.types import SearchResolverConfig
31+
from sentry.search.events.types import SnubaParams
2932
from sentry.snuba.referrer import Referrer
33+
from sentry.snuba.spans_rpc import Spans
3034
from sentry.users.models.user import User
3135
from sentry.users.services.user import RpcUser
3236
from sentry.utils import json
33-
from sentry.utils.dates import get_interval_from_range, parse_stats_period
37+
from sentry.utils.dates import get_interval_from_range, parse_stats_period, parse_timestamp
38+
from sentry.utils.snuba import SnubaTSResult
3439

3540
_logger = logging.getLogger(__name__)
3641

@@ -50,12 +55,37 @@
5055
"p100",
5156
"percentile",
5257
"avg",
53-
"sum",
5458
}
5559

5660
TOP_N = 5
5761

5862

63+
def snuba_ts_result_to_event_stats(result: SnubaTSResult, column: str) -> dict[str, Any]:
    """
    Converts a SnubaTSResult into the events-stats response format that
    Chartcuterie expects.

    Rows sharing the same ``time`` bucket are grouped together; each row
    contributes a ``{"count": <column value>}`` entry (0 when the column is
    absent). Gaps in the series are filled via ``zerofill`` and the response
    is annotated with the computed start/end timeframe.
    """
    # Group consecutive rows by their timestamp bucket, converting each
    # bucket's rows into Chartcuterie-style count entries in a single pass.
    series = [
        (bucket_ts, [{"count": row.get(column, 0)} for row in bucket_rows])
        for bucket_ts, bucket_rows in itertools.groupby(
            result.data["data"], key=lambda r: r["time"]
        )
    ]

    stats: dict[str, Any] = {
        "data": zerofill(series, result.start, result.end, result.rollup),
        "isMetricsData": result.data.get("isMetricsData", False),
    }

    window = calculate_time_frame(result.start, result.end, result.rollup)
    stats["start"] = window["start"]
    stats["end"] = window["end"]

    return stats
87+
88+
5989
def unfurl_explore(
6090
integration: Integration | RpcIntegration,
6191
links: list[UnfurlableUrl],
@@ -96,7 +126,6 @@ def _unfurl_explore(
96126
y_axes = params.getlist("yAxis")
97127
if not y_axes:
98128
y_axes = [DEFAULT_Y_AXIS]
99-
params.setlist("yAxis", y_axes)
100129

101130
group_bys = params.getlist("field")
102131

@@ -105,45 +134,90 @@ def _unfurl_explore(
105134
y_axis = y_axes[0]
106135
aggregate_fn = y_axis.split("(")[0]
107136
if aggregate_fn in LINE_PLOT_FIELDS:
108-
display_mode = "top5line"
109137
style = ChartType.SLACK_DISCOVER_TOP5_PERIOD_LINE
110138
else:
111-
display_mode = "top5"
112139
style = ChartType.SLACK_DISCOVER_TOP5_PERIOD
113-
params.setlist("topEvents", [str(TOP_N)])
114140
else:
115-
display_mode = "default"
116141
style = ChartType.SLACK_DISCOVER_TOTAL_PERIOD
117142

118-
# Compute interval from time range
119-
delta = timedelta(days=90)
120-
if "statsPeriod" in params:
121-
if (parsed_period := parse_stats_period(params["statsPeriod"])) is not None:
143+
# Compute time range
144+
now = datetime.now(tz=timezone.utc)
145+
stats_period = params.get("statsPeriod")
146+
start_param = params.get("start")
147+
end_param = params.get("end")
148+
149+
if stats_period:
150+
parsed_period = parse_stats_period(stats_period)
151+
if parsed_period is not None:
122152
delta = parsed_period
123-
elif not params.get("statsPeriod") and not params.get("start"):
124-
params["statsPeriod"] = DEFAULT_PERIOD
153+
else:
154+
delta = timedelta(days=14)
155+
end = now
156+
start = end - delta
157+
elif start_param and end_param:
158+
parsed_start = parse_timestamp(start_param)
159+
parsed_end = parse_timestamp(end_param)
160+
if parsed_start is not None and parsed_end is not None:
161+
start = parsed_start
162+
end = parsed_end
163+
delta = end - start
164+
else:
165+
delta = timedelta(days=14)
166+
end = now
167+
start = end - delta
168+
else:
125169
delta = timedelta(days=14)
126-
127-
if "daily" in display_mode:
128-
params.setlist("interval", ["1d"])
170+
end = now
171+
start = end - delta
172+
173+
rollup = get_interval_from_range(delta, False)
174+
parsed_rollup = parse_stats_period(rollup)
175+
granularity_secs = int(parsed_rollup.total_seconds()) if parsed_rollup else 3600
176+
177+
# Resolve project IDs
178+
project_ids = [int(p) for p in params.getlist("project") if p]
179+
if project_ids:
180+
projects = list(
181+
Project.objects.filter(organization=org, id__in=project_ids).values_list(
182+
"id", flat=True
183+
)
184+
)
129185
else:
130-
interval = get_interval_from_range(delta, False)
131-
params.setlist("interval", [interval])
186+
projects = list(
187+
Project.objects.filter(organization=org).values_list("id", flat=True)[:10]
188+
)
132189

133-
params["referrer"] = Referrer.EXPLORE_SLACK_UNFURL.value
190+
snuba_params = SnubaParams(
191+
start=start,
192+
end=end,
193+
granularity_secs=granularity_secs,
194+
organization=org,
195+
projects=Project.objects.filter(id__in=projects),
196+
environments=[],
197+
)
198+
199+
query_string = params.get("query", "")
200+
201+
config = SearchResolverConfig(
202+
auto_fields=False,
203+
use_aggregate_conditions=True,
204+
)
134205

135206
try:
136-
resp = client.get(
137-
auth=ApiKey(organization_id=org.id, scope_list=["org:read"]),
138-
user=user,
139-
path=f"/organizations/{org_slug}/events-stats/",
140-
params=params,
207+
result = Spans.run_timeseries_query(
208+
params=snuba_params,
209+
query_string=query_string,
210+
y_axes=y_axes,
211+
referrer=Referrer.EXPLORE_SLACK_UNFURL.value,
212+
config=config,
213+
sampling_mode=None,
141214
)
142215
except Exception:
143-
_logger.warning("Failed to load events-stats for explore unfurl")
216+
_logger.warning("Failed to load timeseries data for explore unfurl")
144217
continue
145218

146-
chart_data = {"seriesName": params.get("yAxis"), "stats": resp.data}
219+
stats = snuba_ts_result_to_event_stats(result, y_axes[0])
220+
chart_data = {"seriesName": y_axes[0], "stats": stats}
147221

148222
try:
149223
url = charts.generate_chart(style, chart_data)
@@ -196,10 +270,9 @@ def map_explore_query_args(url: str, args: Mapping[str, str | None]) -> Mapping[
196270
if not y_axes:
197271
y_axes = [DEFAULT_Y_AXIS]
198272

199-
# Build query params for events-stats endpoint
273+
# Build query params
200274
query = QueryDict(mutable=True)
201275
query.setlist("yAxis", y_axes)
202-
query["dataset"] = "spans"
203276

204277
if group_bys:
205278
query.setlist("field", group_bys)

0 commit comments

Comments (0)