Skip to content

Commit aec7539

Browse files
Merge branch 'master' into dynamic-max-pages
2 parents 4ac7e82 + 6849ba6 commit aec7539

File tree

19 files changed

+635
-166
lines changed

19 files changed

+635
-166
lines changed

.github/CODEOWNERS

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -276,6 +276,8 @@ pnpm-lock.yaml @getsentry/owners-js-de
276276
/src/sentry/api/endpoints/organization_measurements_meta.py @getsentry/data-browsing
277277
src/sentry/api/endpoints/organization_attribute_mappings.py @getsentry/data-browsing
278278

279+
/src/sentry/integrations/slack/unfurl/explore.py @getsentry/data-browsing
280+
279281
/tests/snuba/api/endpoints/* @getsentry/data-browsing
280282
/tests/snuba/api/endpoints/test_organization_tags.py @getsentry/data-browsing
281283
/tests/snuba/api/endpoints/test_organization_events_histogram.py @getsentry/data-browsing

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -101,7 +101,7 @@ dependencies = [
101101
"statsd>=3.3.0",
102102
"structlog>=22.1.0",
103103
"symbolic>=12.14.1",
104-
"taskbroker-client>=0.1.7",
104+
"taskbroker-client>=0.1.8",
105105
"tiktoken>=0.8.0",
106106
"tokenizers>=0.22.0",
107107
"tldextract>=5.1.2",

src/sentry/features/temporary.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -577,7 +577,7 @@ def register_temporary_features(manager: FeatureManager) -> None:
577577
# Enable lightweight RCA clustering write path (generate embeddings on new issues)
578578
manager.add("organizations:supergroups-lightweight-rca-clustering-write", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
579579
# Enable lightweight RCA clustering read path (query lightweight embeddings in supergroup APIs)
580-
manager.add("organizations:supergroups-lightweight-rca-clustering-read", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
580+
manager.add("organizations:supergroups-lightweight-rca-clustering-read", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
581581

582582
manager.add("projects:workflow-engine-performance-detectors", ProjectFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
583583

src/sentry/integrations/msteams/webhook.py

Lines changed: 60 additions & 57 deletions
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,7 @@
4545
IntegrationProviderSlug,
4646
IntegrationResponse,
4747
)
48+
from sentry.integrations.utils.webhook_viewer_context import webhook_viewer_context
4849
from sentry.models.activity import ActivityIntegration
4950
from sentry.models.apikey import ApiKey
5051
from sentry.models.group import Group
@@ -434,18 +435,19 @@ def _handle_team_member_removed(self, request: Request) -> Response:
434435
)
435436
if len(org_integrations) > 0:
436437
for org_integration in org_integrations:
437-
create_audit_entry(
438-
request=request,
439-
organization_id=org_integration.organization_id,
440-
target_object=integration.id,
441-
event=audit_log.get_event_id("INTEGRATION_REMOVE"),
442-
actor_label="Teams User",
443-
data={
444-
"provider": integration.provider,
445-
"name": integration.name,
446-
"team_id": team_id,
447-
},
448-
)
438+
with webhook_viewer_context(org_integration.organization_id):
439+
create_audit_entry(
440+
request=request,
441+
organization_id=org_integration.organization_id,
442+
target_object=integration.id,
443+
event=audit_log.get_event_id("INTEGRATION_REMOVE"),
444+
actor_label="Teams User",
445+
data={
446+
"provider": integration.provider,
447+
"name": integration.name,
448+
"team_id": team_id,
449+
},
450+
)
449451

450452
integration_service.delete_integration(integration_id=integration.id)
451453
return self.respond(status=204)
@@ -584,60 +586,61 @@ def _handle_action_submitted(self, request: Request) -> Response:
584586
)
585587
return self.respond(status=404)
586588

587-
idp = identity_service.get_provider(
588-
provider_type=IntegrationProviderSlug.MSTEAMS.value, provider_ext_id=team_id
589-
)
590-
if idp is None:
591-
logger.info(
592-
"msteams.action.invalid-team-id",
593-
extra={
594-
"team_id": team_id,
595-
"integration_id": integration.id,
596-
"organization_id": group.organization.id,
597-
},
589+
with webhook_viewer_context(group.organization.id):
590+
idp = identity_service.get_provider(
591+
provider_type=IntegrationProviderSlug.MSTEAMS.value, provider_ext_id=team_id
598592
)
599-
return self.respond(status=404)
593+
if idp is None:
594+
logger.info(
595+
"msteams.action.invalid-team-id",
596+
extra={
597+
"team_id": team_id,
598+
"integration_id": integration.id,
599+
"organization_id": group.organization.id,
600+
},
601+
)
602+
return self.respond(status=404)
600603

601-
identity = identity_service.get_identity(
602-
filter={"provider_id": idp.id, "identity_ext_id": user_id}
603-
)
604-
if identity is None:
605-
associate_url = build_linking_url(
606-
integration, group.organization, user_id, team_id, tenant_id
604+
identity = identity_service.get_identity(
605+
filter={"provider_id": idp.id, "identity_ext_id": user_id}
607606
)
607+
if identity is None:
608+
associate_url = build_linking_url(
609+
integration, group.organization, user_id, team_id, tenant_id
610+
)
608611

609-
card = build_linking_card(associate_url)
610-
user_conversation_id = client.get_user_conversation_id(user_id, tenant_id)
611-
client.send_card(user_conversation_id, card)
612-
return self.respond(status=201)
612+
card = build_linking_card(associate_url)
613+
user_conversation_id = client.get_user_conversation_id(user_id, tenant_id)
614+
client.send_card(user_conversation_id, card)
615+
return self.respond(status=201)
613616

614-
# update the state of the issue
615-
issue_change_response = self._issue_state_change(group, identity, data["value"])
617+
# update the state of the issue
618+
issue_change_response = self._issue_state_change(group, identity, data["value"])
616619

617-
# get the rules from the payload
618-
rules = tuple(Rule.objects.filter(id__in=payload["rules"]))
620+
# get the rules from the payload
621+
rules = tuple(Rule.objects.filter(id__in=payload["rules"]))
619622

620-
# pull the event based off our payload
621-
event = eventstore.backend.get_event_by_id(group.project_id, payload["eventId"])
622-
if event is None:
623-
logger.info(
624-
"msteams.action.event-missing",
625-
extra={
626-
"team_id": team_id,
627-
"integration_id": integration.id,
628-
"organization_id": group.organization.id,
629-
"event_id": payload["eventId"],
630-
"project_id": group.project_id,
631-
},
632-
)
633-
return self.respond(status=404)
623+
# pull the event based off our payload
624+
event = eventstore.backend.get_event_by_id(group.project_id, payload["eventId"])
625+
if event is None:
626+
logger.info(
627+
"msteams.action.event-missing",
628+
extra={
629+
"team_id": team_id,
630+
"integration_id": integration.id,
631+
"organization_id": group.organization.id,
632+
"event_id": payload["eventId"],
633+
"project_id": group.project_id,
634+
},
635+
)
636+
return self.respond(status=404)
634637

635-
# refresh issue and update card
636-
group.refresh_from_db()
637-
card = MSTeamsIssueMessageBuilder(group, event, rules, integration).build_group_card()
638-
client.update_card(conversation_id, activity_id, card)
638+
# refresh issue and update card
639+
group.refresh_from_db()
640+
card = MSTeamsIssueMessageBuilder(group, event, rules, integration).build_group_card()
641+
client.update_card(conversation_id, activity_id, card)
639642

640-
return issue_change_response
643+
return issue_change_response
641644

642645
def _handle_channel_message(self, request: Request) -> Response:
643646
data = request.data

src/sentry/integrations/slack/unfurl/explore.py

Lines changed: 54 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
import logging
55
import re
66
from collections.abc import Mapping
7-
from typing import Any
7+
from typing import Any, TypedDict
88
from urllib.parse import urlparse
99

1010
from django.http.request import QueryDict
@@ -25,6 +25,7 @@
2525
from sentry.integrations.slack.unfurl.types import Handler, UnfurlableUrl, UnfurledUrl
2626
from sentry.models.apikey import ApiKey
2727
from sentry.models.organization import Organization
28+
from sentry.search.eap.types import SupportedTraceItemType
2829
from sentry.snuba.referrer import Referrer
2930
from sentry.users.models.user import User
3031
from sentry.users.services.user import RpcUser
@@ -33,10 +34,40 @@
3334
_logger = logging.getLogger(__name__)
3435

3536
DEFAULT_PERIOD = "14d"
36-
DEFAULT_Y_AXIS = "count(span.duration)"
3737
TOP_N = 5
3838

3939

40+
class ExploreDatasetDefaults(TypedDict):
41+
title: str
42+
y_axis: str
43+
44+
45+
EXPLORE_DATASET_DEFAULTS: dict[SupportedTraceItemType, ExploreDatasetDefaults] = {
46+
SupportedTraceItemType.SPANS: {
47+
"title": "Explore Traces",
48+
"y_axis": "count(span.duration)",
49+
},
50+
SupportedTraceItemType.LOGS: {
51+
"title": "Explore Logs",
52+
"y_axis": "count(message)",
53+
},
54+
}
55+
56+
57+
def _get_explore_dataset_defaults(dataset: SupportedTraceItemType) -> ExploreDatasetDefaults:
58+
"""Returns the default title and y_axis for the given explore dataset."""
59+
return EXPLORE_DATASET_DEFAULTS.get(
60+
dataset, EXPLORE_DATASET_DEFAULTS[SupportedTraceItemType.SPANS]
61+
)
62+
63+
64+
def _get_explore_dataset(url: str) -> SupportedTraceItemType:
65+
"""Returns the dataset based on the explore URL."""
66+
if explore_logs_link_regex.match(url) or customer_domain_explore_logs_link_regex.match(url):
67+
return SupportedTraceItemType.LOGS
68+
return SupportedTraceItemType.SPANS
69+
70+
4071
def unfurl_explore(
4172
integration: Integration | RpcIntegration,
4273
links: list[UnfurlableUrl],
@@ -83,9 +114,12 @@ def _unfurl_explore(
83114
params = link.args["query"]
84115
chart_type = link.args.get("chart_type")
85116

117+
explore_dataset = link.args.get("dataset", SupportedTraceItemType.SPANS)
118+
defaults = _get_explore_dataset_defaults(explore_dataset)
119+
86120
y_axes = params.getlist("yAxis")
87121
if not y_axes:
88-
y_axes = [DEFAULT_Y_AXIS]
122+
y_axes = [defaults["y_axis"]]
89123
params.setlist("yAxis", y_axes)
90124

91125
group_bys = params.getlist("groupBy")
@@ -97,7 +131,7 @@ def _unfurl_explore(
97131
if not params.get("statsPeriod") and not params.get("start"):
98132
params["statsPeriod"] = DEFAULT_PERIOD
99133

100-
params["dataset"] = "spans"
134+
params["dataset"] = explore_dataset.value
101135
params["referrer"] = Referrer.EXPLORE_SLACK_UNFURL.value
102136

103137
try:
@@ -124,7 +158,7 @@ def _unfurl_explore(
124158
continue
125159

126160
unfurls[link.url] = SlackDiscoverMessageBuilder(
127-
title="Explore Traces",
161+
title=defaults["title"],
128162
chart_url=url,
129163
).build()
130164

@@ -158,6 +192,8 @@ def map_explore_query_args(url: str, args: Mapping[str, str | None]) -> Mapping[
158192
parsed_url = urlparse(url)
159193
raw_query = QueryDict(parsed_url.query)
160194

195+
explore_dataset = _get_explore_dataset(url)
196+
161197
# Parse visualize (spans explore) or aggregateField (logs explore) JSON params
162198
visualize_fields = raw_query.getlist("visualize") or raw_query.getlist("aggregateField")
163199
y_axes: list[str] = []
@@ -176,7 +212,7 @@ def map_explore_query_args(url: str, args: Mapping[str, str | None]) -> Mapping[
176212
continue
177213

178214
if not y_axes:
179-
y_axes = [DEFAULT_Y_AXIS]
215+
y_axes = [_get_explore_dataset_defaults(explore_dataset)["y_axis"]]
180216

181217
# Build query params
182218
query = QueryDict(mutable=True)
@@ -186,12 +222,12 @@ def map_explore_query_args(url: str, args: Mapping[str, str | None]) -> Mapping[
186222
query.setlist("groupBy", group_bys)
187223

188224
# Copy standard params
189-
for param in ("project", "statsPeriod", "start", "end", "query", "environment"):
225+
for param in ("project", "statsPeriod", "start", "end", "query", "environment", "interval"):
190226
values = raw_query.getlist(param)
191227
if values:
192228
query.setlist(param, values)
193229

194-
return dict(**args, query=query, chart_type=chart_type)
230+
return dict(**args, query=query, chart_type=chart_type, dataset=explore_dataset)
195231

196232

197233
explore_traces_link_regex = re.compile(
@@ -202,11 +238,21 @@ def map_explore_query_args(url: str, args: Mapping[str, str | None]) -> Mapping[
202238
r"^https?\://(?P<org_slug>[^.]+?)\.(?#url_prefix)[^/]+/explore/traces/"
203239
)
204240

241+
explore_logs_link_regex = re.compile(
242+
r"^https?\://(?#url_prefix)[^/]+/organizations/(?P<org_slug>[^/]+)/explore/logs/"
243+
)
244+
245+
customer_domain_explore_logs_link_regex = re.compile(
246+
r"^https?\://(?P<org_slug>[^.]+?)\.(?#url_prefix)[^/]+/explore/logs/"
247+
)
248+
205249
explore_handler = Handler(
206250
fn=unfurl_explore,
207251
matcher=[
208252
explore_traces_link_regex,
209253
customer_domain_explore_traces_link_regex,
254+
explore_logs_link_regex,
255+
customer_domain_explore_logs_link_regex,
210256
],
211257
arg_mapper=map_explore_query_args,
212258
)

src/sentry/runner/commands/run.py

Lines changed: 14 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -135,11 +135,20 @@ def taskworker_scheduler(redis_cluster: str, **options: Any) -> None:
135135

136136

137137
@run.command()
138+
@click.option(
139+
"--push-mode", help="Whether to run in PUSH or PULL mode.", default=False, is_flag=True
140+
)
138141
@click.option(
139142
"--rpc-host",
140-
help="The hostname and port for the taskworker-rpc. When using num-brokers the hostname will be appended with `-{i}` to connect to individual brokers.",
143+
help="The hostname and port for the taskbroker gRPC server. When using num-brokers the hostname will be appended with `-{i}` to connect to individual brokers.",
141144
default="127.0.0.1:50051",
142145
)
146+
@click.option(
147+
"--worker-rpc-port",
148+
help="Port for the taskworker gRPC server to listen on when it is running in push mode.",
149+
default=50052,
150+
type=int,
151+
)
143152
@click.option(
144153
"--num-brokers", help="Number of brokers available to connect to", default=None, type=int
145154
)
@@ -198,6 +207,8 @@ def taskworker(**options: Any) -> None:
198207

199208

200209
def run_taskworker(
210+
push_mode: bool,
211+
worker_rpc_port: int,
201212
rpc_host: str,
202213
num_brokers: int | None,
203214
rpc_host_list: str | None,
@@ -233,6 +244,8 @@ def run_taskworker(
233244
processing_pool_name=processing_pool_name,
234245
health_check_file_path=health_check_file_path,
235246
health_check_sec_per_touch=health_check_sec_per_touch,
247+
grpc_port=worker_rpc_port,
248+
push_mode=push_mode,
236249
**options,
237250
)
238251
exitcode = worker.start()

src/sentry/taskworker/adapters.py

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -59,6 +59,29 @@ def incr(
5959
sample_rate = settings.SENTRY_METRICS_SAMPLE_RATE
6060
sentry_metrics.incr(name, amount=int(value), tags=tags, sample_rate=sample_rate)
6161

62+
def gauge(
63+
self,
64+
key: str,
65+
value: float,
66+
instance: str | None = None,
67+
tags: Tags | None = None,
68+
sample_rate: float | None = None,
69+
unit: str | None = None,
70+
stacklevel: int = 0,
71+
) -> None:
72+
if sample_rate is None:
73+
sample_rate = settings.SENTRY_METRICS_SAMPLE_RATE
74+
75+
return sentry_metrics.gauge(
76+
key,
77+
value,
78+
instance=instance,
79+
tags=tags,
80+
sample_rate=sample_rate,
81+
unit=unit,
82+
stacklevel=stacklevel,
83+
)
84+
6285
def distribution(
6386
self,
6487
name: str,

0 commit comments

Comments (0)