Skip to content

Commit 3c7bdc4

Browse files
authored
Merge branch 'master' into malwilley/uptime-detector-preview
2 parents 92aac72 + 1a2ec01 commit 3c7bdc4

File tree

51 files changed

+1498
-978
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

51 files changed

+1498
-978
lines changed

.github/CODEOWNERS

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -462,11 +462,11 @@ tests/sentry/api/endpoints/test_organization_attribute_mappings.py @get
462462
/src/sentry/identity/ @getsentry/enterprise
463463

464464
/src/sentry/integrations/ @getsentry/product-owners-settings-integrations @getsentry/ecosystem
465-
/src/sentry/integrations/api/endpoints/organization_code_mapping*.py @getsentry/issue-detection-backend
465+
/src/sentry/integrations/api/endpoints/organization_code_mapping*.py @getsentry/coding-workflows-sentry-backend
466466
/src/sentry/integrations/api/endpoints/organization_coding_agents.py @getsentry/machine-learning-ai
467467
/src/sentry/integrations/coding_agent/ @getsentry/machine-learning-ai
468468
/src/sentry/integrations/utils/codecov.py @getsentry/codecov
469-
/src/sentry/integrations/utils/stacktrace_link.py @getsentry/issue-detection-backend
469+
/src/sentry/integrations/utils/stacktrace_link.py @getsentry/coding-workflows-sentry-backend
470470

471471
/src/sentry/mail/ @getsentry/alerts-notifications
472472
/src/sentry/notifications/ @getsentry/alerts-notifications @getsentry/ecosystem
@@ -651,7 +651,7 @@ tests/sentry/api/endpoints/test_organization_attribute_mappings.py @get
651651
/src/sentry/event_manager.py @getsentry/issue-detection-backend
652652
/src/sentry/eventstore/models.py @getsentry/issue-detection-backend
653653
/src/sentry/grouping/ @getsentry/issue-detection-backend
654-
/src/sentry/issues/auto_source_code_config/ @getsentry/issue-detection-backend
654+
/src/sentry/issues/auto_source_code_config/ @getsentry/coding-workflows-sentry-backend
655655
/src/sentry/issues/endpoints/related_issues.py @getsentry/issue-detection-backend
656656
/src/sentry/issues/ingest.py @getsentry/issue-detection-backend
657657
/src/sentry/issues/issue_occurrence.py @getsentry/issue-detection-backend
@@ -671,16 +671,16 @@ tests/sentry/api/endpoints/test_organization_attribute_mappings.py @get
671671
/src/sentry/tasks/auto_ongoing_issues.py @getsentry/issue-detection-backend
672672
/src/sentry/tasks/auto_remove_inbox.py @getsentry/issue-detection-backend
673673
/src/sentry/tasks/auto_resolve_issues.py @getsentry/issue-detection-backend
674-
/src/sentry/tasks/auto_source_code_config.py @getsentry/issue-detection-backend
674+
/src/sentry/tasks/auto_source_code_config.py @getsentry/coding-workflows-sentry-backend
675675
/src/sentry/tasks/check_new_issue_threshold_met.py @getsentry/issue-detection-backend
676676
/src/sentry/tasks/clear_expired_resolutions.py @getsentry/issue-detection-backend
677677
/src/sentry/tasks/clear_expired_rulesnoozes.py @getsentry/issue-detection-backend
678678
/src/sentry/tasks/clear_expired_snoozes.py @getsentry/issue-detection-backend
679-
/src/sentry/tasks/codeowners/ @getsentry/issue-detection-backend
680-
/src/sentry/tasks/commit_context.py @getsentry/issue-detection-backend
679+
/src/sentry/tasks/codeowners/ @getsentry/coding-workflows-sentry-backend
680+
/src/sentry/tasks/commit_context.py @getsentry/coding-workflows-sentry-backend
681681
/src/sentry/tasks/seer/delete_seer_grouping_records.py @getsentry/issue-detection-backend
682682
/src/sentry/tasks/embeddings_grouping/ @getsentry/issue-detection-backend
683-
/src/sentry/tasks/groupowner.py @getsentry/issue-detection-backend
683+
/src/sentry/tasks/groupowner.py @getsentry/coding-workflows-sentry-backend
684684
/src/sentry/tasks/merge.py @getsentry/issue-detection-backend
685685
/src/sentry/tasks/unmerge.py @getsentry/issue-detection-backend
686686
/src/sentry/tasks/weekly_escalating_forecast.py @getsentry/issue-detection-backend
@@ -703,7 +703,7 @@ tests/sentry/api/endpoints/test_organization_attribute_mappings.py @get
703703
/tests/sentry/deletions/test_group.py @getsentry/issue-detection-backend
704704
/tests/sentry/event_manager/ @getsentry/issue-detection-backend
705705
/tests/sentry/grouping/ @getsentry/issue-detection-backend
706-
/tests/sentry/issues/auto_source_code_config/ @getsentry/issue-detection-backend
706+
/tests/sentry/issues/auto_source_code_config/ @getsentry/coding-workflows-sentry-backend
707707
/tests/sentry/issues/endpoints/ @getsentry/issue-workflow
708708
/tests/sentry/issues/endpoints/test_related_issues.py @getsentry/issue-detection-backend
709709
/tests/sentry/issues/test_ingest.py @getsentry/issue-detection-backend
@@ -726,9 +726,9 @@ tests/sentry/api/endpoints/test_organization_attribute_mappings.py @get
726726
/tests/sentry/tasks/test_clear_expired_resolutions.py @getsentry/issue-detection-backend
727727
/tests/sentry/tasks/test_clear_expired_rulesnoozes.py @getsentry/issue-detection-backend
728728
/tests/sentry/tasks/test_clear_expired_snoozes.py @getsentry/issue-detection-backend
729-
/tests/sentry/tasks/test_code_owners.py @getsentry/issue-detection-backend
730-
/tests/sentry/tasks/test_commit_context.py @getsentry/issue-detection-backend
731-
/tests/sentry/tasks/test_groupowner.py @getsentry/issue-detection-backend
729+
/tests/sentry/tasks/test_code_owners.py @getsentry/coding-workflows-sentry-backend
730+
/tests/sentry/tasks/test_commit_context.py @getsentry/coding-workflows-sentry-backend
731+
/tests/sentry/tasks/test_groupowner.py @getsentry/coding-workflows-sentry-backend
732732
/tests/sentry/tasks/test_merge.py @getsentry/issue-detection-backend
733733
/tests/sentry/tasks/test_post_process.py @getsentry/issue-detection-backend
734734
/tests/sentry/tasks/test_weekly_escalating_forecast.py @getsentry/issue-detection-backend

src/sentry/features/temporary.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -450,6 +450,9 @@ def register_temporary_features(manager: FeatureManager) -> None:
450450
# Use workflow engine exclusively for legacy issue alert rule.get results.
451451
# See src/sentry/workflow_engine/docs/legacy_backport.md for context.
452452
manager.add("organizations:workflow-engine-issue-alert-endpoints-get", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
453+
# Use workflow engine exclusively for OrganizationAlertRuleDetailsEndpoint.get results.
454+
# See src/sentry/workflow_engine/docs/legacy_backport.md for context.
455+
manager.add("organizations:workflow-engine-orgalertruledetails-get", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
453456
# Enable metric detector limits by plan type
454457
manager.add("organizations:workflow-engine-metric-detector-limit", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
455458
# Enable EventUniqueUserFrequencyConditionWithConditions special alert condition

src/sentry/incidents/endpoints/bases.py

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -154,6 +154,10 @@ def convert_args(
154154

155155

156156
class WorkflowEngineOrganizationAlertRuleEndpoint(OrganizationAlertRuleEndpoint):
157+
# Subclasses may set a per-method granular flag (e.g. for GET) that is OR'd
158+
# with the broad workflow-engine-rule-serializers flag.
159+
workflow_engine_method_flags: dict[str, str] = {}
160+
157161
def convert_args(
158162
self, request: Request, alert_rule_id: int, *args: Any, **kwargs: Any
159163
) -> tuple[tuple[Any, ...], dict[str, Any]]:
@@ -169,7 +173,10 @@ def convert_args(
169173
):
170174
raise ResourceDoesNotExist
171175

172-
if features.has("organizations:workflow-engine-rule-serializers", organization):
176+
method_flag = self.workflow_engine_method_flags.get(request.method or "")
177+
if features.has("organizations:workflow-engine-rule-serializers", organization) or (
178+
method_flag is not None and features.has(method_flag, organization)
179+
):
173180
try:
174181
ard = AlertRuleDetector.objects.get(
175182
alert_rule_id=validated_alert_rule_id,

src/sentry/incidents/endpoints/organization_alert_rule_details.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -333,6 +333,9 @@ def wrapper(
333333
@extend_schema(tags=["Alerts"])
334334
@cell_silo_endpoint
335335
class OrganizationAlertRuleDetailsEndpoint(WorkflowEngineOrganizationAlertRuleEndpoint):
336+
workflow_engine_method_flags = {
337+
"GET": "organizations:workflow-engine-orgalertruledetails-get",
338+
}
336339
owner = ApiOwner.ISSUES
337340
publish_status = {
338341
"DELETE": ApiPublishStatus.PUBLIC,

src/sentry/integrations/github/webhook_types.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,10 @@ class GithubWebhookType(StrEnum):
2222

2323

2424
# Event type strings (X-GitHub-Event header values) that the cell webhook endpoint processes.
25-
# INSTALLATION is handled in control only.
25+
# INSTALLATION and INSTALLATION_REPOSITORIES are handled in control only.
26+
_CONTROL_ONLY_EVENTS = frozenset(
27+
{GithubWebhookType.INSTALLATION, GithubWebhookType.INSTALLATION_REPOSITORIES}
28+
)
2629
CELL_PROCESSED_GITHUB_EVENTS = frozenset(
27-
t.value for t in GithubWebhookType if t != GithubWebhookType.INSTALLATION
30+
t.value for t in GithubWebhookType if t not in _CONTROL_ONLY_EVENTS
2831
)

src/sentry/middleware/integrations/parsers/github.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,9 +15,9 @@
1515
get_github_external_id,
1616
)
1717
from sentry.integrations.github.webhook_types import (
18+
_CONTROL_ONLY_EVENTS,
1819
CELL_PROCESSED_GITHUB_EVENTS,
1920
GITHUB_WEBHOOK_TYPE_HEADER,
20-
GithubWebhookType,
2121
)
2222
from sentry.integrations.middleware.hybrid_cloud.parser import BaseRequestParser
2323
from sentry.integrations.models.integration import Integration
@@ -77,7 +77,7 @@ def get_mailbox_identifier(
7777
def should_route_to_control_silo(
7878
self, parsed_event: Mapping[str, Any], request: HttpRequest
7979
) -> bool:
80-
return request.META.get(GITHUB_WEBHOOK_TYPE_HEADER) == GithubWebhookType.INSTALLATION
80+
return request.META.get(GITHUB_WEBHOOK_TYPE_HEADER) in _CONTROL_ONLY_EVENTS
8181

8282
@control_silo_function
8383
def get_integration_from_request(self) -> Integration | None:

src/sentry/options/defaults.py

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -928,6 +928,37 @@
928928
flags=FLAG_AUTOMATOR_MODIFIABLE,
929929
)
930930
register("snuba.search.hits-sample-size", default=100, flags=FLAG_AUTOMATOR_MODIFIABLE)
931+
register(
932+
"snuba.search.recommended.recency-weight",
933+
default=0.20,
934+
flags=FLAG_AUTOMATOR_MODIFIABLE,
935+
)
936+
register(
937+
"snuba.search.recommended.spike-weight",
938+
default=0.20,
939+
flags=FLAG_AUTOMATOR_MODIFIABLE,
940+
)
941+
register(
942+
"snuba.search.recommended.severity-weight",
943+
default=0.20,
944+
flags=FLAG_AUTOMATOR_MODIFIABLE,
945+
)
946+
register(
947+
"snuba.search.recommended.user-impact-weight",
948+
default=0.05,
949+
flags=FLAG_AUTOMATOR_MODIFIABLE,
950+
)
951+
register(
952+
"snuba.search.recommended.event-volume-weight",
953+
default=0.20,
954+
flags=FLAG_AUTOMATOR_MODIFIABLE,
955+
)
956+
register(
957+
"snuba.search.recommended.group-type-boost",
958+
type=Dict,
959+
default={7001: 0.15},
960+
flags=FLAG_ALLOW_EMPTY | FLAG_AUTOMATOR_MODIFIABLE,
961+
)
931962
register("snuba.track-outcomes-sample-rate", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE)
932963

933964
# The percentage of tagkeys that we want to cache. Set to 1.0 in order to cache everything, <=0.0 to stop caching

src/sentry/search/snuba/executors.py

Lines changed: 92 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -269,7 +269,7 @@ def _prepare_aggregations(
269269
end: datetime,
270270
having: Sequence[Sequence[Any]],
271271
aggregate_kwargs: TrendsSortWeights | None = None,
272-
replace_trends_aggregation: bool | None = False,
272+
use_issue_platform: bool = False,
273273
) -> list[Any]:
274274
extra_aggregations = self.dependency_aggregations.get(sort_field, [])
275275
required_aggregations = set([sort_field, "total"] + extra_aggregations)
@@ -280,8 +280,8 @@ def _prepare_aggregations(
280280
aggregations = []
281281
for alias in required_aggregations:
282282
aggregation = self.aggregation_defs[alias]
283-
if replace_trends_aggregation and alias == "trends":
284-
aggregation = self.aggregation_defs["trends_issue_platform"]
283+
if use_issue_platform and alias in ("trends", "recommended"):
284+
aggregation = self.aggregation_defs[f"{alias}_issue_platform"]
285285
if callable(aggregation):
286286
if aggregate_kwargs:
287287
aggregation = aggregation(start, end, aggregate_kwargs.get(alias, {}))
@@ -333,14 +333,10 @@ def _prepare_params_for_category(
333333
else:
334334
conditions.append(converted_filter)
335335

336-
if sort_field == "trends" and group_category is not GroupCategory.ERROR.value:
337-
aggregations = self._prepare_aggregations(
338-
sort_field, start, end, having, aggregate_kwargs, True
339-
)
340-
else:
341-
aggregations = self._prepare_aggregations(
342-
sort_field, start, end, having, aggregate_kwargs
343-
)
336+
use_issue_platform = group_category is not GroupCategory.ERROR.value
337+
aggregations = self._prepare_aggregations(
338+
sort_field, start, end, having, aggregate_kwargs, use_issue_platform
339+
)
344340

345341
if cursor is not None:
346342
having.append((sort_field, ">=" if cursor.is_prev else "<=", cursor.value))
@@ -700,18 +696,100 @@ def trends_aggregation_impl(
700696
]
701697

702698

699+
def _recommended_aggregation(
700+
timestamp_column: str, type_column: str | None = None
701+
) -> Sequence[str]:
702+
hour = 3600
703+
704+
# Recency: exponential decay based on time since last event (24hr halflife)
705+
recency_weight = options.get("snuba.search.recommended.recency-weight")
706+
age_hours = f"divide(minus(now(), max({timestamp_column})), {hour})"
707+
recency = f"divide(1, pow(2, divide({age_hours}, 24)))"
708+
709+
# Spike: ratio of recent 6hr events to total 3d events
710+
spike_weight = options.get("snuba.search.recommended.spike-weight")
711+
recent_6h = f"countIf(lessOrEquals(minus(now(), {timestamp_column}), {6 * hour}))"
712+
total_3d = f"countIf(lessOrEquals(minus(now(), {timestamp_column}), {3 * 24 * hour}))"
713+
spike = f"least(1.0, divide({recent_6h}, plus({total_3d}, 1)))"
714+
715+
# Severity: max log level - maps fatal=1.0, error=0.75, warning=0.5, info=0.25, debug=0.0
716+
severity_weight = options.get("snuba.search.recommended.severity-weight")
717+
severity = (
718+
"max(multiIf("
719+
"equals(level, 'fatal'), 1.0, "
720+
"equals(level, 'error'), 0.75, "
721+
"equals(level, 'warning'), 0.5, "
722+
"equals(level, 'info'), 0.25, "
723+
"0.0))"
724+
)
725+
726+
# User impact: ln(uniq(tags[sentry:user]) + 1)/ln(1001) - maps 1→~0, 10→0.33, 100→0.67, 1000→1.0
727+
user_impact_weight = options.get("snuba.search.recommended.user-impact-weight")
728+
user_impact = "least(1.0, divide(log(plus(uniq(tags[sentry:user]), 1)), log(1001)))"
729+
730+
# Event volume: ln(count() + 1)/ln(10001) - maps 1→~0, 10→0.25, 100→0.50, 1000→0.75, 10000+→1.0
731+
event_volume_weight = options.get("snuba.search.recommended.event-volume-weight")
732+
event_volume = "least(1.0, divide(log(plus(count(), 1)), log(10001)))"
733+
734+
# Group type boost: additive signal per issue type
735+
group_type_boosts = options.get("snuba.search.recommended.group-type-boost")
736+
if group_type_boosts:
737+
type_expr = f"any({type_column})" if type_column else "1"
738+
conditions = []
739+
for type_id, boost in group_type_boosts.items():
740+
conditions.append(f"equals({type_expr}, {type_id}), {boost}")
741+
type_boost = f"multiIf({', '.join(conditions)}, 0.0)"
742+
else:
743+
type_boost = "0.0"
744+
745+
return [
746+
(
747+
f"plus(plus(plus(plus(plus("
748+
f"multiply({recency_weight}, {recency}), "
749+
f"multiply({spike_weight}, {spike})), "
750+
f"multiply({severity_weight}, {severity})), "
751+
f"multiply({user_impact_weight}, {user_impact})), "
752+
f"multiply({event_volume_weight}, {event_volume})), "
753+
f"{type_boost})"
754+
),
755+
"",
756+
]
757+
758+
759+
def recommended_aggregation(
760+
start: datetime,
761+
end: datetime,
762+
aggregate_kwargs: Any = None,
763+
) -> Sequence[str]:
764+
return _recommended_aggregation(timestamp_column="timestamp")
765+
766+
767+
def recommended_issue_platform_aggregation(
768+
start: datetime,
769+
end: datetime,
770+
aggregate_kwargs: Any = None,
771+
) -> Sequence[str]:
772+
return _recommended_aggregation(
773+
timestamp_column="client_timestamp", type_column="occurrence_type_id"
774+
)
775+
776+
703777
class PostgresSnubaQueryExecutor(AbstractQueryExecutor):
704778
ISSUE_FIELD_NAME = "group_id"
705779

706780
logger = logging.getLogger("sentry.search.postgressnuba")
707-
dependency_aggregations = {"trends": ["last_seen", "times_seen"]}
781+
dependency_aggregations = {
782+
"trends": ["last_seen", "times_seen"],
783+
"recommended": ["last_seen", "times_seen", "user_count"],
784+
}
708785
postgres_only_fields = {*SKIP_SNUBA_FIELDS, "regressed_in_release"}
709786
# add specific fields here on top of skip_snuba_fields from the serializer
710787
sort_strategies = {
711788
"date": "last_seen",
712789
"freq": "times_seen",
713790
"new": "first_seen",
714791
"trends": "trends",
792+
"recommended": "recommended",
715793
"user": "user_count",
716794
# We don't need a corresponding snuba field here, since this sort only happens
717795
# in Postgres
@@ -723,10 +801,12 @@ class PostgresSnubaQueryExecutor(AbstractQueryExecutor):
723801
"first_seen": ["multiply(toUInt64(min(coalesce(group_first_seen, timestamp))), 1000)", ""],
724802
"last_seen": ["multiply(toUInt64(max(timestamp)), 1000)", ""],
725803
"trends": trends_aggregation,
804+
"recommended": recommended_aggregation,
726805
# Only makes sense with WITH TOTALS, returns 1 for an individual group.
727806
"total": ["uniq", ISSUE_FIELD_NAME],
728807
"user_count": ["uniq", "tags[sentry:user]"],
729808
"trends_issue_platform": trends_issue_platform_aggregation,
809+
"recommended_issue_platform": recommended_issue_platform_aggregation,
730810
}
731811

732812
@property

src/sentry/seer/supergroups/endpoints/organization_supergroups_by_group.py

Lines changed: 15 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
from sentry.api.api_publish_status import ApiPublishStatus
1313
from sentry.api.base import cell_silo_endpoint
1414
from sentry.api.bases.organization import OrganizationEndpoint, OrganizationPermission
15-
from sentry.models.group import Group
15+
from sentry.models.group import STATUS_QUERY_CHOICES, Group
1616
from sentry.models.organization import Organization
1717
from sentry.seer.signed_seer_api import (
1818
SeerViewerContext,
@@ -55,12 +55,21 @@ def get(self, request: Request, organization: Organization) -> Response:
5555
status=status_codes.HTTP_400_BAD_REQUEST,
5656
)
5757

58-
valid_group_ids = set(
59-
Group.objects.filter(
60-
id__in=group_ids,
61-
project__organization=organization,
62-
).values_list("id", flat=True)
58+
group_qs = Group.objects.filter(
59+
id__in=group_ids,
60+
project__organization=organization,
6361
)
62+
63+
status_param = request.GET.get("status")
64+
if status_param is not None:
65+
if status_param not in STATUS_QUERY_CHOICES:
66+
return Response(
67+
{"detail": "Invalid status parameter"},
68+
status=status_codes.HTTP_400_BAD_REQUEST,
69+
)
70+
group_qs = group_qs.filter(status=STATUS_QUERY_CHOICES[status_param])
71+
72+
valid_group_ids = set(group_qs.values_list("id", flat=True))
6473
group_ids = [gid for gid in group_ids if gid in valid_group_ids]
6574

6675
if not group_ids:

0 commit comments

Comments
 (0)