@@ -269,7 +269,7 @@ def _prepare_aggregations(
269269 end : datetime ,
270270 having : Sequence [Sequence [Any ]],
271271 aggregate_kwargs : TrendsSortWeights | None = None ,
272- replace_trends_aggregation : bool | None = False ,
272+ use_issue_platform : bool = False ,
273273 ) -> list [Any ]:
274274 extra_aggregations = self .dependency_aggregations .get (sort_field , [])
275275 required_aggregations = set ([sort_field , "total" ] + extra_aggregations )
@@ -280,8 +280,8 @@ def _prepare_aggregations(
280280 aggregations = []
281281 for alias in required_aggregations :
282282 aggregation = self .aggregation_defs [alias ]
283- if replace_trends_aggregation and alias == "trends" :
284- aggregation = self .aggregation_defs ["trends_issue_platform " ]
283+ if use_issue_platform and alias in ( "trends" , "recommended" ) :
284+ aggregation = self .aggregation_defs [f" { alias } _issue_platform " ]
285285 if callable (aggregation ):
286286 if aggregate_kwargs :
287287 aggregation = aggregation (start , end , aggregate_kwargs .get (alias , {}))
@@ -333,14 +333,10 @@ def _prepare_params_for_category(
333333 else :
334334 conditions .append (converted_filter )
335335
336- if sort_field == "trends" and group_category is not GroupCategory .ERROR .value :
337- aggregations = self ._prepare_aggregations (
338- sort_field , start , end , having , aggregate_kwargs , True
339- )
340- else :
341- aggregations = self ._prepare_aggregations (
342- sort_field , start , end , having , aggregate_kwargs
343- )
336+ use_issue_platform = group_category is not GroupCategory .ERROR .value
337+ aggregations = self ._prepare_aggregations (
338+ sort_field , start , end , having , aggregate_kwargs , use_issue_platform
339+ )
344340
345341 if cursor is not None :
346342 having .append ((sort_field , ">=" if cursor .is_prev else "<=" , cursor .value ))
@@ -700,18 +696,100 @@ def trends_aggregation_impl(
700696 ]
701697
702698
def _recommended_aggregation(
    timestamp_column: str, type_column: str | None = None
) -> Sequence[str]:
    """Build the snuba/ClickHouse expression for the "recommended" sort score.

    The score is a weighted sum of five normalized signals (recency, spike,
    severity, user impact, event volume) plus an additive per-issue-type boost.
    Each weight is read from a runtime option at call time, so the blend can be
    tuned without a deploy.

    Args:
        timestamp_column: event timestamp column of the target dataset.
        type_column: occurrence-type column, when the dataset has one; without
            it the type-boost comparison falls back to the literal ``1``.

    Returns:
        A two-element ``[expression, column]`` aggregation pair in the same
        shape as the other ``aggregation_defs`` entries (empty column part).
    """
    seconds_per_hour = 3600

    # Recency: exponential decay on time since the last event, 24h half-life.
    recency_weight = options.get("snuba.search.recommended.recency-weight")
    hours_since_last = f"divide(minus(now(), max({timestamp_column})), {seconds_per_hour})"
    recency = f"divide(1, pow(2, divide({hours_since_last}, 24)))"

    # Spike: share of events in the last 6h relative to the last 3d (+1 to
    # avoid division by zero), capped at 1.0.
    spike_weight = options.get("snuba.search.recommended.spike-weight")
    count_last_6h = f"countIf(lessOrEquals(minus(now(), {timestamp_column}), {6 * seconds_per_hour}))"
    count_last_3d = f"countIf(lessOrEquals(minus(now(), {timestamp_column}), {3 * 24 * seconds_per_hour}))"
    spike = f"least(1.0, divide({count_last_6h}, plus({count_last_3d}, 1)))"

    # Severity: highest observed log level, mapped onto [0, 1]
    # (fatal=1.0, error=0.75, warning=0.5, info=0.25, anything else 0.0).
    severity_weight = options.get("snuba.search.recommended.severity-weight")
    severity = (
        "max(multiIf("
        "equals(level, 'fatal'), 1.0, "
        "equals(level, 'error'), 0.75, "
        "equals(level, 'warning'), 0.5, "
        "equals(level, 'info'), 0.25, "
        "0.0))"
    )

    # User impact: ln(uniq users + 1)/ln(1001), so 1→~0, 10→0.33, 100→0.67,
    # 1000→1.0, capped at 1.0 beyond that.
    user_impact_weight = options.get("snuba.search.recommended.user-impact-weight")
    user_impact = "least(1.0, divide(log(plus(uniq(tags[sentry:user]), 1)), log(1001)))"

    # Event volume: ln(count + 1)/ln(10001), so 1→~0, 10→0.25, 100→0.50,
    # 1000→0.75, 10000+→1.0.
    event_volume_weight = options.get("snuba.search.recommended.event-volume-weight")
    event_volume = "least(1.0, divide(log(plus(count(), 1)), log(10001)))"

    # Group-type boost: an additive bump per configured issue type id.
    group_type_boosts = options.get("snuba.search.recommended.group-type-boost")
    if group_type_boosts:
        type_expr = f"any({type_column})" if type_column else "1"
        branches = [
            f"equals({type_expr}, {type_id}), {boost}"
            for type_id, boost in group_type_boosts.items()
        ]
        type_boost = f"multiIf({', '.join(branches)}, 0.0)"
    else:
        type_boost = "0.0"

    # Fold the weighted signals into a left-nested plus(...) chain, then add
    # the type boost on the outside.
    weighted_signals = [
        (recency_weight, recency),
        (spike_weight, spike),
        (severity_weight, severity),
        (user_impact_weight, user_impact),
        (event_volume_weight, event_volume),
    ]
    first_weight, first_signal = weighted_signals[0]
    score = f"multiply({first_weight}, {first_signal})"
    for weight, signal in weighted_signals[1:]:
        score = f"plus({score}, multiply({weight}, {signal}))"
    score = f"plus({score}, {type_boost})"

    return [score, ""]
757+
758+
def recommended_aggregation(
    start: datetime,
    end: datetime,
    aggregate_kwargs: Any = None,
) -> Sequence[str]:
    """Recommended-sort aggregation for the events (error) dataset.

    ``start``, ``end`` and ``aggregate_kwargs`` are accepted only because
    callable entries in ``aggregation_defs`` are invoked with that argument
    shape; the recommended score does not depend on them.
    """
    return _recommended_aggregation("timestamp")
765+
766+
def recommended_issue_platform_aggregation(
    start: datetime,
    end: datetime,
    aggregate_kwargs: Any = None,
) -> Sequence[str]:
    """Recommended-sort aggregation for the issue-platform dataset.

    Uses that dataset's column names (``client_timestamp`` for event time and
    ``occurrence_type_id`` for the issue type). ``start``, ``end`` and
    ``aggregate_kwargs`` exist only to match the calling convention for
    callable ``aggregation_defs`` entries.
    """
    return _recommended_aggregation(
        "client_timestamp", type_column="occurrence_type_id"
    )
775+
776+
703777class PostgresSnubaQueryExecutor (AbstractQueryExecutor ):
704778 ISSUE_FIELD_NAME = "group_id"
705779
706780 logger = logging .getLogger ("sentry.search.postgressnuba" )
707- dependency_aggregations = {"trends" : ["last_seen" , "times_seen" ]}
781+ dependency_aggregations = {
782+ "trends" : ["last_seen" , "times_seen" ],
783+ "recommended" : ["last_seen" , "times_seen" , "user_count" ],
784+ }
708785 postgres_only_fields = {* SKIP_SNUBA_FIELDS , "regressed_in_release" }
709786 # add specific fields here on top of skip_snuba_fields from the serializer
710787 sort_strategies = {
711788 "date" : "last_seen" ,
712789 "freq" : "times_seen" ,
713790 "new" : "first_seen" ,
714791 "trends" : "trends" ,
792+ "recommended" : "recommended" ,
715793 "user" : "user_count" ,
716794 # We don't need a corresponding snuba field here, since this sort only happens
717795 # in Postgres
@@ -723,10 +801,12 @@ class PostgresSnubaQueryExecutor(AbstractQueryExecutor):
723801 "first_seen" : ["multiply(toUInt64(min(coalesce(group_first_seen, timestamp))), 1000)" , "" ],
724802 "last_seen" : ["multiply(toUInt64(max(timestamp)), 1000)" , "" ],
725803 "trends" : trends_aggregation ,
804+ "recommended" : recommended_aggregation ,
726805 # Only makes sense with WITH TOTALS, returns 1 for an individual group.
727806 "total" : ["uniq" , ISSUE_FIELD_NAME ],
728807 "user_count" : ["uniq" , "tags[sentry:user]" ],
729808 "trends_issue_platform" : trends_issue_platform_aggregation ,
809+ "recommended_issue_platform" : recommended_issue_platform_aggregation ,
730810 }
731811
732812 @property
0 commit comments