1313import httpx
1414from asgiref .sync import sync_to_async
1515from django .conf import settings
16- from django .http import HttpRequest , HttpResponse , StreamingHttpResponse
16+ from django .http import HttpRequest , HttpResponse , JsonResponse , StreamingHttpResponse
1717from django .http .response import HttpResponseBase
1818
1919from sentry import options
3434from sentry .utils import metrics
3535from sentry .utils .http import BodyAsyncWrapper
3636
37+ from .circuitbreaker import (
38+ CircuitBreakerManager ,
39+ CircuitBreakerOverflow ,
40+ CircuitBreakerWindowOverflow ,
41+ )
42+
3743logger = logging .getLogger (__name__ )
3844
3945proxy_client = httpx .AsyncClient ()
46+ circuitbreakers = CircuitBreakerManager ()
4047
4148# Endpoints that handle uploaded files have higher timeouts configured
4249# and we need to honor those timeouts when proxying.
@@ -141,8 +148,9 @@ async def proxy_cell_request(
141148 request : HttpRequest ,
142149 cell : Cell ,
143150 url_name : str ,
144- ) -> StreamingHttpResponse :
151+ ) -> HttpResponseBase :
145152 """Take a django request object and proxy it to a cell silo"""
153+ metric_tags = {"region" : cell .name , "url_name" : url_name }
146154 target_url = urljoin (cell .address , request .path )
147155
148156 content_encoding = request .headers .get ("Content-Encoding" )
@@ -154,37 +162,59 @@ async def proxy_cell_request(
154162 query_params = request .GET
155163
156164 timeout = ENDPOINT_TIMEOUT_OVERRIDE .get (url_name , settings .GATEWAY_PROXY_TIMEOUT )
157- metric_tags = {"region" : cell .name , "url_name" : url_name }
158165
159166 # XXX: See sentry.testutils.pytest.sentry for more information
160167 if settings .APIGATEWAY_PROXY_SKIP_RELAY and request .path .startswith ("/api/0/relays/" ):
161168 return StreamingHttpResponse (streaming_content = "relay proxy skipped" , status = 404 )
162169
163- if url_name == "sentry-api-0-organization-objectstore" :
164- if content_encoding :
165- header_dict ["Content-Encoding" ] = content_encoding
166- data = get_raw_body_async (request )
167- else :
168- data = BodyAsyncWrapper (request .body )
169- # With request streaming, and without `Content-Length` header,
170- # `httpx` will set chunked transfer encoding.
171- # Upstream doesn't necessarily support this,
172- # thus we re-add the header if it was present in the original request.
173- if content_length :
174- header_dict ["Content-Length" ] = content_length
175-
176170 try :
177- with metrics .timer ("apigateway.proxy_request.duration" , tags = metric_tags ):
178- req = proxy_client .build_request (
179- request .method ,
180- target_url ,
181- headers = header_dict ,
182- params = dict (query_params ) if query_params is not None else None ,
183- content = _stream_request (data ) if data else None , # type: ignore[arg-type]
184- timeout = timeout ,
185- )
186- resp = await proxy_client .send (req , stream = True , follow_redirects = False )
187- return _adapt_response (resp , target_url )
188- except (httpx .TimeoutException , asyncio .CancelledError ):
189- # remote silo timeout. Use DRF timeout instead
190- raise RequestTimeout ()
171+ async with circuitbreakers .get (cell .name ) as circuitbreaker :
172+ if url_name == "sentry-api-0-organization-objectstore" :
173+ if content_encoding :
174+ header_dict ["Content-Encoding" ] = content_encoding
175+ data = get_raw_body_async (request )
176+ else :
177+ data = BodyAsyncWrapper (request .body )
178+ # With request streaming, and without `Content-Length` header,
179+ # `httpx` will set chunked transfer encoding.
180+ # Upstream doesn't necessarily support this,
181+ # thus we re-add the header if it was present in the original request.
182+ if content_length :
183+ header_dict ["Content-Length" ] = content_length
184+
185+ try :
186+ with metrics .timer ("apigateway.proxy_request.duration" , tags = metric_tags ):
187+ req = proxy_client .build_request (
188+ request .method ,
189+ target_url ,
190+ headers = header_dict ,
191+ params = dict (query_params ) if query_params is not None else None ,
192+ content = _stream_request (data ) if data else None , # type: ignore[arg-type]
193+ timeout = timeout ,
194+ )
195+ resp = await proxy_client .send (req , stream = True , follow_redirects = False )
196+ if resp .status_code >= 502 :
197+ metrics .incr ("apigateway.proxy.request_failed" , tags = metric_tags )
198+ circuitbreaker .incr_failures ()
199+ return _adapt_response (resp , target_url )
200+ except (httpx .TimeoutException , asyncio .CancelledError ):
201+ metrics .incr ("apigateway.proxy.request_timeout" , tags = metric_tags )
202+ circuitbreaker .incr_failures ()
203+ # remote silo timeout. Use DRF timeout instead
204+ raise RequestTimeout ()
205+ except httpx .RequestError :
206+ metrics .incr ("apigateway.proxy.request_failed" , tags = metric_tags )
207+ circuitbreaker .incr_failures ()
208+ raise
209+ except CircuitBreakerOverflow :
210+ metrics .incr ("apigateway.proxy.circuit_breaker.overflow" , tags = metric_tags )
211+ return JsonResponse (
212+ {"error" : "apigateway" , "detail" : "Too many requests" },
213+ status = 429 ,
214+ )
215+ except CircuitBreakerWindowOverflow :
216+ metrics .incr ("apigateway.proxy.circuit_breaker.rejected" , tags = metric_tags )
217+ return JsonResponse (
218+ {"error" : "apigateway" , "detail" : "Downstream service temporarily unavailable" },
219+ status = 503 ,
220+ )