66
77import asyncio
88import logging
9- from collections .abc import Generator
9+ from collections .abc import AsyncGenerator
1010from urllib .parse import urljoin , urlparse
1111from wsgiref .util import is_hop_by_hop
1212
13+ import httpx
14+ from asgiref .sync import sync_to_async
1315from django .conf import settings
1416from django .http import HttpRequest , HttpResponse , StreamingHttpResponse
1517from django .http .response import HttpResponseBase
16- from httpx import AsyncClient as HttpClient
17- from httpx import TimeoutException
18- from requests import Response as ExternalResponse
1918
20- # from requests import request as external_request
21- # from requests.exceptions import Timeout
2219from sentry import options
2320from sentry .api .exceptions import RequestTimeout
24- from sentry .objectstore .endpoints .organization import ChunkedEncodingDecoder , get_raw_body
21+ from sentry .objectstore .endpoints .organization import get_raw_body_async
2522from sentry .silo .util import (
2623 PROXY_APIGATEWAY_HEADER ,
2724 PROXY_DIRECT_LOCATION_HEADER ,
3532 get_cell_for_organization ,
3633)
3734from sentry .utils import metrics
38- from sentry .utils .http import BodyWithLength
35+ from sentry .utils .http import BodyAsyncWrapper
3936
4037logger = logging .getLogger (__name__ )
4138
42- proxy_client = HttpClient ()
39+ proxy_client = httpx . AsyncClient ()
4340
4441# Endpoints that handle uploaded files have higher timeouts configured
4542# and we need to honor those timeouts when proxying.
5956PROXY_CHUNK_SIZE = 512 * 1024
6057
6158
62- def _parse_response (response : ExternalResponse , remote_url : str ) -> StreamingHttpResponse :
63- """
64- Convert the Responses class from requests into the drf Response
65- """
async def _stream_response_and_close(response: httpx.Response) -> AsyncGenerator[bytes]:
    """Stream the body of a proxied ``httpx`` response, releasing the connection when done.

    The response is opened in streaming mode, so it is not closed
    automatically; the ``finally`` guarantees ``aclose()`` runs whether the
    consumer exhausts the iterator, abandons it, or an error is raised
    mid-stream.
    """
    body_iter = response.aiter_bytes(PROXY_CHUNK_SIZE)
    try:
        async for piece in body_iter:
            yield piece
    finally:
        # Return the connection to the pool even on partial consumption.
        await response.aclose()
66+
67+
68+ def _adapt_response (response : httpx .Response , remote_url : str ) -> StreamingHttpResponse :
69+ """Convert an httpx Response into a Django response."""
6670
67- # def stream_response() -> Generator[bytes]:
68- # yield from response.iter_content(PROXY_CHUNK_SIZE)
6971 new_headers = clean_outbound_headers (response .headers )
72+ content_type = new_headers .pop ("Content-Type" , None )
7073
7174 streamed_response = StreamingHttpResponse (
72- streaming_content = response . aiter_bytes ( ),
75+ streaming_content = _stream_response_and_close ( response ),
7376 status = response .status_code ,
74- content_type = new_headers . pop ( "Content-Type" , None ) ,
77+ content_type = content_type ,
7578 )
76- # Add Headers to response
79+
7780 for header , value in new_headers .items ():
7881 if not is_hop_by_hop (header ):
7982 streamed_response [header ] = value
@@ -82,13 +85,20 @@ def _parse_response(response: ExternalResponse, remote_url: str) -> StreamingHtt
8285 return streamed_response
8386
8487
88+ async def _stream_request (body ) -> AsyncGenerator [bytes ]:
89+ async for chunk in body :
90+ yield chunk
91+
92+
8593async def proxy_request (
86- request : HttpRequest , org_id_or_slug : str , url_name : str
94+ request : HttpRequest ,
95+ org_id_or_slug : str ,
96+ url_name : str ,
8797) -> HttpResponseBase :
8898 """Take a django request object and proxy it to a remote location given an org_id_or_slug"""
8999
90100 try :
91- cell = get_cell_for_organization (org_id_or_slug )
101+ cell = await sync_to_async ( get_cell_for_organization ) (org_id_or_slug )
92102 except CellResolutionError as e :
93103 logger .info ("region_resolution_error" , extra = {"org_slug" : org_id_or_slug , "error" : str (e )})
94104 return HttpResponse (status = 404 )
@@ -128,7 +138,9 @@ async def proxy_error_embed_request(
128138
129139
130140async def proxy_cell_request (
131- request : HttpRequest , cell : Cell , url_name : str
141+ request : HttpRequest ,
142+ cell : Cell ,
143+ url_name : str ,
132144) -> StreamingHttpResponse :
133145 """Take a django request object and proxy it to a cell silo"""
134146 target_url = urljoin (cell .address , request .path )
@@ -137,7 +149,6 @@ async def proxy_cell_request(
137149 header_dict = clean_proxy_headers (request .headers )
138150 header_dict [PROXY_APIGATEWAY_HEADER ] = "true"
139151
140- # TODO: use requests session for connection pooling capabilities
141152 assert request .method is not None
142153 query_params = request .GET
143154
@@ -148,31 +159,26 @@ async def proxy_cell_request(
148159 if settings .APIGATEWAY_PROXY_SKIP_RELAY and request .path .startswith ("/api/0/relays/" ):
149160 return StreamingHttpResponse (streaming_content = "relay proxy skipped" , status = 404 )
150161
151- data : bytes | Generator [bytes ] | ChunkedEncodingDecoder | BodyWithLength | None = None
162+ data : AsyncGenerator [bytes ] | None = None
152163 if url_name == "sentry-api-0-organization-objectstore" :
153164 if content_encoding :
154165 header_dict ["Content-Encoding" ] = content_encoding
155- data = get_raw_body (request )
166+ data = get_raw_body_async (request )
156167 else :
157- data = BodyWithLength (request )
168+ data = BodyAsyncWrapper (request . body )
158169
159170 try :
160171 with metrics .timer ("apigateway.proxy_request.duration" , tags = metric_tags ):
161- async with proxy_client .stream (
172+ req = proxy_client .build_request (
162173 request .method ,
163174 target_url ,
164175 headers = header_dict ,
165176 params = dict (query_params ) if query_params is not None else None ,
166- data = data ,
177+ content = _stream_request ( data ) ,
167178 timeout = timeout ,
168- follow_redirects = False ,
169- ) as resp :
170- return _parse_response (resp , target_url )
171- except (TimeoutException , asyncio .CancelledError ):
179+ )
180+ resp = await proxy_client . send ( req , stream = True , follow_redirects = False )
181+ return _adapt_response (resp , target_url )
182+ except (httpx . TimeoutException , asyncio .CancelledError ):
172183 # remote silo timeout. Use DRF timeout instead
173184 raise RequestTimeout ()
174-
175- # new_headers = clean_outbound_headers(resp.headers)
176- # resp.headers.clear()
177- # resp.headers.update(new_headers)
178- # return _parse_response(resp, target_url)
0 commit comments