1+ """HTTP client with automatic retry and environment detection.
2+
3+ Provides adaptive HTTP client that works in both browser (Pyodide) and
4+ server environments with built-in retry logic for transient failures.
5+ """
6+
17import asyncio
28import json
39import logging
10+ from typing import Any , Awaitable , Callable , TypeVar
411
512from .exceptions import HttpError
613
1118MAX_RETRIES = 3
1219INITIAL_BACKOFF = 1.0 # seconds
1320
21+ T = TypeVar ("T" )
22+
23+
24+ async def _retry_request (
25+ request_fn : Callable [[], Awaitable [tuple [int , str , T | None ]]],
26+ url : str ,
27+ method : str ,
28+ ) -> T :
29+ """Execute HTTP request with retry logic for transient failures.
30+
31+ Args:
32+ request_fn: Async function that returns (status_code, response_text, parsed_data).
33+ parsed_data is None if request failed.
34+ url: Request URL (for error context)
35+ method: HTTP method (for error context)
36+
37+ Returns:
38+ Parsed response data on success
39+
40+ Raises:
41+ HttpError: On non-retryable error or after all retries exhausted
42+ """
43+ last_error : HttpError | None = None
44+
45+ for attempt in range (MAX_RETRIES ):
46+ status , text , data = await request_fn ()
47+
48+ # Success
49+ if data is not None :
50+ return data
51+
52+ # Non-retryable error
53+ if status not in RETRY_STATUS_CODES :
54+ raise HttpError (
55+ f"HTTP { status } : { text } " ,
56+ status_code = status ,
57+ details = {"url" : url , "method" : method },
58+ )
59+
60+ # Retryable error - store and possibly retry
61+ last_error = HttpError (
62+ f"HTTP { status } : { text } " ,
63+ status_code = status ,
64+ details = {"url" : url , "method" : method },
65+ )
66+
67+ if attempt < MAX_RETRIES - 1 :
68+ backoff = INITIAL_BACKOFF * (2 ** attempt )
69+ logger .warning (
70+ f"HTTP { status } , retrying in { backoff } s (attempt { attempt + 1 } /{ MAX_RETRIES } )"
71+ )
72+ await asyncio .sleep (backoff )
73+
74+ # All retries exhausted
75+ if last_error is None :
76+ # This should never happen since MAX_RETRIES >= 1
77+ raise HttpError (
78+ "Request failed with no error captured" ,
79+ status_code = 0 ,
80+ details = {"url" : url , "method" : method },
81+ )
82+ raise last_error
83+
1484
1585class HttpClient :
16- async def request (self , method , url , headers = None , body = None ):
86+ """Base HTTP client interface."""
87+
88+ async def request (
89+ self , method : str , url : str , headers : dict [str , str ] | None = None , body : Any = None
90+ ) -> Any :
1791 raise NotImplementedError
1892
1993
20- def is_pyodide ():
94+ def is_pyodide () -> bool :
95+ """Check if running in Pyodide (browser) environment."""
2196 try :
2297 import pyodide_js # type: ignore[import-not-found] # noqa: F401
2398
@@ -26,8 +101,8 @@ def is_pyodide():
26101 return False
27102
28103
29- # parse response without relying on content type
30- async def parse_response ( response ):
104+ async def _parse_response ( response : Any ) -> Any :
105+ """Parse response without relying on content type."""
31106 text = await response .text ()
32107 try :
33108 return json .loads (text )
@@ -41,53 +116,36 @@ async def parse_response(response):
41116
42117
class BrowserHttpClient(HttpClient):
    """HTTP client for the browser/Pyodide environment, backed by the JS fetch API."""

    def __init__(self) -> None:
        # Imported lazily so this module can still be imported outside Pyodide.
        from js import fetch  # type: ignore[import-not-found]
        from pyodide.ffi import to_js  # type: ignore[import-not-found]

        self._fetch = fetch
        self._to_js = to_js

    async def request(
        self,
        method: str,
        url: str,
        headers: dict[str, str] | None = None,
        body: Any = None,
    ) -> Any:
        """Send a request via fetch, retrying transient failures.

        Args:
            method: HTTP method name (any case; upper-cased before sending).
            url: Target URL.
            headers: Optional request headers; a Content-Type default is added
                in place when a body is supplied.
            body: JSON-serializable payload, or None for no body.

        Returns:
            The parsed response body.

        Raises:
            HttpError: On a non-retryable status or after retries are exhausted.
        """
        headers = headers or {}
        options: dict[str, Any] = {
            "method": method.upper(),
            "headers": headers,
        }
        if body is not None:
            options["body"] = json.dumps(body)
            headers.setdefault("Content-Type", "application/json")

        async def do_request() -> tuple[int, str, Any | None]:
            # Adapter matching the (status, text, data) contract of _retry_request.
            response = await self._fetch(url, self._to_js(options))
            if response.ok:
                data = await _parse_response(response)
                # NOTE(review): a response body of JSON `null` parses to None and
                # would be misreported as a failure by _retry_request — confirm
                # upstream APIs never return a bare null body.
                return response.status, "", data
            text = await response.text()
            return response.status, text, None

        return await _retry_request(do_request, url, method)
91149
92150
93151# ----------------------------
@@ -96,20 +154,23 @@ async def request(self, method, url, headers=None, body=None):
96154
97155
98156class ServerHttpClient (HttpClient ):
99- def __init__ (self ):
157+ """HTTP client for server environment using aiohttp."""
158+
159+ def __init__ (self ) -> None :
100160 import aiohttp
101161
102162 self ._aiohttp = aiohttp
103163
104- async def request (self , method , url , headers = None , body = None ):
164+ async def request (
165+ self , method : str , url : str , headers : dict [str , str ] | None = None , body : Any = None
166+ ) -> Any :
105167 data = None
106168 if body is not None :
107169 data = json .dumps (body )
108170 headers = headers or {}
109171 headers .setdefault ("Content-Type" , "application/json" )
110172
111- last_error = None
112- for attempt in range (MAX_RETRIES ):
173+ async def do_request () -> tuple [int , str , Any | None ]:
113174 async with self ._aiohttp .ClientSession () as session :
114175 async with session .request (
115176 method = method .upper (),
@@ -118,32 +179,12 @@ async def request(self, method, url, headers=None, body=None):
118179 data = data ,
119180 ) as response :
120181 if response .status < 400 :
121- return await parse_response (response )
122-
123- status = response .status
182+ parsed = await _parse_response (response )
183+ return response .status , "" , parsed
124184 text = await response .text ()
185+ return response .status , text , None
125186
126- if status not in RETRY_STATUS_CODES :
127- # Don't retry client errors (except 429)
128- raise HttpError (
129- f"HTTP { status } : { text } " ,
130- status_code = status ,
131- details = {"url" : url , "method" : method },
132- )
133-
134- last_error = HttpError (
135- f"HTTP { status } : { text } " ,
136- status_code = status ,
137- details = {"url" : url , "method" : method },
138- )
139-
140- if attempt < MAX_RETRIES - 1 :
141- backoff = INITIAL_BACKOFF * (2 ** attempt )
142- logger .warning (f"HTTP { status } , retrying in { backoff } s " f"(attempt { attempt + 1 } /{ MAX_RETRIES } )" )
143- await asyncio .sleep (backoff )
144-
145- assert last_error is not None # Always set after at least one iteration
146- raise last_error
187+ return await _retry_request (do_request , url , method )
147188
148189
149190# ----------------------------
0 commit comments