
Commit f31ed64

✨ Add retry with exponential backoff for rate limiting
Automatically retries requests on:
- 429 Too Many Requests (rate limiting)
- 500, 502, 503, 504 (server errors)

Features:
- Exponential backoff: 1s, 2s, 4s... up to 60s max
- Respects Retry-After header when present
- 3 retries by default (configurable via max_retries param)
- Applies to both sync and async request methods
- Logs retry attempts with delay info
1 parent 452d8c5 commit f31ed64
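
For reference, here is a minimal standalone sketch of the backoff schedule described above, using the same default values the commit introduces (1 s initial delay, factor 2.0, 60 s cap). The helper name is illustrative only and is not part of asa_api_client:

def backoff_delay(attempt: int) -> float:
    # Same formula as _calculate_retry_delay uses when no Retry-After header is present.
    initial, factor, cap = 1.0, 2.0, 60.0  # DEFAULT_INITIAL_DELAY / DEFAULT_BACKOFF_FACTOR / DEFAULT_MAX_DELAY
    return min(initial * factor**attempt, cap)

print([backoff_delay(a) for a in range(4)])  # [1.0, 2.0, 4.0, 8.0]; later attempts are capped at 60.0

With the default max_retries of 3, the worst case is three waits (1 s + 2 s + 4 s = 7 s) before the final error is raised.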

File tree

3 files changed: +150 −39 lines changed

asa_api_client/resources/base.py

Lines changed: 148 additions & 36 deletions
@@ -4,6 +4,8 @@
 including HTTP request handling, error mapping, and pagination.
 """
 
+import asyncio
+import time
 from collections.abc import AsyncIterator, Iterator
 from typing import TYPE_CHECKING, Any, Generic, TypeVar
 
@@ -32,6 +34,13 @@
 CreateT = TypeVar("CreateT", bound=BaseModel)
 UpdateT = TypeVar("UpdateT", bound=BaseModel)
 
+# Retry configuration
+DEFAULT_MAX_RETRIES = 3
+DEFAULT_INITIAL_DELAY = 1.0  # seconds
+DEFAULT_MAX_DELAY = 60.0  # seconds
+DEFAULT_BACKOFF_FACTOR = 2.0
+RETRYABLE_STATUS_CODES = {429, 500, 502, 503, 504}
+
 
 class BaseResource(Generic[T, CreateT, UpdateT]):
     """Base class for API resources.
@@ -200,52 +209,118 @@ def _handle_error(self, response: httpx.Response) -> None:
             response_body=error_body,
         )
 
+    def _calculate_retry_delay(
+        self,
+        attempt: int,
+        response: httpx.Response | None = None,
+    ) -> float:
+        """Calculate delay before next retry attempt.
+
+        Uses exponential backoff, respecting Retry-After header if present.
+
+        Args:
+            attempt: Current attempt number (0-indexed).
+            response: The HTTP response (to check Retry-After header).
+
+        Returns:
+            Delay in seconds before next retry.
+        """
+        # Check for Retry-After header
+        if response is not None:
+            retry_after = response.headers.get("Retry-After")
+            if retry_after:
+                try:
+                    return min(float(retry_after), DEFAULT_MAX_DELAY)
+                except ValueError:
+                    pass
+
+        # Exponential backoff with jitter
+        delay = DEFAULT_INITIAL_DELAY * (DEFAULT_BACKOFF_FACTOR**attempt)
+        return min(delay, DEFAULT_MAX_DELAY)
+
     def _request(
         self,
         method: str,
         path: str = "",
         *,
         json: dict[str, Any] | list[dict[str, Any]] | None = None,
         params: dict[str, Any] | None = None,
+        max_retries: int = DEFAULT_MAX_RETRIES,
     ) -> dict[str, Any]:
-        """Make a synchronous API request.
+        """Make a synchronous API request with automatic retry.
+
+        Automatically retries on rate limiting (429) and server errors (5xx)
+        with exponential backoff.
 
         Args:
             method: HTTP method (GET, POST, PUT, DELETE).
             path: URL path to append to base_path.
             json: JSON body to send.
             params: Query parameters.
+            max_retries: Maximum number of retry attempts.
 
         Returns:
             The parsed JSON response.
 
         Raises:
-            AppleSearchAdsError: If the request fails.
+            AppleSearchAdsError: If the request fails after all retries.
         """
         url = self._build_url(path)
         headers = self._get_headers()
 
         logger.debug("%s %s", method, url)
 
-        try:
-            response = self._http_client.request(
-                method,
-                url,
-                json=json,
-                params=params,
-                headers=headers,
-            )
-        except httpx.RequestError as e:
-            raise NetworkError(f"Request failed: {e}") from e
+        last_exception: AppleSearchAdsError | None = None
+
+        for attempt in range(max_retries + 1):
+            try:
+                response = self._http_client.request(
+                    method,
+                    url,
+                    json=json,
+                    params=params,
+                    headers=headers,
+                )
+            except httpx.RequestError as e:
+                if attempt < max_retries:
+                    delay = self._calculate_retry_delay(attempt)
+                    logger.warning(
+                        "Request failed (attempt %d/%d), retrying in %.1fs: %s",
+                        attempt + 1,
+                        max_retries + 1,
+                        delay,
+                        str(e),
+                    )
+                    time.sleep(delay)
+                    continue
+                raise NetworkError(f"Request failed: {e}") from e
+
+            # Check if we should retry based on status code
+            if response.status_code in RETRYABLE_STATUS_CODES and attempt < max_retries:
+                delay = self._calculate_retry_delay(attempt, response)
+                logger.warning(
+                    "Received %d (attempt %d/%d), retrying in %.1fs",
+                    response.status_code,
+                    attempt + 1,
+                    max_retries + 1,
+                    delay,
+                )
+                time.sleep(delay)
+                continue
+
+            if response.status_code >= 400:
+                self._handle_error(response)
 
-        if response.status_code >= 400:
-            self._handle_error(response)
+            if response.status_code == 204:
+                return {}
 
-        if response.status_code == 204:
-            return {}
+            result: dict[str, Any] = response.json()
+            return result
 
-        result: dict[str, Any] = response.json()
-        return result
+        # This should not be reached, but handle it just in case
+        if last_exception:
+            raise last_exception
+        raise NetworkError("Request failed after all retries")
 
     async def _request_async(
         self,
@@ -254,45 +329,82 @@ async def _request_async(
         *,
         json: dict[str, Any] | list[dict[str, Any]] | None = None,
         params: dict[str, Any] | None = None,
+        max_retries: int = DEFAULT_MAX_RETRIES,
     ) -> dict[str, Any]:
-        """Make an asynchronous API request.
+        """Make an asynchronous API request with automatic retry.
+
+        Automatically retries on rate limiting (429) and server errors (5xx)
+        with exponential backoff.
 
         Args:
             method: HTTP method (GET, POST, PUT, DELETE).
             path: URL path to append to base_path.
            json: JSON body to send.
             params: Query parameters.
+            max_retries: Maximum number of retry attempts.
 
         Returns:
             The parsed JSON response.
 
         Raises:
-            AppleSearchAdsError: If the request fails.
+            AppleSearchAdsError: If the request fails after all retries.
         """
         url = self._build_url(path)
         headers = await self._get_headers_async()
 
         logger.debug("%s %s (async)", method, url)
 
-        try:
-            response = await self._async_http_client.request(
-                method,
-                url,
-                json=json,
-                params=params,
-                headers=headers,
-            )
-        except httpx.RequestError as e:
-            raise NetworkError(f"Request failed: {e}") from e
+        last_exception: AppleSearchAdsError | None = None
+
+        for attempt in range(max_retries + 1):
+            try:
+                response = await self._async_http_client.request(
+                    method,
+                    url,
+                    json=json,
+                    params=params,
+                    headers=headers,
+                )
+            except httpx.RequestError as e:
+                if attempt < max_retries:
+                    delay = self._calculate_retry_delay(attempt)
+                    logger.warning(
+                        "Request failed (attempt %d/%d), retrying in %.1fs: %s",
+                        attempt + 1,
+                        max_retries + 1,
+                        delay,
+                        str(e),
+                    )
+                    await asyncio.sleep(delay)
+                    continue
+                raise NetworkError(f"Request failed: {e}") from e
+
+            # Check if we should retry based on status code
+            if response.status_code in RETRYABLE_STATUS_CODES and attempt < max_retries:
+                delay = self._calculate_retry_delay(attempt, response)
+                logger.warning(
+                    "Received %d (attempt %d/%d), retrying in %.1fs",
+                    response.status_code,
+                    attempt + 1,
+                    max_retries + 1,
+                    delay,
+                )
+                await asyncio.sleep(delay)
+                continue
+
+            if response.status_code >= 400:
+                self._handle_error(response)
 
-        if response.status_code >= 400:
-            self._handle_error(response)
+            if response.status_code == 204:
+                return {}
 
-        if response.status_code == 204:
-            return {}
+            result: dict[str, Any] = response.json()
+            return result
 
-        result: dict[str, Any] = response.json()
-        return result
+        # This should not be reached, but handle it just in case
+        if last_exception:
+            raise last_exception
+        raise NetworkError("Request failed after all retries")
 
     def _parse_response(self, data: dict[str, Any]) -> T:
         """Parse a single item response.
"""Parse a single item response.

pyproject.toml

Lines changed: 1 addition & 2 deletions
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
44

55
[project]
66
name = "asa-api-client"
7-
version = "0.1.2"
7+
version = "0.1.3"
88
description = "A modern Python client for the Apple Search Ads API with full type safety and async support"
99
readme = "README.md"
1010
license = "MIT"
@@ -115,7 +115,6 @@ select = [
115115
ignore = [
116116
"D100", # Missing docstring in public module
117117
"D104", # Missing docstring in public package
118-
"UP046", # Generic class uses Generic subclass instead of type params (requires invasive changes)
119118
]
120119

121120
[tool.ruff.lint.pydocstyle]

uv.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default.
