Implement exponential backoff for handling Reddit API rate limits in _fetch_data method

This commit is contained in:
2026-01-17 22:14:26 +00:00
parent 193ff43975
commit d7baf39087

View File

@@ -1,5 +1,6 @@
import requests import requests
import logging import logging
import time
from dto.post import Post from dto.post import Post
from dto.user import User from dto.user import User
@@ -145,10 +146,24 @@ class RedditAPI:
def _fetch_data(self, endpoint: str, params: dict) -> dict:
    """Fetch JSON from a Reddit API endpoint, retrying on rate limits.

    Performs a GET against ``self.url + endpoint`` and returns the parsed
    JSON body. On HTTP 429 it sleeps (honoring ``Retry-After`` when the
    server sends a numeric value, otherwise an exponentially growing
    backoff) and retries up to ``max_retries`` times.

    Args:
        endpoint: Path appended to the API base URL (e.g. "/r/all/new").
        params: Query-string parameters passed to ``requests.get``.

    Returns:
        The decoded JSON response as a dict, or ``{}`` on request failure
        or when every retry is exhausted by rate limiting.
    """
    url = f"{self.url}{endpoint}"
    max_retries = 15
    backoff = 1  # seconds; doubled after each rate-limited attempt

    for attempt in range(max_retries):
        try:
            response = requests.get(url, headers={'User-agent': 'python:ethnography-college-project:0.1 (by /u/ThisBirchWood)'}, params=params)

            if response.status_code == 429:
                # Retry-After arrives as a *string* (or an HTTP-date we
                # can't parse) — coerce to int, falling back to our own
                # backoff. Without this, time.sleep(str) raises TypeError
                # on the very path this retry logic exists for.
                retry_after = response.headers.get("Retry-After")
                try:
                    wait_time = int(retry_after) if retry_after is not None else backoff
                except ValueError:
                    wait_time = backoff
                # NOTE(review): only `import logging` is visible in this
                # file, so use the module-level API rather than an
                # undefined `logger` name — confirm against full file.
                logging.warning(f"Rate limited by Reddit API. Retrying in {wait_time} seconds...")
                time.sleep(wait_time)
                backoff *= 2
                continue

            response.raise_for_status()
            return response.json()
        except requests.RequestException as e:
            print(f"Error fetching data from Reddit API: {e}")
            return {}

    # All retries consumed by 429s: keep the declared dict contract
    # instead of implicitly returning None.
    return {}