From 6ef8fcc99343aebe4f54fcf5ec122ef6de489d38 Mon Sep 17 00:00:00 2001
From: Marco Lents
Date: Sun, 16 Nov 2025 10:12:39 +0100
Subject: [PATCH] extract graceful url handling into separate function

Replace the duplicated inline retry loops in get_bio() and get_ajax()
with a shared helper, request_handle_rate_limit(). The helper returns
on the first successful response (the inline loops kept re-fetching
even after a success), retries up to 5 times with a 5-minute back-off,
and makes one final unguarded attempt so the underlying error
propagates when the retries are exhausted. The bare `except:` of the
inline version is narrowed to requests.RequestException so unrelated
errors (including KeyboardInterrupt) are no longer swallowed and
mistaken for rate limiting.
---
 crawler.py | 30 ++++++++++++++++++------------
 1 file changed, 18 insertions(+), 12 deletions(-)

diff --git a/crawler.py b/crawler.py
index 7b5ea7d..9a34a24 100644
--- a/crawler.py
+++ b/crawler.py
@@ -184,12 +184,7 @@ def get_bio(url, name, sleep_for):
     name, party = name
     name = name.split(", ")
     print(f"Getting {url} for {name[1]} {name[0]}")
-    for _ in range(5):
-        try:
-            response = requests.get(url)
-        except:
-            print("Rate limit! waiting 5min")
-            sleep(300)
+    response = request_handle_rate_limit(url)
     soup = BeautifulSoup(response.content)
     cv = soup.find(class_="m-biography__biography").text.strip()
     ajax_divs = soup.find_all(class_="m-ajaxLoadedContent")
@@ -236,6 +231,22 @@ def get_bio(url, name, sleep_for):
     return bio
 
 
+def request_handle_rate_limit(url):
+    """GET *url*, retrying when the request fails.
+
+    Retries up to 5 times, sleeping 5 minutes after each failure
+    (the site rate-limits aggressively). Returns the first successful
+    response; after the retries are exhausted, a final unguarded
+    attempt is made so the underlying error propagates to the caller.
+    """
+    for _ in range(5):
+        try:
+            return requests.get(url)
+        except requests.RequestException:
+            print("Rate limit! waiting 5min")
+            sleep(300)
+    return requests.get(url)
+
+
 def get_disclosures(elem):
     if not elem:
         return None
@@ -319,12 +330,7 @@ def get_ajax(elem):
         for key, value in filters.items()
     ]
     url = url + "?" + "&".join(f"{key}={val}" for key, val in sanitized_filters)
-    for _ in range(5):
-        try:
-            response = requests.get(url)
-        except:
-            print("Rate limit! waiting 5min")
-            sleep(300)
+    response = request_handle_rate_limit(url)
     return response