Files
2025-01-25 15:58:24 -03:00

59 lines
1.5 KiB
Python

import json
from datetime import datetime, timedelta
import logging as log
import requests
from pyrate_limiter import Duration, Rate, Limiter, InMemoryBucket
from constants import api_url
def get_tray_url(path):
    """Build the full Tray API URL for the given endpoint *path*."""
    return "{}/{}".format(api_url, path)
def parse_date(str_date, str_format=None):
    """Parse a date string into a ``datetime.date``.

    Parameters
    ----------
    str_date : str
        The date string; ``'0000-00-00'`` and ``''`` are treated as "no
        date" sentinels and yield ``None``.
    str_format : str, optional
        ``strptime`` format; defaults to ISO ``'%Y-%m-%d'``.

    Returns
    -------
    datetime.date or None
        The parsed date, or ``None`` for sentinel/unparseable input
        (parse failures are logged, not raised).
    """
    if str_date in ('0000-00-00', ''):
        return None
    try:
        return datetime.strptime(str_date, str_format or '%Y-%m-%d').date()
    except (ValueError, TypeError) as e:
        # strptime raises ValueError for malformed input and TypeError for
        # non-string arguments; anything else is a real bug and should propagate.
        log.error(e)
        return None
def get_paged_data(path, data_key, limiter, max_pages=None, params=None):
    """Fetch every page of a paginated Tray API collection.

    Parameters
    ----------
    path : str
        API path appended to the base URL (see ``get_tray_url``).
    data_key : str
        Key in each JSON response holding that page's list of items.
    limiter : pyrate_limiter.Limiter
        Rate limiter consulted before every request.
    max_pages : int, optional
        Stop after this many pages; fetch all pages when ``None``.
    params : dict, optional
        Extra query parameters. The caller's dict is NOT mutated.

    Returns
    -------
    list
        All items accumulated across the fetched pages.

    Raises
    ------
    requests.HTTPError
        For 4xx/5xx responses, or for an unexpected non-200/201 status
        that would otherwise leave the loop spinning forever.
    """
    all_data = []
    url = get_tray_url(path)
    # Copy so the per-page 'page' key never leaks into the caller's dict.
    params = dict(params) if params else {}
    current_page = 1
    while True:
        limiter.try_acquire(url, weight=1)
        params['page'] = current_page
        response = requests.get(url, params=params)
        response.raise_for_status()
        # raise_for_status() only covers 4xx/5xx. Any other unexpected
        # status must abort explicitly: previously it silently skipped the
        # extend, so all_data stopped growing and the loop never terminated.
        if response.status_code not in (200, 201):
            raise requests.HTTPError(
                f'Unexpected status {response.status_code} for {url}',
                response=response,
            )
        json_data = response.json()
        all_data.extend(json_data[data_key])
        total_items = json_data['paging']['total']
        if max_pages is not None and max_pages <= current_page:
            log.debug('Reached max pages: %s', max_pages)
            break
        if len(all_data) >= total_items:
            break
        current_page += 1
    log.debug('Total items fetched: %i', len(all_data))
    return all_data