From 5a6963b371b5e1447b499ac53d23f3e7dd4adf5b Mon Sep 17 00:00:00 2001
From: scott
Date: Sun, 28 Nov 2021 00:17:34 -0700
Subject: [PATCH] ip limit fix

---
 ebay_api.py | 17 ++++++-----------
 1 file changed, 6 insertions(+), 11 deletions(-)

diff --git a/ebay_api.py b/ebay_api.py
index 7a07998..8ef84d1 100644
--- a/ebay_api.py
+++ b/ebay_api.py
@@ -227,19 +227,20 @@ class ShoppingApi:
 
         try:
             # random sleep here between 0 and 10 secs?
+            sleep(randint(1,10))
             response = requests.get(url, headers=headers,timeout=24)
             response.raise_for_status()
             print('index number {}'.format(item_id_results.index(twenty_id)))
+            response = response.json()
+            response = response['Item']
 
-        except requests.exceptions.RequestException: # TODO need better handling
-            print('connection error')
+
+        except (requests.exceptions.RequestException, KeyError): # TODO need better handling
+            print('connection error. IP limit possibly exceeded')
             print('index number {}'.format(item_id_results.index(twenty_id)))
             return
 
 
-        response = response.json()
-        response = response['Item']
-
         return response
 
     def conky(self):
@@ -276,17 +277,11 @@ class ShoppingApi:
 
 
 # NOTE:
-# UPDATE** 76 pages seems like it might be too much.
 # Limited to 5000 calls to shopping api per day, and getMultpileitems service maxes out at 20 items
 # per call leaving you 100,000 items per day for you pandas dataframe initially. So you'll have
 # to divide these up into the categories. This will leave you with about 6.25K results per cat.
 # More than enough data for your dataset.
 
-#class SDKTrading:
-#    api = Trading(config_file='ebay.yaml')
-#    response = api.execute('GetUser', {})
-#    print(response.dict())
-#    print(response.reply)
 
 class CurateData:
     '''
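
For reference, this is roughly how the patched try/except in ShoppingApi reads once the first hunk is applied. The wrapper function name and its parameters below are illustrative (the rest of the method is not shown in the hunk), and the imports are assumed to already exist in ebay_api.py; the random sleep, the combined (RequestException, KeyError) handler, and the early parse of response['Item'] come straight from the patch:

    from random import randint
    from time import sleep

    import requests

    def get_twenty_items(url, headers, item_id_results, twenty_id):
        # Illustrative wrapper around the patched block; names other than the
        # patched lines themselves are placeholders.
        try:
            # Random 1-10 second pause spaces out requests so the daily
            # Shopping API quota is less likely to be tripped.
            sleep(randint(1, 10))
            response = requests.get(url, headers=headers, timeout=24)
            response.raise_for_status()
            print('index number {}'.format(item_id_results.index(twenty_id)))
            response = response.json()
            # A missing 'Item' key raises KeyError, which is handled the same
            # way as a connection error (likely the IP/call limit was exceeded).
            response = response['Item']

        except (requests.exceptions.RequestException, KeyError):  # TODO need better handling
            print('connection error. IP limit possibly exceeded')
            print('index number {}'.format(item_id_results.index(twenty_id)))
            return

        return response

On the arithmetic in the NOTE of the second hunk: 5000 Shopping API calls per day * 20 items per GetMultipleItems call = 100,000 items per day, and spreading 100,000 items across roughly 16 categories gives the quoted ~6.25K results per category.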