From b6796a789e8847f60c832d5f8f66bccc86604eeb Mon Sep 17 00:00:00 2001 From: scott Date: Sun, 28 Nov 2021 01:19:59 -0700 Subject: [PATCH] get_item_from_findItemsByCategory returns None if call limit is reached. conky will save and exit --- ebay_api.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/ebay_api.py b/ebay_api.py index 8ef84d1..a71446d 100644 --- a/ebay_api.py +++ b/ebay_api.py @@ -239,7 +239,7 @@ class ShoppingApi: except (requests.exceptions.RequestException, KeyError): # TODO need better handling print('connection error. IP limit possibly exceeded') print('index number {}'.format(item_id_results.index(twenty_id))) - return + return # returns NoneType. Handle at conky() return response @@ -264,15 +264,14 @@ class ShoppingApi: with concurrent.futures.ThreadPoolExecutor() as executor: # NOTE may need to include sleeps to avoid connection refusal due to overwhelming servers for future in executor.map(self.get_item_from_findItemsByCategory, item_id_results): - for item in future: - data.append(item) # The end result should be a list of dicts where each dict in the list is a listing - # data.update(future) + if future is not None: + for item in future: + data.append(item) # The end result should be a list of dicts where each dict in the list is a listing data.update(future) + else: + break with open('raw_data.txt', 'w') as f: - json.dump(data, f) # TODO maybe write for every future returned to avoid losing data if your accidentally reach - # the call limit and you get an error - return data # each future is a list of dictionaries because the output of any multithreader in this method is a list. + json.dump(data, f) - # data dictionary can't update from list of dicts unless iterated over. Might need a different way to update. # TODO It seems like the problem with updating the dictionary/csv file is starting here possibly; I think the item data is getting appended out of order from the item itself. # NOTE: