get_item_from_findItemsByCategory returns None if the call limit is reached. conky will save and exit
This commit is contained in:
parent
5a6963b371
commit
b6796a789e
15
ebay_api.py
15
ebay_api.py
@ -239,7 +239,7 @@ class ShoppingApi:
|
||||
except (requests.exceptions.RequestException, KeyError): # TODO need better handling
|
||||
print('connection error. IP limit possibly exceeded')
|
||||
print('index number {}'.format(item_id_results.index(twenty_id)))
|
||||
return
|
||||
return # returns NoneType. Handle at conky()
|
||||
|
||||
return response
|
||||
|
||||
@ -264,15 +264,14 @@ class ShoppingApi:
|
||||
|
||||
with concurrent.futures.ThreadPoolExecutor() as executor: # NOTE may need to include sleeps to avoid connection refusal due to overwhelming servers
|
||||
for future in executor.map(self.get_item_from_findItemsByCategory, item_id_results):
|
||||
for item in future:
|
||||
data.append(item) # The end result should be a list of dicts where each dict in the list is a listing
|
||||
# data.update(future)
|
||||
if future is not None:
|
||||
for item in future:
|
||||
data.append(item) # The end result should be a list of dicts where each dict in the list is a listing data.update(future)
|
||||
else:
|
||||
break
|
||||
with open('raw_data.txt', 'w') as f:
|
||||
json.dump(data, f) # TODO maybe write for every future returned to avoid losing data if you accidentally reach
|
||||
# the call limit and you get an error
|
||||
return data # each future is a list of dictionaries because the output of any multithreader in this method is a list.
|
||||
json.dump(data, f)
|
||||
|
||||
# data dictionary can't update from list of dicts unless iterated over. Might need a different way to update.
|
||||
# TODO It seems like the problem with updating the dictionary/csv file is starting here possibly; I think the item data is getting appended out of order from the item itself.
|
||||
|
||||
# NOTE:
|
||||
|
Loading…
Reference in New Issue
Block a user