get_item_from_findItemsByCategory connection error fix

scott 2021-11-26 20:40:26 -07:00
parent 0f65ab0bc2
commit 2cf8270707

@@ -241,28 +241,35 @@ class ShoppingApi:
        except (FileNotFoundError, ValueError): # TODO not catching error
            data = []
        service_dict = {
            0: 'findItemsAdvanced', 1: 'findCompletedItems',
            2: 'findItemsByKeywords', 3: 'findItemsIneBayStores',
            4: 'findItemsByCategory', 5: 'findItemsByProduct'}
        service_dict
        try:
            with open('ids.txt') as f:
                ids = json.load(f)
            item_id_results = [','.join(ids[n:n+20]) for n in list(range(0,
                len(ids), 20))] # 20-ItemID list created to maximize dataset/decrease calls given call constraints
            fnd_srvc = input(str(service_dict) + "choose Finding call: (press 'enter' for default(4))")
            target_idspc = int(input('how many ids per cat? (7692 max)'))
            optional_params = {
                "itemFilter(0).name": "Condition",
                "itemFilter(0).value": "Used"
            } # NOTE setting as default in get_data() method
            if fnd_srvc != '':
                fnd_srvc = int(fnd_srvc)
                finding = FindingApi(fnd_srvc, target_idspc)
            else:
                fnd_srvc = 4
                finding = FindingApi(fnd_srvc, target_idspc)
            item_id_results = scrape_ids.main()
        except (FileNotFoundError, ValueError):
            item_id_results = scrape_ids.main()
        # service_dict = {
        #     0: 'findItemsAdvanced', 1: 'findCompletedItems',
        #     2: 'findItemsByKeywords', 3: 'findItemsIneBayStores',
        #     4: 'findItemsByCategory', 5: 'findItemsByProduct'}
        # service_dict
        #
        # fnd_srvc = input(str(service_dict) + "choose Finding call: (press 'enter' for default(4))")
        # target_idspc = int(input('how many ids per cat? (7692 max)'))
        #
        # optional_params = {
        #     "itemFilter(0).name": "Condition",
        #     "itemFilter(0).value": "Used"
        # } # NOTE setting as default in get_data() method
        #
        # if fnd_srvc != '':
        #     fnd_srvc = int(fnd_srvc)
        #     finding = FindingApi(fnd_srvc, target_idspc)
        # else:
        #     fnd_srvc = 4
        #     finding = FindingApi(fnd_srvc, target_idspc)
        #
        with concurrent.futures.ThreadPoolExecutor() as executor:
            for future in executor.map(self.get_item_from_findItemsByCategory, item_id_results):
                for item in future:
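
For context, the flow this hunk settles on is: read the cached IDs from ids.txt, join them into comma-separated batches of 20 so each call carries as many items as the service allows, fall back to scrape_ids.main() when the file is missing or unreadable, and fan the batches out over a thread pool. Below is a minimal, self-contained sketch of that pattern; fetch_batch and collect_items are hypothetical stand-ins for the project's get_item_from_findItemsByCategory and the surrounding method, not the actual implementation.

import json
import concurrent.futures


def batch_ids(ids, size=20):
    """Join item IDs into comma-separated strings of at most `size` IDs,
    so each API call carries as many items as the service allows."""
    return [','.join(ids[n:n + size]) for n in range(0, len(ids), size)]


def fetch_batch(id_string):
    """Hypothetical stand-in for a per-batch call such as
    get_item_from_findItemsByCategory; a real version would issue the
    request and return the parsed items for the IDs in `id_string`."""
    return []


def collect_items(path='ids.txt'):
    try:
        with open(path) as f:
            ids = json.load(f)             # cached JSON list of item ID strings
        item_id_results = batch_ids(ids)
    except (FileNotFoundError, ValueError):
        item_id_results = []               # the project falls back to scrape_ids.main() here

    items = []
    # Fan the 20-ID batches out across worker threads, mirroring executor.map above.
    with concurrent.futures.ThreadPoolExecutor() as executor:
        for result in executor.map(fetch_batch, item_id_results):
            items.extend(result)
    return items


if __name__ == '__main__':
    print(len(collect_items()))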