Removed unused FindingApi class/methods

parent b6796a789e
commit 61e6770812

ebay_api.py (41 lines changed)
@@ -16,9 +16,9 @@ import pandas as pd
 import config as cfg
 import shutil
 import re
-from ebaysdk.trading import Connection as Trading
-from ebaysdk.exception import ConnectionError
+from ebaysdk.exception import ConnectionError
+from ebaysdk.trading import Connection as Trading
 from ebaysdk.finding import Connection as Finding
 from ebaysdk.shopping import Connection as Shopping
 
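Although this hunk only reorders the ebaysdk imports, the hunks below call the Shopping API through plain requests with a hand-built params dict rather than through these connections. For orientation, a minimal sketch of how an ebaysdk connection is normally constructed; the config path and call arguments are illustrative, not from this repo:

    from ebaysdk.exception import ConnectionError
    from ebaysdk.shopping import Connection as Shopping

    try:
        # config_file points at an ebay.yaml holding the app ID; the path is a placeholder
        api = Shopping(config_file='ebay.yaml')
        # GetCategoryInfo with CategoryID -1 returns the root category
        reply = api.execute('GetCategoryInfo', {'CategoryID': '-1'})
        print(reply.dict())
    except ConnectionError as e:
        print(e)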
@@ -182,7 +182,7 @@ class ShoppingApi:
 
         params = {
             "callname":"GetCategoryInfo",
-            "X-EBAY-API-IAF-TOKEN":cfg.sec['X-EBAY-API-IAF-TOKEN'], #TODO change to match format of get_item_from_findItemsByCategory()
+            "X-EBAY-API-IAF-TOKEN":cfg.sec['X-EBAY-API-IAF-TOKEN'],
             "version":"671",
             "responseencoding":"JSON",
             "CategoryID":department,
@@ -197,8 +197,7 @@ class ShoppingApi:
             print('connection error')
 
         response = response.json()
-        response = response['CategoryArray']['Category'][1:] # excludes index
-        # 0 as this is parent node, i.e., women's or men's dept.
+        response = response['CategoryArray']['Category'][1:] # excludes index 0 as this is parent node, i.e., women's or men's dept.
 
         temp_cat_list = [cat['CategoryID'] for cat in response]
         cat_list.extend(temp_cat_list)
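The merged comment explains the [1:] slice: index 0 of CategoryArray.Category is the department node itself. A small illustration of the shape being sliced, with made-up category IDs:

    response = {'CategoryArray': {'Category': [
        {'CategoryID': '1111', 'CategoryName': "Women's Clothing"},  # parent node
        {'CategoryID': '2222', 'CategoryName': 'Dresses'},
        {'CategoryID': '3333', 'CategoryName': 'Tops'},
    ]}}
    cats = response['CategoryArray']['Category'][1:]  # drop the parent at index 0
    print([cat['CategoryID'] for cat in cats])        # ['2222', '3333']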
@@ -228,15 +227,16 @@ class ShoppingApi:
         try:
             # random sleep here between 0 and 10 secs?
-            sleep(randint(1,10))
+            sleep(randint(1,10)) # may not be necessary
             response = requests.get(url, headers=headers,timeout=24)
             response.raise_for_status()
-            print('index number {}'.format(item_id_results.index(twenty_id)))
             response = response.json()
             response = response['Item']
+            print('index number {}'.format(item_id_results.index(twenty_id)))
+            print(response)
 
-        except (requests.exceptions.RequestException, KeyError): # TODO need better handling
+        except (requests.exceptions.RequestException, KeyError):
             print('connection error. IP limit possibly exceeded')
             print('index number {}'.format(item_id_results.index(twenty_id)))
             return # returns NoneType. Handle at conky()
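This hunk moves the index print after the JSON parsing and drops the TODO, but the control flow is unchanged: jittered sleep, request, raise_for_status, then a broad except that falls through to an implicit None. Isolated as a sketch under those assumptions; the function name, url, and headers are placeholders:

    import requests
    from time import sleep
    from random import randint

    def fetch_items(url, headers):
        sleep(randint(1, 10))     # jitter between calls to stay under the per-IP limit
        try:
            r = requests.get(url, headers=headers, timeout=24)
            r.raise_for_status()  # promote HTTP 4xx/5xx to RequestException
            return r.json()['Item']
        except (requests.exceptions.RequestException, KeyError):
            return None           # NoneType is handled by the caller (conky())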
@@ -251,7 +251,7 @@ class ShoppingApi:
         try:
             with open('raw_data.txt') as f:
                 data = json.load(f)
-        except (FileNotFoundError, ValueError): # TODO not catching error
+        except (FileNotFoundError, ValueError):
             data = []
 
         try:
@@ -262,18 +262,17 @@ class ShoppingApi:
         except (FileNotFoundError, ValueError):
             item_id_results = scrape_ids.main()
 
-        with concurrent.futures.ThreadPoolExecutor() as executor: # NOTE may need to include sleeps to avoid connection refusal due to overwhelming servers
+        with concurrent.futures.ThreadPoolExecutor() as executor:
             for future in executor.map(self.get_item_from_findItemsByCategory, item_id_results):
                 if future is not None:
                     for item in future:
-                        data.append(item) # The end result should be a list of dicts where each dict in the list is a listing data.update(future)
+                        data.append(item) # The end result should be a list of dicts where each dict in the list is a listing
                 else:
                     print('reached call limit')
                     break
         with open('raw_data.txt', 'w') as f:
             json.dump(data, f)
 
-        # TODO It seems like the problem with updating the dictionary/csv file is starting here possibly; I think the item data is getting appended out of order from the item itself.
-
         # NOTE:
 
         # Limited to 5000 calls to shopping api per day, and getMultipleItems service maxes out at 20 items
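The NOTE caps usage at 5000 Shopping API calls per day with at most 20 items per getMultipleItems call, which is why the item IDs are consumed in twenty-ID groups (twenty_id in the hunk above). A sketch of the implied batching; the helper name is hypothetical:

    def batch_item_ids(item_ids, size=20, daily_call_limit=5000):
        # Yield comma-joined groups of at most `size` IDs, stopping at the daily cap.
        for call_count, start in enumerate(range(0, len(item_ids), size)):
            if call_count >= daily_call_limit:
                break
            yield ','.join(item_ids[start:start + size])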
@@ -515,15 +514,21 @@ class CurateData:
             #TODO it would mean that temp_pics_source_list is changing for some reason?
 
             else:
-                r = requests.get(pic, stream=True)
-                r.raw.decode_content = True
-                with open(temp_dict_pics[pic], 'wb') as f: # Or call dict_pics[pic] can work
-                    shutil.copyfileobj(r.raw, f)
+                try:
+                    r = requests.get(pic, stream=True)
+                    r.raw.decode_content = True
+                    with open(temp_dict_pics[pic], 'wb') as f: # Or call dict_pics[pic] can work
+                        shutil.copyfileobj(r.raw, f)
+                except ConnectionError:
+                    return
 
         bargs = [(dict_pics, pic) for pic in temp_pics_source_list]
         with concurrent.futures.ThreadPoolExecutor() as executor:
             for future in executor.map(lambda p: dl_pic(*p), bargs):
-                future
+                if future is not None:
+                    future
+                else:
+                    print('connection error')
 
         os.remove('temp_pics_source_list.txt') # Deletes file after downloads complete successfully
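The new try/except wraps the streamed download so one failed picture no longer kills the worker. One caution: given the imports in the first hunk, ConnectionError here appears to be ebaysdk's exception, which shadows the builtin and would not catch a failure raised by requests. A sketch of the same download catching the requests exception instead; the function and parameter names are placeholders:

    import shutil
    import requests

    def dl_pic(dest_path, url):
        try:
            r = requests.get(url, stream=True, timeout=24)
            r.raw.decode_content = True       # decompress gzip before writing raw bytes
            with open(dest_path, 'wb') as f:
                shutil.copyfileobj(r.raw, f)  # stream straight to disk, no full buffer
        except requests.exceptions.ConnectionError:
            return None                       # the executor loop treats None as a failure
        return dest_path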