added used items query as default parameter in get_data() method
parent 152b72f5df
commit c2a812f7a8
ebay_api.py: 36 changes
@@ -41,14 +41,16 @@ class FindingApi:
             "SERVICE-VERSION":"1.13.0",
             "RESPONSE-DATA-FORMAT":"JSON",
             "categoryId":category_id,
-            "paginationInput.entriesPerPage":"100",
-            "paginationInput.PageNumber":i
+            "paginationInput.entriesPerPage":"10", # TODO change back to max = 100
+            "paginationInput.PageNumber":i,
+            "itemFilter(0).name":"Condition",
+            "itemFilter(0).value":"Used"
             }

         # TODO add try excepts here
         try:
             response = requests.get("https://svcs.ebay.com/services/search/FindingService/v1",
-                    params=params, timeout=3)
+                    params=params, timeout=4)
             response.raise_for_status()

         except requests.exceptions.RequestException:
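
For context, a minimal standalone sketch of the used-items query this hunk builds, with parameter names copied from the diff; the app ID, category ID, and operation name below are placeholders, not values from the repo:

    import requests

    APP_ID = "YOUR-APP-ID"  # stands in for cfg.sec['SECURITY-APPNAME']
    params = {
        "OPERATION-NAME": "findItemsByCategory",  # assumed, per the service_dict later in this diff
        "SECURITY-APPNAME": APP_ID,
        "SERVICE-VERSION": "1.13.0",
        "RESPONSE-DATA-FORMAT": "JSON",
        "categoryId": "177",  # hypothetical category id
        "paginationInput.entriesPerPage": "10",
        "paginationInput.PageNumber": "1",
        # the filter this commit adds: restrict results to used items
        "itemFilter(0).name": "Condition",
        "itemFilter(0).value": "Used",
    }
    response = requests.get(
        "https://svcs.ebay.com/services/search/FindingService/v1",
        params=params, timeout=4)
    response.raise_for_status()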
@@ -134,7 +136,7 @@ class ShoppingApi:
             }

         try:
-            response = requests.get("https://open.api.ebay.com/shopping?", params=params, timeout=1)
+            response = requests.get("https://open.api.ebay.com/shopping?", params=params, timeout=4)
             response.raise_for_status()

         except requests.exceptions.RequestException:
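
A hedged aside on the repeated timeout bumps in this commit: in requests, timeout is not a cap on the whole request but on each socket phase, and a (connect, read) tuple sets the two phases separately. The URL is the endpoint from the hunk above; the empty params dict is a placeholder:

    import requests

    url = "https://open.api.ebay.com/shopping"
    # 3 s to establish the connection, 6 s for the server to start sending data
    response = requests.get(url, params={}, timeout=(3, 6))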
@@ -158,7 +160,7 @@ class ShoppingApi:
         '''
         params = {
                 "callname":"GetMultipleItems",
-                "appid":cfg.sec['SECURITY-APPNAME'],
+                "appid":cfg.sec['SECURITY-APPNAME'], # TODO check ebay.yaml for deprecation. Might be why you're getting connection errors
                 "version":"671",
                 "responseencoding":"JSON",
                 "ItemID":twenty_id,
@@ -166,7 +168,7 @@
             }

         try:
-            response = requests.get("https://open.api.ebay.com/shopping?", params=params, timeout=1)
+            response = requests.get("https://open.api.ebay.com/shopping?", params=params, timeout=4)
             response.raise_for_status()

         except requests.exceptions.RequestException: # TODO need better handling
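
One hedged way to act on the "# TODO need better handling" note above: a small retry wrapper with exponential backoff instead of silently swallowing every RequestException. The helper name and signature are hypothetical, not part of the repo:

    import time
    import requests

    def get_with_retry(url, params, attempts=3, timeout=4):
        # retry transient failures, then surface the final error to the caller
        for attempt in range(attempts):
            try:
                response = requests.get(url, params=params, timeout=timeout)
                response.raise_for_status()
                return response
            except requests.exceptions.RequestException:
                if attempt == attempts - 1:
                    raise
                time.sleep(2 ** attempt)  # 1 s, 2 s, ... between retries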
@@ -190,14 +192,21 @@ class ShoppingApi:

     service_dict = {
             0: 'findItemsAdvanced', 1: 'findCompletedItems',
-            2: 'findItemsAdvanced', 3: 'findCompletedItems',
-            4: 'findItemsByProduct'}
+            2: 'findItemsByKeywords', 3: 'findItemsIneBayStores',
+            4: 'findItemsByCategory', 5:'findItemsByProduct'}
     service_dict

-    fnd_srvc = int(input(str(service_dict) + "choose Finding call: ('press enter' for default(4))"))
+    fnd_srvc = input(str(service_dict) + "choose Finding call: (press 'enter' for default(4))")
     pg_num = int(input('how many pages? (76 max)'))

-    finding = FindingApi(fnd_srvc, pg_num) # TODO replace these test values before production or add option to change prior to execution
+    optional_params = {
+            "itemFilter(0).name":"Condition",
+            "itemFilter(0).value":"Used"
+            } # NOTE setting as default in get_data() method
+
+    if fnd_srvc != '':
+        fnd_srvc = int(fnd_srvc)
+        finding = FindingApi(fnd_srvc, pg_num)
+    else:
+        fnd_srvc = 4
+        finding = FindingApi(fnd_srvc, pg_num)
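
The commit message says the used-items filter becomes a default parameter of get_data(); a hedged sketch of that pattern follows, with an illustrative signature rather than the repo's exact one. A None sentinel avoids the mutable-default-argument pitfall that a literal dict default would create:

    DEFAULT_FILTERS = {
        "itemFilter(0).name": "Condition",
        "itemFilter(0).value": "Used",
    }

    def get_data(category_id, i, optional_params=None):
        # fall back to the used-items filter unless the caller overrides it
        filters = DEFAULT_FILTERS if optional_params is None else optional_params
        params = {
            "categoryId": category_id,
            "paginationInput.entriesPerPage": "10",
            "paginationInput.PageNumber": i,
            **filters,
        }
        return params  # the real method would go on to call requests.get(...)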
@@ -210,7 +219,7 @@ class ShoppingApi:
             # data.update(future)
         with open('raw_data.txt', 'w') as f:
             json.dump(data, f)
-        return data # TODO each future is a list of dictionaries because the output of any multithreader in this method is a list.
+        return data # each future is a list of dictionaries because the output of any multithreader in this method is a list.

         # data dictionary can't update from list of dicts unless iterated over. Might need a different way to update.
         # TODO It seems like the problem with updating the dictionary/csv file is starting here possibly; I think the item data is getting appended out of order from the item itself.
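
The comments above name the exact problem: dict.update() rejects a list of dicts, so each future's result has to be iterated. A small sketch of that merge, where fetch_page is a stand-in for the real per-page API call; keying the dict by item id also sidesteps the out-of-order concern in the TODO, since completion order stops mattering:

    from concurrent.futures import ThreadPoolExecutor, as_completed

    def fetch_page(i):
        # stand-in for the per-page API call; each worker returns a list of dicts
        return [{"itemId": f"{i}-{n}", "page": i} for n in range(3)]

    data = {}
    with ThreadPoolExecutor() as executor:
        futures = [executor.submit(fetch_page, i) for i in range(4)]
        for future in as_completed(futures):
            for item in future.result():     # a list of dicts can't feed
                data[item["itemId"]] = item  # dict.update() directly; iterate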
@@ -241,7 +250,8 @@ class CurateData:

     def raw_df(self, raw_data):
         '''
-        creates pandas df from raw json. Intended to be used inline with direct
+        creates pandas df from raw json and saves master raw csv file as raw_df.csv.
+        Intended to be used inline with direct
         data stream from ebay's APIs
         '''
         to_json = json.dumps(raw_data)
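
A hedged sketch of the flattening step this docstring describes; the payload shape below is invented, not the real Finding API response, and pd.json_normalize can consume the dict directly instead of round-tripping through json.dumps:

    import pandas as pd

    raw_data = {"searchResult": {"item": [
        {"itemId": "1", "title": "camera body"},
        {"itemId": "2", "title": "zoom lens"},
    ]}}  # hypothetical shape

    # flatten the nested item records into columns, then save the master csv
    df = pd.json_normalize(raw_data["searchResult"]["item"])
    df.to_csv("raw_df.csv", index=False)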
@@ -469,7 +479,7 @@ class PreProcessing:
         '''
         pass

-# TODO pipeline gameplan: 5 files: master img download dict, raw_json.txt, raw_json.csv, master_class_training.csv, master_nvl_training.csv
+# TODO pipeline gameplan: 5 files: dict_pics.txt, raw_json.txt, raw_json.csv, expanded_class.csv, expanded_dropd.csv
 # cont... open raw_json.txt and append, same with csv --> process new data --> pull out image source+dest and expand new dfs for the additional pictures
 # if not exists and append to master img download dict
 # --> concat m_class_training df and m_nvl_training dfs with new data. Need to add inclusion tests for all files when opened and appended/concatted
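
A hedged sketch of the open-append-and-test step the gameplan describes; the master file name follows the TODO, while the helper name and the dedupe column are assumptions:

    import os
    import pandas as pd

    def append_to_master(new_df, master_path="expanded_class.csv"):
        # append new rows to the master csv with a simple inclusion test
        if os.path.exists(master_path):
            master = pd.read_csv(master_path)
            combined = pd.concat([master, new_df], ignore_index=True)
            combined = combined.drop_duplicates(subset="PictureURL")  # assumed key
        else:
            combined = new_df
        combined.to_csv(master_path, index=False)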