From 3b0b96fd92e658ae5e794f86bfde035f157e15a8 Mon Sep 17 00:00:00 2001
From: spbeach46
Date: Wed, 7 Oct 2020 18:53:19 -0700
Subject: [PATCH] around line 37, creating itemId list to pipe into shopping api call/class

---
 finding_api.py  | 43 +++++++++++++++++++++++++++++++++++++++++++
 shopping_api.py | 49 ++++++++++++++++++++++++++++---------------------
 2 files changed, 71 insertions(+), 21 deletions(-)
 create mode 100644 finding_api.py

diff --git a/finding_api.py b/finding_api.py
new file mode 100644
index 0000000..8ecfad1
--- /dev/null
+++ b/finding_api.py
@@ -0,0 +1,43 @@
+import requests
+import json
+from bs4 import BeautifulSoup as b
+import pandas as p
+
+# keywords = input('keyword search: ')
+
+with open('cat_list.txt') as jf:
+    cat_list = json.load(jf)
+finding_service = ['findItemsAdvanced', 'findCompletedItems', 'findItemsByKeywords', 'findItemsIneBayStores', 'findItemsByCategory', 'findItemsByProduct']
+
+pageNumber = list(range(1, 63))
+
+# departments = ["3034","93427"]
+
+def get_ids():
+    itemid_results_list = []
+    for categoryID in cat_list[0:2]:
+        params = {
+            "OPERATION-NAME":finding_service[4],
+            "SECURITY-APPNAME":"scottbea-xlister-PRD-6796e0ff6-14862949",
+            "SERVICE-VERSION":"1.13.0",
+            "RESPONSE-DATA-FORMAT":"JSON",
+            "categoryId":categoryID,
+            "paginationInput.entriesPerPage":"100",
+            "paginationInput.PageNumber":pageNumber[0]
+        }
+        # extract item id here for piping into shopping_test.py
+
+        response = requests.get("https://svcs.ebay.com/services/search/FindingService/v1", params=params)
+        data = response.json()
+        pretty_data = json.dumps(data, indent=2)
+        return data
+# can use pandas.json_normalize(custom dict cobbled from response.json())
+
+
+# Additional problem you will run into when getting labeled data: shoe types and features that don't appear in the features, accents, styles, categories or subcategories fields.
+
+# also limited to 5000 calls per day. This leaves you with 500k listings
+
+# If you want to split up each cat equally with their respective maxes then use 62 pages with 100
+# entries per page. At this amount you'll have the max number of calls you can make on the
+# shopping api.
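A rough sketch of the itemId extraction and pandas.json_normalize idea mentioned in the comments above. The nested keys ('findItemsByCategoryResponse', 'searchResult', 'item', 'itemId') reflect eBay's Finding API JSON layout but do not appear anywhere in this patch, so treat the block as an assumption rather than the committed implementation:

    import pandas as pd

    def extract_ids(data):
        # `data` is the parsed JSON returned by get_ids() above (findItemsByCategory response)
        items = data['findItemsByCategoryResponse'][0]['searchResult'][0]['item']
        item_ids = [item['itemId'][0] for item in items]  # each itemId arrives wrapped in a 1-element list
        listings = pd.json_normalize(items)               # one row per listing, nested keys flattened to dotted columns
        return item_ids, listings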
diff --git a/shopping_api.py b/shopping_api.py
index a5c151e..9e1e48d 100644
--- a/shopping_api.py
+++ b/shopping_api.py
@@ -1,37 +1,45 @@
-import requests
 import json
-from bs4 import BeautifulSoup as b
+import requests
 import pandas as pd
+# OPEN CSV AS VARIABLE RIGHT HERE
 with open('cat_list.txt') as jf:
     cat_list = json.load(jf)
-finding_service = ['findItemsAdvanced', 'findCompletedItems', 'findItemsByKeywords', 'findItemsIneBayStores', 'findItemsByCategory', 'findItemsByProduct']
 
-class Finding_api:
+class FindingApi:
+    finding_service = [
+        'findItemsAdvanced', 'findCompletedItems',
+        'findItemsByKeywords', 'findItemsIneBayStores', 'findItemsByCategory',
+        'findItemsByProduct'
+    ]
     pageNumber = list(range(1, 63))
 
     # departments = ["3034","93427"]
 
     def get_ids(self):
         itemid_results_list = []
-        for categoryID in cat_list[0:2]:
-            params = {
-                "OPERATION-NAME":finding_service[4],
-                "SECURITY-APPNAME":"scottbea-xlister-PRD-6796e0ff6-14862949",
-                "SERVICE-VERSION":"1.13.0",
-                "RESPONSE-DATA-FORMAT":"JSON",
-                "categoryId":categoryID,
-                "paginationInput.entriesPerPage":"100",
-                "paginationInput.PageNumber":pageNumber[0]
-            }
-            # extract item id here for piping into shopping_test.py
+        for category_id in cat_list:
+            for i in FindingApi.pageNumber:
+                params = {
+                    "OPERATION-NAME":FindingApi.finding_service[4],
+                    "SECURITY-APPNAME":"scottbea-xlister-PRD-6796e0ff6-14862949",
+                    "SERVICE-VERSION":"1.13.0",
+                    "RESPONSE-DATA-FORMAT":"JSON",
+                    "categoryId":category_id,
+                    "paginationInput.entriesPerPage":"100",
+                    "paginationInput.PageNumber":i  # i is already the page number (1-62)
+                }
+                # extract item id here for piping into shopping_test.py
 
-            response = requests.get("https://svcs.ebay.com/services/search/FindingService/v1", params=params)
-            data = response.json()
-            pretty_data = json.dumps(data, indent=2)
-            return data
+                response = requests.get("https://svcs.ebay.com/services/search/FindingService/v1", params=params)
+                data = response.json()
+                pretty_data = json.dumps(data, indent=2)
+                itemid_results_list.append(data)  # TODO: pull just the itemIds out of the response
+
+        item_id_results = map(str, itemid_results_list)
+        item_id_results = ','.join(item_id_results)  # you will have to split this into 20-item strings to feed into the ItemID param. Consider making a list of str lists.
+        return item_id_results
 
-class Shopping_api:
+class ShoppingApi:
 
     def get_item(self):
@@ -46,7 +54,6 @@ class Shopping_api:
 
         response = requests.get("https://open.api.ebay.com/shopping?", params=params)
         data = response.json()
-        pretty_data = json.dumps(data, indent=2)
 
         names = []
         values = []
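The trailing comment in get_ids() notes that the collected itemIds have to be fed to the Shopping API in batches of 20 via the ItemID param. A minimal sketch of that chunking, assuming get_ids() ends up returning a flat list of itemId strings; the helper name chunk_ids and the grouping size follow that comment (and the Shopping API's 20-IDs-per-GetMultipleItems-call limit), not code that exists in this patch:

    def chunk_ids(itemid_results_list, size=20):
        # Build comma-joined strings of at most `size` ids each, one string per
        # Shopping API call, since the ItemID param takes a comma-separated list.
        ids = list(map(str, itemid_results_list))
        return [','.join(ids[i:i + size]) for i in range(0, len(ids), size)]

    # Each element of chunk_ids(...) could then be passed as the ItemID value
    # in the params dict inside ShoppingApi.get_item().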