removed deleted stuff?

This commit is contained in:
spbeach46 2020-10-04 23:48:52 -07:00
parent 77038e92f8
commit b5afa5cfc0
2 changed files with 0 additions and 67 deletions

View File

@ -1,35 +0,0 @@
import requests
import json
import pandas as pd
# keywords = input('keyword search: ')
with open('cat_list.txt') as jf:
    cat_list = json.load(jf)
finding_service = ['findItemsAdvanced', 'findCompletedItems', 'findItemsByKeywords', 'findItemsIneBayStores', 'findItemsByCategory', 'findItemsByProduct']
pageNumber = list(range(1,101))
# departments = ["3034","93427"]
itemid_results_list = []
for categoryID in cat_list[0:2]:
    params = {
        "OPERATION-NAME": finding_service[4],
        "SECURITY-APPNAME": "scottbea-xlister-PRD-6796e0ff6-14862949",
        "SERVICE-VERSION": "1.13.0",
        "RESPONSE-DATA-FORMAT": "JSON",
        "categoryId": categoryID,
        "paginationInput.entriesPerPage": "100",
        "paginationInput.pageNumber": pageNumber[0]  # 'pageNumber', not 'PageNumber'; only page 1 is fetched here
    }
    # extract item ids here for piping into shopping_test.py (see the sketch after this file)
    response = requests.get("https://svcs.ebay.com/services/search/FindingService/v1", params=params)
    data = response.json()
    pretty_data = json.dumps(data, indent=2)
# can use pandas.json_normalize(a custom dict cobbled from response.json())
# Additional problem when collecting labeled data: shoe types and features that don't
# appear under features, accents, styles, categories, or subcategories.
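
The loop above requests each category but never fills itemid_results_list. A minimal sketch of the extraction step the comment points at, assuming the usual Finding API response shape (findItemsByCategoryResponse -> searchResult -> item, every level wrapped in a one-element list); extract_item_ids is a hypothetical helper:

def extract_item_ids(data):
    # walk the one-element-list wrappers of a Finding API JSON response
    ids = []
    for resp in data.get('findItemsByCategoryResponse', []):
        for result in resp.get('searchResult', []):
            for item in result.get('item', []):
                ids.extend(item.get('itemId', []))  # itemId is itself a one-element list
    return ids

# inside the category loop above:
# itemid_results_list.extend(extract_item_ids(data))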

View File

@ -1,32 +0,0 @@
import requests
import json
import pandas as pd
params = {
    "callname": "GetMultipleItems",
    "appid": "scottbea-xlister-PRD-6796e0ff6-14862949",
    "version": "671",
    "responseencoding": "JSON",
    "ItemID": '184228756721',  # not a list; multiple ids go in one comma-separated string (up to 20 per call)
    "IncludeSelector": "ItemSpecifics",
}
response = requests.get("https://open.api.ebay.com/shopping", params=params)
data = response.json()
pretty_data = json.dumps(data, indent=2)
names = []
values = []
nvl = data['Item'][0]['ItemSpecifics']['NameValueList']
for nvl_dict in nvl:
    names.append(nvl_dict['Name'])
    values.append(nvl_dict['Value'])
specifics = dict(zip(names, values))  # renamed so the loop variable isn't shadowed
data.update(specifics)
df = pd.json_normalize(data)
df.to_csv('big_data.csv')
# to append items to the pandas dataframe/csv, use var = df.append(json/dict, ignore_index=True).
# Reassigning is required because append returns a new dataframe rather than mutating in
# place; that's by design, not a bug (newer pandas deprecates append in favor of pd.concat).
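
On that closing comment: a minimal sketch of accumulating rows with pd.concat instead of the reassign-per-append pattern, assuming each further item dict is flattened the same way as data above (next_item is hypothetical):

rows = [data]  # flattened dicts, one per item
# rows.append(next_item)  # next_item: another flattened response dict (hypothetical)
df = pd.concat([pd.json_normalize(r) for r in rows], ignore_index=True)
df.to_csv('big_data.csv', index=False)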