Initial Commit

This commit is contained in:
spbeach46 2020-05-25 18:46:22 -07:00
commit bbfd20a02b
3 changed files with 301 additions and 0 deletions

id_update.py Normal file (65 lines)

@@ -0,0 +1,65 @@
import json
import time
import concurrent.futures
from ebaysdk.trading import Connection as Trading
from ebaysdk.finding import Connection as Finding
# categoryId 3034 = women's shoes
# First request only needs paginationOutput, to learn how many pages to walk
start = time.time()
fapi = Finding(config_file='ebay.yaml')
tapi = Trading(config_file='ebay.yaml')
fresponse = fapi.execute('findItemsAdvanced', {
    'itemFilter': {'name': 'Seller', 'value': 'chesshoebuddy'},
    'categoryId': '3034',
    'paginationInput': {'entriesPerPage': '100', 'pageNumber': '1'}
}).dict()
page_results = int(fresponse['paginationOutput']['totalPages'])
pages = list(range(1, page_results + 1))  # eBay page numbers are 1-indexed
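# e.g. 340 matching listings -> totalPages = 4 -> pages == [1, 2, 3, 4] (illustrative numbers)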
''' Begin definitions for getting ItemIds and SKU: '''

def id_up(n):
    '''Return the itemIds found on page n of the seller's listings.'''
    ids = []
    fresponse = fapi.execute('findItemsAdvanced', {
        'itemFilter': {'name': 'Seller', 'value': 'chesshoebuddy'},
        'categoryId': '3034',
        'paginationInput': {'entriesPerPage': '100', 'pageNumber': str(n)}
    }).dict()
    for item in fresponse['searchResult']['item']:
        itemID = item['itemId']
        # response = tapi.execute('GetItem', {'ItemID': itemID}).dict()
        ids.append(itemID)
    return ids
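# e.g. id_up(3) -> up to 100 itemId strings from page 3, like ['254123456789', ...] (illustrative values)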
'''def sku_up(itemID):
    response = tapi.execute('GetItem', {'ItemID': itemID}).dict()
    try:
        sku = response['Item']['SKU']
    except KeyError:
        sku = 'missing'
    return sku'''
def main():
    ids = []
    skus = []
    # Threads fit here: each id_up call is an I/O-bound API request
    with concurrent.futures.ThreadPoolExecutor() as executor:
        for page_ids in executor.map(id_up, pages):
            ids.extend(page_ids)
    with open('ebay_ids.txt', 'w') as outfile:
        json.dump(ids, outfile)
    '''with concurrent.futures.ProcessPoolExecutor() as executor:
        for future in executor.map(sku_up, ids):
            skus.append(future)
    with open('ebay_skus.txt', 'w') as outfile:
        json.dump(skus, outfile)'''

if __name__ == '__main__':
    main()
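# Note: the __main__ guard matters here because xlister.py imports this module and calls main() itself.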
'''ebay_id_dict = dict(zip(ids, skus))
with open('ebay_id_dict.txt', 'w') as outfile:
    json.dump(ebay_id_dict, outfile)'''

requirements.txt Normal file (binary)

Binary file not shown.

xlister.py Normal file (236 lines)

@@ -0,0 +1,236 @@
import os
import requests
import json
import wget
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.select import Select
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.action_chains import ActionChains
from selenium.common.exceptions import TimeoutException
import ebaysdk
from ebaysdk.trading import Connection as Trading
from ebaysdk.finding import Connection as Finding
import concurrent.futures
import id_update
# update ebay_ids.txt file:
id_update.main()
# start selenium and log in to posh:
browser = webdriver.Chrome("C:/Users/unknown/Desktop/projects/chromedriver")
browser.get('https://poshmark.com/login')
username = browser.find_element_by_id('login_form_username_email')
username.send_keys('SpeanutButter')
psw = browser.find_element_by_id('login_form_password')
psw.send_keys('***REMOVED***')
psw.submit()
input('press "enter" to continue')
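# (the pause above leaves time to clear any human-verification prompt by hand before continuing)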
browser.get('https://poshmark.com/create-listing')
wait = WebDriverWait(browser, 120)
# Loop through each itemID to get the data needed for the posh listing:
tapi = Trading(config_file='ebay1.yaml')
img_num = list(range(12))  # picture indices, for use with ThreadPoolExecutor
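# img_num == [0, 1, ..., 11]: candidate indices into the listing's PictureURL list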
# Cross-reference the ebay and posh txt files to sync posh listings:
with open('ebay_ids.txt') as jf:
    ebay_ids = json.load(jf)
for itemID in ebay_ids:
    # Reload the posh files every pass so the check sees listings created earlier in this run
    with open('posh_ids.txt') as jf:
        posh_ids = json.load(jf)
    with open('posh_skus.txt') as jf:
        posh_skus = json.load(jf)
    if itemID not in posh_ids:
        response = tapi.execute('GetItem', {'ItemID': itemID, 'DetailLevel': 'ReturnAll', 'IncludeItemSpecifics': 'True'}).dict()
        print(itemID)
        # Download the listing's first pics (up to 12) and temporarily hold them until the listing is done:
        def img_download(i):
            try:
                pic = response['Item']['PictureDetails']['PictureURL'][i]
            except IndexError:
                return None  # this listing has fewer than 12 pictures
            img = 'C:/Users/unknown/Desktop/projects/tempics/' + str(i) + '.jpg'
            wget.download(pic, img)
            return img
        pics = []
        with concurrent.futures.ThreadPoolExecutor() as executor:
            for img in executor.map(img_download, img_num):
                if img is not None:  # skip indices past the last picture
                    pics.append(img)
        pics_path = '\n'.join(pics)
        # Begin image upload:
        fileup = browser.find_element_by_id('img-file-input')
        fileup.send_keys(pics_path)  # newline-separated paths upload several files in one call
        wait.until(EC.element_to_be_clickable((By.XPATH, "//button[@data-et-name='apply']")))
        browser.find_element_by_xpath("//button[@data-et-name='apply']").click()
        for image in pics:
            os.remove(image)  # delete the temp images from local storage
        # TODO: a function that takes the ebay id and returns the details below would avoid
        # redefining these steps every iteration; it could merely be called.
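        # A possible shape for that refactor (hypothetical helper, not wired in):
        # def fetch_details(item_id):
        #     return tapi.execute('GetItem', {'ItemID': item_id, 'DetailLevel': 'ReturnAll',
        #                                     'IncludeItemSpecifics': 'True'}).dict()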
        cond_desc = response['Item']['ConditionDescription'] + ' Props not included'
        item_specs = response['Item']['ItemSpecifics']
        nvl = item_specs['NameValueList']
        nvl_dict = {d['Name']: d['Value'] for d in nvl}
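        # nvl is roughly [{'Name': 'Brand', 'Value': 'Nike'}, ...] and flattens to
        # {'Brand': 'Nike', ...} (illustrative values)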
        brand = nvl_dict['Brand']
        cat = response['Item']['PrimaryCategory']['CategoryName']
        try:
            color = nvl_dict['Color']
        except KeyError:
            color = 'no color'
        posh_colors = ['Red', 'Pink', 'Orange', 'Yellow', 'Green', 'Blue', 'Purple', 'Gold', 'Silver', 'Black', 'Gray', 'White', 'Cream', 'Brown', 'Tan']  # needed for cross-referencing
        try:
            sku = response['Item']['SKU']
        except KeyError:
            sku = 'missing'
        posh_title_specs_list = ['Model', 'Style']
        posh_desc_specs_list = ['Boot Shaft Height', 'Calf Circumference', 'Material', 'Heel Height', 'Exact Heel Height', 'Country/Region of Manufacture']  # consider adding 'Heel Type' and 'Toe Type' to the list
        posh_title = brand + ' '
        for spec in posh_title_specs_list:
            if spec in nvl_dict:
                posh_title += nvl_dict[spec] + ' '
        posh_desc = cond_desc + '\n' * 2
        for spec in posh_desc_specs_list:
            if spec in nvl_dict:
                posh_desc += spec + ': ' + nvl_dict[spec] + '\n' * 2
        # Retrieve the size listed on ebay and normalize it to the form 'US <size>'
        us_sz = 'US ' + nvl_dict["US Shoe Size (Women's)"].replace('US', ' ').replace(' ', '')
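        # e.g. '8.5 US' and 'US 8.5' both normalize to 'US 8.5' (illustrative inputs)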
        # Begin size disclaimer to include in the posh description
        sz_spec_list = ["EUR Shoe Size (Women's)", "UK Shoe Size (Women's)"]
        size_disclaimer = '* Manufacture Sizing: ' + us_sz
        for sz in sz_spec_list:
            if sz in nvl_dict:
                size_disclaimer += ' = ' + sz + ' ' + nvl_dict[sz] + '\n' * 2
        ebay_title = response['Item']['Title']  # might use this later for something
        print(ebay_title)
        price = response['Item']['ListingDetails']['ConvertedStartPrice']['value']
        price = str(round(float(price) / .75))  # dividing by 0.75 gives a 33.33 % bump over the original price
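        # e.g. an ebay price of 30.00 lists at round(30.00 / 0.75) = 40 (illustrative numbers)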
        # Begin creating the poshmark listing:
        # Select Dropdown Category and Dismiss Subcategory
        wait.until(EC.element_to_be_clickable((By.XPATH, "//*[contains(text(), 'Select a Category')]")))
        browser.find_element_by_xpath("//*[contains(text(), 'Select a Category')]").click()
        wait.until(EC.element_to_be_clickable((By.XPATH, "//*[contains(text(), 'Shoes')]")))
        browser.find_element_by_xpath("//*[contains(text(), 'Shoes')]").click()
        wait.until(EC.element_to_be_clickable((By.XPATH, "//*[contains(text(), 'Select Subcategory (optional)')]")))
        browser.find_element_by_xpath("//*[contains(text(), 'Select Subcategory (optional)')]").click()
        # Fill in Custom Posh Size
        wait.until(EC.element_to_be_clickable((By.XPATH, "//span[text()='Select Size']")))
        browser.find_element_by_xpath("//span[text()='Select Size']").click()
        wait.until(EC.element_to_be_clickable((By.XPATH, "//*[contains(text(), 'Custom')]")))
        browser.find_element_by_xpath("//*[contains(text(), 'Custom')]").click()
        browser.find_element_by_xpath("//input[@data-vv-name='customSize']").send_keys(us_sz)
        wait.until(EC.element_to_be_clickable((By.XPATH, "//*[contains(text(), 'Save')]")))
        browser.find_element_by_xpath("//*[contains(text(), 'Save')]").click()
        # Begin Posh Title
        wait.until(EC.element_to_be_clickable((By.XPATH, "//input[@data-vv-name='title']")))
        title = browser.find_element_by_xpath("//input[@data-vv-name='title']")
        title.send_keys(posh_title)
        # Begin Item Description
        wait.until(EC.element_to_be_clickable((By.XPATH, "//textarea[@data-vv-name='description']")))
        desc = browser.find_element_by_xpath("//textarea[@data-vv-name='description']")
        desc.send_keys(posh_desc + '\n' + size_disclaimer + '\n' * 2 + sku)
        # Begin Brand Input
        brand_input = browser.find_element_by_xpath("//input[@placeholder='Enter the Brand/Designer']")
        brand_input.send_keys(brand)
        # Fill Original Price and List Price
        msrp = browser.find_element_by_xpath("//input[@data-vv-name='originalPrice']")
        msrp.send_keys('0')
        browser.find_element_by_xpath("//input[@data-vv-name='listingPrice']").send_keys(price)
        # Select Color
        if color in posh_colors:
            wait.until(EC.element_to_be_clickable((By.XPATH, "//span[text()='Select up to 2 colors']")))
            browser.find_element_by_xpath("//span[text()='Select up to 2 colors']").click()
            wait.until(EC.element_to_be_clickable((By.XPATH, "//*[contains(text(), '" + color + "')]")))
            browser.find_element_by_xpath("//*[contains(text(), '" + color + "')]").click()
            wait.until(EC.element_to_be_clickable((By.XPATH, "//button[@data-et-on-name='color_tile']")))
            browser.find_element_by_xpath("//button[@data-et-on-name='color_tile']").click()
        # Begin Listing Publishing
        wait.until(EC.element_to_be_clickable((By.XPATH, "//button[@data-et-name='next']")))
        browser.find_element_by_xpath("//button[@data-et-name='next']").click()
        wait.until(EC.element_to_be_clickable((By.XPATH, "//button[@data-et-name='list_item']")))
        browser.find_element_by_xpath("//button[@data-et-name='list_item']").click()
        try:
            wait.until(EC.presence_of_element_located((By.XPATH, "//title[text()='Feed - Poshmark']")))
        except TimeoutException:
            input("press 'enter' to continue")  # manual fallback if the feed page never loads
        # Begin New Listing
        browser.get('https://poshmark.com/create-listing')
        # Update the posh data files so synced listings aren't re-created on the next run:
        posh_ids.append(itemID)
        with open('posh_ids.txt', 'w') as jf:
            json.dump(posh_ids, jf)
        posh_skus.append(sku)
        with open('posh_skus.txt', 'w') as jf:
            json.dump(posh_skus, jf)
        posh_sku_dict = dict(zip(posh_skus, posh_ids))
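        # posh_sku_dict maps sku -> itemID, e.g. {'B452': '254123456789'} (illustrative values)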
        with open('posh_sku_dict.txt', 'w') as jf:
            json.dump(posh_sku_dict, jf)
# End for loop