converted full script to classes and methods
parent 8140e9269a
commit 6c64bdc30e

depop.py (14 additions, normal file)
@@ -0,0 +1,14 @@
import json
import os
import concurrent.futures
import wget
from ebaysdk.trading import Connection as Trading
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
import argparse
import id_update
import xlister

login_url = 'https://www.depop.com/login/'
@@ -30,6 +30,7 @@ def id_up(n):
        itemID = item['itemId']
        #response = tapi.execute('GetItem',{'ItemID':itemID}).dict()
        ids.append(itemID)
    print(ids)
    return ids

'''def sku_up(itemID):
xlister.py (558 changes)
@@ -1,236 +1,388 @@
import os
import requests
#import os
#import requests
#import json
#import wget
#from selenium import webdriver
#from selenium.webdriver.common.keys import Keys
#from selenium.webdriver.support.select import Select
#from selenium.webdriver.common.by import By
#from selenium.webdriver.support.ui import WebDriverWait
#from selenium.webdriver.support import expected_conditions as EC
#from selenium.webdriver.common.action_chains import ActionChains
#from selenium.common.exceptions import TimeoutException
#import ebaysdk
#from ebaysdk.trading import Connection as Trading
#from ebaysdk.finding import Connection as Finding
#import concurrent.futures
#import id_update
#
## update ebay_ids.txt file:
#
#id_update.main()
#
## start selenium and log in to posh:
#
#browser = webdriver.Chrome("C:/Users/unknown/Desktop/projects/chromedriver")
#
#browser.get('https://poshmark.com/login')
#usename = browser.find_element_by_id('login_form_username_email')
#usename.send_keys('user')
#psw = browser.find_element_by_id('login_form_password')
#psw.send_keys('password')
#psw.submit()
#input('press "enter" to continue')
#browser.get('https://poshmark.com/create-listing')
#
#
#
#wait = WebDriverWait(browser, 120)
#
## Loop through each itemID to get relevant data to be used in posh listing:
#
#tapi = Trading(config_file = 'ebay1.yaml')
#
#img_num = [] # for use in ThreadPoolExecutor
#for i in range(-1, 11):
#    i += 1
#    img_num.append(i)
#
## Cross-reference ebay and posh txt files to sync posh listings:
#
#with open('ebay_ids.txt') as jf:
#    ebay_ids = json.load(jf)
#for i in range(0,len(ebay_ids)):
#    id = ebay_ids[i]
#    with open('posh_ids.txt') as jf:
#        posh_ids = json.load(jf)
#    with open('posh_skus.txt') as jf:
#        posh_skus = json.load(jf)
#    if id not in posh_ids:
#        itemID = id
#        response = tapi.execute('GetItem',{'ItemID':itemID,'DetailLevel': 'ReturnAll', 'IncludeItemSpecifics':'True'}).dict()
#        print(itemID)
#
#        # download first 8 ebay pics and temporarily hold until listing is done:
#
#        def img_download(i):
#            try:
#                pic = response['Item']['PictureDetails']['PictureURL'][i]
#            except IndexError:
#                pass
#            else:
#                wget.download(pic, 'C:/Users/unknown/Desktop/projects/tempics/' + str(i) + '.jpg')
#                img = 'C:/Users/unknown/Desktop/projects//tempics/' + str(i) + '.jpg'
#                return img
#        pics = []
#        with concurrent.futures.ThreadPoolExecutor() as executor:
#            for future in executor.map(img_download, img_num):
#                pics.append(future)
#        pics_path = '\n'.join(pics)
#
#        #begin image upload:
#
#        fileup = browser.find_element_by_id('img-file-input')
#        fileup.send_keys(pics_path)
#
#        wait.until(EC.element_to_be_clickable((By.XPATH, "//button[@data-et-name='apply']")))
#        apply = browser.find_element_by_xpath("//button[@data-et-name='apply']")
#        apply.click()
#        for image in pics:
#            os.remove(image) # Delete images from local storage
#
#
#        # consider creating a function that just takes the ebay id as input and returns the details below so that the function doesn't have to be redefined every iteration but merely called.
#        cond_desc = response['Item']['ConditionDescription'] + ' Props not included'
#        item_specs = response['Item']['ItemSpecifics']
#        nvl = item_specs['NameValueList']
#        nvl_dict = {d['Name']:d['Value'] for d in nvl}
#        brand = nvl_dict['Brand']
#        cat = response['Item']['PrimaryCategory']['CategoryName']
#        try:
#            color = nvl_dict['Color']
#        except KeyError:
#            color = 'no color'
#
#        posh_colors = ['Red', 'Pink', 'Orange', 'Yellow', 'Green', 'Blue', 'Purple', 'Gold', 'Silver', 'Black', 'Gray', 'White', 'Cream', 'Brown', 'Tan'] # (needed for cross-referencing)
#
#        try:
#            sku = response['Item']['SKU']
#        except KeyError:
#            sku = 'missing'
#
#        posh_title_specs_list = ['Model', 'Style']
#        posh_desc_specs_list = ['Boot Shaft Height', 'Calf Circumference', 'Material', 'Heel Height', 'Exact Heel Height', 'Country/Region of Manufacture'] # Consider adding 'Heel Type' and 'Toe Type' to the list
#
#        posh_title = brand + ' '
#        for spec in posh_title_specs_list:
#            if spec in nvl_dict:
#                posh_title += nvl_dict[spec] + ' '
#
#        posh_desc = cond_desc + '\n'*2
#        for spec in posh_desc_specs_list:
#            if spec in nvl_dict:
#                posh_desc += spec + ': ' + nvl_dict[spec] + '\n'*2
#
#        # retrieve size listed in ebay
#
#        us_sz = 'US ' + nvl_dict["US Shoe Size (Women's)"].replace('US', ' ').replace(' ', '')
#
#        # Begin size disclaimer to include in posh description
#        sz_spec_list = ["EUR Shoe Size (Women's)", "UK Shoe Size (Women's)"]
#        size_disclaimer = '* Manufacture Sizing: ' + us_sz
#
#        n = -1
#        for sz in sz_spec_list:
#            n+=1
#            if sz in nvl_dict:
#                size_disclaimer += ' = ' + sz_spec_list[n] + ' ' + nvl_dict[sz] + '\n'*2
#
#        ebay_title = response['Item']['Title'] # Might use this later for something
#        print(ebay_title)
#
#        price = response['Item']['ListingDetails']['ConvertedStartPrice']['value']
#        price = str(round(float(price)/.75)) # 33.33 % bump from original price
#
#
#        # Begin creating poshmark listing:
#
#
#        # Select Dropdown Category and Dismiss Subcategory
#
#        wait.until(EC.element_to_be_clickable((By.XPATH, "//*[contains(text(), 'Select a Category')]")))
#        drop_cat = browser.find_element_by_xpath("//*[contains(text(), 'Select a Category')]").click()
#
#        wait.until(EC.element_to_be_clickable((By.XPATH, "//*[contains(text(), 'Shoes')]")))
#        drop_cat = browser.find_element_by_xpath("//*[contains(text(), 'Shoes')]").click()
#
#        wait.until(EC.element_to_be_clickable((By.XPATH, "//*[contains(text(), 'Select Subcategory (optional)')]")))
#        sub_cat = browser.find_element_by_xpath("//*[contains(text(), 'Select Subcategory (optional)')]").click()
#
#        # Fill in Custom Posh Size
#
#        wait.until(EC.element_to_be_clickable((By.XPATH, "//span[text()='Select Size']")))
#        drop_sz = browser.find_element_by_xpath("//span[text()='Select Size']").click()
#
#        wait.until(EC.element_to_be_clickable((By.XPATH, "//*[contains(text(), 'Custom')]")))
#        drop_sz = browser.find_element_by_xpath("//*[contains(text(), 'Custom')]").click()
#
#        custom_size = browser.find_element_by_xpath("//input[@data-vv-name='customSize']").send_keys(us_sz)
#
#        wait.until(EC.element_to_be_clickable((By.XPATH, "//*[contains(text(), 'Save')]")))
#        save_size = browser.find_element_by_xpath("//*[contains(text(), 'Save')]").click()
#
#        # Begin Posh Title
#
#        wait.until(EC.element_to_be_clickable((By.XPATH, "//input[@data-vv-name='title']")))
#        title = browser.find_element_by_xpath("//input[@data-vv-name='title']")
#        title.send_keys(posh_title)
#
#        # Begin Item Description
#
#        wait.until(EC.element_to_be_clickable((By.XPATH, "//textarea[@data-vv-name='description']")))
#        desc = browser.find_element_by_xpath("//textarea[@data-vv-name='description']")
#        desc.send_keys(posh_desc + '\n' + size_disclaimer + '\n'*2 + sku)
#
#        # Begin Brand Input
#
#        brand_input = browser.find_element_by_xpath("//input[@placeholder='Enter the Brand/Designer']")
#        brand_input.send_keys(brand)
#
#        # Fill Original Price and List Price
#
#        msrp = browser.find_element_by_xpath("//input[@data-vv-name='originalPrice']")
#        msrp.send_keys('0')
#
#        list_price = browser.find_element_by_xpath("//input[@data-vv-name='listingPrice']").send_keys(price)
#
#        # Select Color
#
#        if color in posh_colors:
#            wait.until(EC.element_to_be_clickable((By.XPATH, "//span[text()='Select up to 2 colors']")))
#            browser.find_element_by_xpath("//span[text()='Select up to 2 colors']").click()
#            wait.until(EC.element_to_be_clickable((By.XPATH, "//*[contains(text(), '" + color + "')]")))
#            browser.find_element_by_xpath("//*[contains(text(), '" + color + "')]").click()
#            wait.until(EC.element_to_be_clickable((By.XPATH, "//button[@data-et-on-name='color_tile']")))
#            browser.find_element_by_xpath("//button[@data-et-on-name='color_tile']").click()
#
#        # Begin Listing Publishing
#
#        wait.until(EC.element_to_be_clickable((By.XPATH, "//button[@data-et-name='next']")))
#        browser.find_element_by_xpath("//button[@data-et-name='next']").click()
#
#        wait.until(EC.element_to_be_clickable((By.XPATH, "//button[@data-et-name='list_item']")))
#        browser.find_element_by_xpath("//button[@data-et-name='list_item']").click()
#        try:
#            wait.until(EC.presence_of_element_located((By.XPATH, "//title[text()='Feed - Poshmark']")))
#        except TimeoutException:
#            input("press 'enter' to continue")
#
#        # Begin New Listing
#
#        browser.get('https://poshmark.com/create-listing')
#
#        # update posh data files:
#
#        posh_ids.append(itemID)
#        with open('posh_ids.txt', 'w') as jf:
#            json.dump(posh_ids, jf)
#        posh_skus.append(sku)
#        with open('posh_skus.txt', 'w') as jf:
#            json.dump(posh_skus, jf)
#        posh_sku_dict = dict(zip(posh_skus, posh_ids))
#        with open('posh_sku_dict.txt', 'w') as jf:
#            json.dump(posh_sku_dict, jf)
#    else:
#        pass
## End for loop
import json
import os
import concurrent.futures
import wget
from ebaysdk.trading import Connection as Trading
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.select import Select
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.action_chains import ActionChains
from selenium.common.exceptions import TimeoutException
import ebaysdk
from ebaysdk.trading import Connection as Trading
from ebaysdk.finding import Connection as Finding
import concurrent.futures
from selenium.webdriver.common.by import By
import argparse
import id_update

# update ebay_ids.txt file:

id_update.main()

# start selenium and log in to posh:

browser = webdriver.Chrome("C:/Users/unknown/Desktop/projects/chromedriver")

browser.get('https://poshmark.com/login')
usename = browser.find_element_by_id('login_form_username_email')
usename.send_keys('SpeanutButter')
psw = browser.find_element_by_id('login_form_password')
psw.send_keys('***REMOVED***')
psw.submit()
input('press "enter" to continue')
browser.get('https://poshmark.com/create-listing')
class FileHandler:
    @staticmethod
    def load_json(filename):
        with open(filename) as f:
            return json.load(f)

    @staticmethod
    def save_json(data, filename):
        with open(filename, 'w') as f:
            json.dump(data, f)
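A minimal usage sketch for the FileHandler helper above (illustration only; the appended value is a placeholder, while the file name and the load_json/save_json methods are the ones this commit already uses):

    posh_ids = FileHandler.load_json('posh_ids.txt')
    posh_ids.append(new_item_id)  # new_item_id stands in for an eBay item id string
    FileHandler.save_json(posh_ids, 'posh_ids.txt')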


class EbayAPI:
    def __init__(self, config_file):
        self.tapi = Trading(config_file=config_file)

    def get_item_details(self, item_id):
        response = self.tapi.execute('GetItem', {'ItemID': item_id, 'DetailLevel': 'ReturnAll', 'IncludeItemSpecifics': 'True'}).dict()
        return response['Item']

wait = WebDriverWait(browser, 120)
    def get_details_with_images(self, item_id, image_downloader):
        item_details = self.get_item_details(item_id)
        image_urls = item_details['PictureDetails']['PictureURL'][:8] # Getting the first 8 images
        images_paths = image_downloader.download_images(image_urls)

# Loop through each itemID to get relevant data to be used in posh listing:
        # Add image paths to details
        item_details['ImagePaths'] = images_paths
        return item_details
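Setting aside the removed module-level lines interleaved above, the added EbayAPI class appears to assemble to the following (a sketch of the de-interleaved class, not a verbatim copy of the committed file):

class EbayAPI:
    def __init__(self, config_file):
        self.tapi = Trading(config_file=config_file)

    def get_item_details(self, item_id):
        # Full GetItem response, including item specifics
        response = self.tapi.execute('GetItem', {'ItemID': item_id, 'DetailLevel': 'ReturnAll', 'IncludeItemSpecifics': 'True'}).dict()
        return response['Item']

    def get_details_with_images(self, item_id, image_downloader):
        item_details = self.get_item_details(item_id)
        image_urls = item_details['PictureDetails']['PictureURL'][:8]  # first 8 images
        images_paths = image_downloader.download_images(image_urls)
        item_details['ImagePaths'] = images_paths
        return item_details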

tapi = Trading(config_file = 'ebay1.yaml')

img_num = [] # for use in ThreadPoolExecutor
for i in range(-1, 11):
    i += 1
    img_num.append(i)
class ImageDownloader:
    def __init__(self, temp_dir='tempics/'):
        self.temp_dir = temp_dir
        os.makedirs(self.temp_dir, exist_ok=True)

# Cross-reference ebay and posh txt files to sync posh listings:
    def download_images(self, image_urls):
        return self._download(image_urls, self.temp_dir)

with open('ebay_ids.txt') as jf:
    ebay_ids = json.load(jf)
for i in range(0,len(ebay_ids)):
    id = ebay_ids[i]
    with open('posh_ids.txt') as jf:
        posh_ids = json.load(jf)
    with open('posh_skus.txt') as jf:
        posh_skus = json.load(jf)
    if id not in posh_ids:
        itemID = id
        response = tapi.execute('GetItem',{'ItemID':itemID,'DetailLevel': 'ReturnAll', 'IncludeItemSpecifics':'True'}).dict()
        print(itemID)
    def _download(self, image_urls, folder_path):
        def download(url, index):
            filepath = os.path.join(folder_path, f'{index}.jpg')
            wget.download(url, filepath)
            return filepath

        # download first 8 ebay pics and temporarily hold until listing is done:

        def img_download(i):
            try:
                pic = response['Item']['PictureDetails']['PictureURL'][i]
            except IndexError:
                pass
            else:
                wget.download(pic, 'C:/Users/unknown/Desktop/projects/tempics/' + str(i) + '.jpg')
                img = 'C:/Users/unknown/Desktop/projects//tempics/' + str(i) + '.jpg'
                return img
        pics = []
        with concurrent.futures.ThreadPoolExecutor() as executor:
            for future in executor.map(img_download, img_num):
                pics.append(future)
        pics_path = '\n'.join(pics)
            image_paths = list(executor.map(download, image_urls, range(len(image_urls))))
        return image_paths

        #begin image upload:

        fileup = browser.find_element_by_id('img-file-input')
        fileup.send_keys(pics_path)

        wait.until(EC.element_to_be_clickable((By.XPATH, "//button[@data-et-name='apply']")))
        apply = browser.find_element_by_xpath("//button[@data-et-name='apply']")
        apply.click()
        for image in pics:
            os.remove(image) # Delete images from local storage
    def cleanup(self):
        for file in os.listdir(self.temp_dir):
            os.remove(os.path.join(self.temp_dir, file))
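Pieced together from the added lines interleaved above, the new ImageDownloader appears to read as follows once the removed script lines are set aside (a sketch, not a verbatim copy of the committed file):

class ImageDownloader:
    def __init__(self, temp_dir='tempics/'):
        self.temp_dir = temp_dir
        os.makedirs(self.temp_dir, exist_ok=True)

    def download_images(self, image_urls):
        return self._download(image_urls, self.temp_dir)

    def _download(self, image_urls, folder_path):
        def download(url, index):
            # Save each URL as <index>.jpg inside the temp folder
            filepath = os.path.join(folder_path, f'{index}.jpg')
            wget.download(url, filepath)
            return filepath

        # Fetch the images concurrently, preserving their order
        with concurrent.futures.ThreadPoolExecutor() as executor:
            image_paths = list(executor.map(download, image_urls, range(len(image_urls))))
        return image_paths

    def cleanup(self):
        # Remove every downloaded file from the temp folder
        for file in os.listdir(self.temp_dir):
            os.remove(os.path.join(self.temp_dir, file))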


        # consider creating a function that just takes the ebay id as input and returns the details below so that the function doesn't have to be redefined every iteration but merely called.
        cond_desc = response['Item']['ConditionDescription'] + ' Props not included'
        item_specs = response['Item']['ItemSpecifics']
        nvl = item_specs['NameValueList']
        nvl_dict = {d['Name']:d['Value'] for d in nvl}
        brand = nvl_dict['Brand']
        cat = response['Item']['PrimaryCategory']['CategoryName']
        try:
            color = nvl_dict['Color']
        except KeyError:
            color = 'no color'
class PoshmarkAutomator:
    def __init__(self, driver_path):
        self.driver = webdriver.Chrome(driver_path)
        self.wait = WebDriverWait(self.driver, 120)

        posh_colors = ['Red', 'Pink', 'Orange', 'Yellow', 'Green', 'Blue', 'Purple', 'Gold', 'Silver', 'Black', 'Gray', 'White', 'Cream', 'Brown', 'Tan'] # (needed for cross-referencing)
    def login(self, username, password):
        self.driver.get('https://poshmark.com/login')
        self.driver.find_element(By.ID, 'login_form_username_email').send_keys(username)
        # send_keys() returns None, so submit() has to be called on the element itself
        psw_field = self.driver.find_element(By.ID, 'login_form_password')
        psw_field.send_keys(password)
        psw_field.submit()
        input('press "enter" to continue')

        try:
            sku = response['Item']['SKU']
        except KeyError:
            sku = 'missing'

        posh_title_specs_list = ['Model', 'Style']
        posh_desc_specs_list = ['Boot Shaft Height', 'Calf Circumference', 'Material', 'Heel Height', 'Exact Heel Height', 'Country/Region of Manufacture'] # Consider adding 'Heel Type' and 'Toe Type' to the list

        posh_title = brand + ' '
        for spec in posh_title_specs_list:
            if spec in nvl_dict:
                posh_title += nvl_dict[spec] + ' '

        posh_desc = cond_desc + '\n'*2
        for spec in posh_desc_specs_list:
            if spec in nvl_dict:
                posh_desc += spec + ': ' + nvl_dict[spec] + '\n'*2

        # retrieve size listed in ebay

        us_sz = 'US ' + nvl_dict["US Shoe Size (Women's)"].replace('US', ' ').replace(' ', '')

        # Begin size disclaimer to include in posh description
        sz_spec_list = ["EUR Shoe Size (Women's)", "UK Shoe Size (Women's)"]
        size_disclaimer = '* Manufacture Sizing: ' + us_sz

        n = -1
        for sz in sz_spec_list:
            n+=1
            if sz in nvl_dict:
                size_disclaimer += ' = ' + sz_spec_list[n] + ' ' + nvl_dict[sz] + '\n'*2

        ebay_title = response['Item']['Title'] # Might use this later for something
        print(ebay_title)

        price = response['Item']['ListingDetails']['ConvertedStartPrice']['value']
        price = str(round(float(price)/.75)) # 33.33 % bump from original price
    def create_listing(self, item_details, image_paths):
        self.driver.get('https://poshmark.com/create-listing')
        # Add logic here to fill out and submit the listing form using item_details and image_paths
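        # One possible shape for that logic, reusing the selectors from the old
        # script shown elsewhere in this diff (illustrative sketch only; keys like
        # 'Title', 'Description' and 'Price' are assumed to exist in item_details):
        #
        #     self.driver.find_element(By.ID, 'img-file-input').send_keys('\n'.join(image_paths))
        #     self.wait.until(EC.element_to_be_clickable((By.XPATH, "//button[@data-et-name='apply']"))).click()
        #     self.driver.find_element(By.XPATH, "//input[@data-vv-name='title']").send_keys(item_details['Title'])
        #     self.driver.find_element(By.XPATH, "//textarea[@data-vv-name='description']").send_keys(item_details['Description'])
        #     self.driver.find_element(By.XPATH, "//input[@data-vv-name='listingPrice']").send_keys(item_details['Price'])
        #     self.wait.until(EC.element_to_be_clickable((By.XPATH, "//button[@data-et-name='next']"))).click()
        #     self.wait.until(EC.element_to_be_clickable((By.XPATH, "//button[@data-et-name='list_item']"))).click()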

    def close(self):
        self.driver.quit()


        # Begin creating poshmark listing:
def fetch_ebay_data(image_dir):
    ebay_handler = FileHandler()
    ebay_api = EbayAPI(config_file='ebay1.yaml')
    image_downloader = ImageDownloader(temp_dir=image_dir)

    ebay_ids = ebay_handler.load_json('ebay_ids.txt')
    all_details = []

        # Select Dropdown Category and Dismiss Subcategory
    for ebay_id in ebay_ids:
        # Get item details and images
        item_details = ebay_api.get_details_with_images(ebay_id, image_downloader)

        # Append details including image paths to final list
        all_details.append(item_details)

        wait.until(EC.element_to_be_clickable((By.XPATH, "//*[contains(text(), 'Select a Category')]")))
        drop_cat = browser.find_element_by_xpath("//*[contains(text(), 'Select a Category')]").click()
        # Optionally save each item's details with their images
        item_detail_path = f'ebay_item_{ebay_id}.json'
        ebay_handler.save_json(item_details, item_detail_path)
        print(f'Detailed data for item {ebay_id} has been saved.')

        wait.until(EC.element_to_be_clickable((By.XPATH, "//*[contains(text(), 'Shoes')]")))
        drop_cat = browser.find_element_by_xpath("//*[contains(text(), 'Shoes')]").click()
def create_listing():
    # Update eBay IDs
    id_update.main()

        wait.until(EC.element_to_be_clickable((By.XPATH, "//*[contains(text(), 'Select Subcategory (optional)')]")))
        sub_cat = browser.find_element_by_xpath("//*[contains(text(), 'Select Subcategory (optional)')]").click()
    ebay_handler = FileHandler()
    posh_handler = FileHandler()

    ebay_api = EbayAPI(config_file='ebay1.yaml')
    image_downloader = ImageDownloader()
    posh_automator = PoshmarkAutomator(driver_path="C:/Users/unknown/Desktop/projects/chromedriver")

    ebay_ids = ebay_handler.load_json('ebay_ids.txt')
    posh_ids = posh_handler.load_json('posh_ids.txt')

    posh_automator.login('user', 'password')

    for ebay_id in ebay_ids:
        if ebay_id not in posh_ids:
            # Get item details and images
            item_details = ebay_api.get_details_with_images(ebay_id, image_downloader)

            # Create Poshmark listing
            posh_automator.create_listing(item_details, item_details.get('ImagePaths'))

            # Clean up downloaded images
            image_downloader.cleanup()

            # Update Posh IDs and SKUs
            posh_ids.append(ebay_id)
            posh_handler.save_json(posh_ids, 'posh_ids.txt')

    posh_automator.close()

        # Fill in Custom Posh Size
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--action', choices=['list', 'fetch'], default='list', help='Choose to `fetch` eBay data or `list` items on Poshmark.')
    parser.add_argument('--image-dir', default='downloaded_images', help='The directory where the images will be saved.')

        wait.until(EC.element_to_be_clickable((By.XPATH, "//span[text()='Select Size']")))
        drop_sz = browser.find_element_by_xpath("//span[text()='Select Size']").click()
    args = parser.parse_args()

    if args.action == 'fetch':
        fetch_ebay_data(args.image_dir)
    elif args.action == 'list':
        create_listing()
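    # Assumed invocation, based on the argparse flags defined in main() above:
    #   python xlister.py --action fetch --image-dir downloaded_images
    #   python xlister.py --action list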

        wait.until(EC.element_to_be_clickable((By.XPATH, "//*[contains(text(), 'Custom')]")))
        drop_sz = browser.find_element_by_xpath("//*[contains(text(), 'Custom')]").click()

        custom_size = browser.find_element_by_xpath("//input[@data-vv-name='customSize']").send_keys(us_sz)

        wait.until(EC.element_to_be_clickable((By.XPATH, "//*[contains(text(), 'Save')]")))
        save_size = browser.find_element_by_xpath("//*[contains(text(), 'Save')]").click()

        # Begin Posh Title

        wait.until(EC.element_to_be_clickable((By.XPATH, "//input[@data-vv-name='title']")))
        title = browser.find_element_by_xpath("//input[@data-vv-name='title']")
        title.send_keys(posh_title)

        # Begin Item Description

        wait.until(EC.element_to_be_clickable((By.XPATH, "//textarea[@data-vv-name='description']")))
        desc = browser.find_element_by_xpath("//textarea[@data-vv-name='description']")
        desc.send_keys(posh_desc + '\n' + size_disclaimer + '\n'*2 + sku)

        # Begin Brand Input

        brand_input = browser.find_element_by_xpath("//input[@placeholder='Enter the Brand/Designer']")
        brand_input.send_keys(brand)

        # Fill Original Price and List Price

        msrp = browser.find_element_by_xpath("//input[@data-vv-name='originalPrice']")
        msrp.send_keys('0')

        list_price = browser.find_element_by_xpath("//input[@data-vv-name='listingPrice']").send_keys(price)

        # Select Color

        if color in posh_colors:
            wait.until(EC.element_to_be_clickable((By.XPATH, "//span[text()='Select up to 2 colors']")))
            browser.find_element_by_xpath("//span[text()='Select up to 2 colors']").click()
            wait.until(EC.element_to_be_clickable((By.XPATH, "//*[contains(text(), '" + color + "')]")))
            browser.find_element_by_xpath("//*[contains(text(), '" + color + "')]").click()
            wait.until(EC.element_to_be_clickable((By.XPATH, "//button[@data-et-on-name='color_tile']")))
            browser.find_element_by_xpath("//button[@data-et-on-name='color_tile']").click()

        # Begin Listing Publishing

        wait.until(EC.element_to_be_clickable((By.XPATH, "//button[@data-et-name='next']")))
        browser.find_element_by_xpath("//button[@data-et-name='next']").click()

        wait.until(EC.element_to_be_clickable((By.XPATH, "//button[@data-et-name='list_item']")))
        browser.find_element_by_xpath("//button[@data-et-name='list_item']").click()
        try:
            wait.until(EC.presence_of_element_located((By.XPATH, "//title[text()='Feed - Poshmark']")))
        except TimeoutException:
            input("press 'enter' to continue")

        # Begin New Listing

        browser.get('https://poshmark.com/create-listing')

        # update posh data files:

        posh_ids.append(itemID)
        with open('posh_ids.txt', 'w') as jf:
            json.dump(posh_ids, jf)
        posh_skus.append(sku)
        with open('posh_skus.txt', 'w') as jf:
            json.dump(posh_skus, jf)
        posh_sku_dict = dict(zip(posh_skus, posh_ids))
        with open('posh_sku_dict.txt', 'w') as jf:
            json.dump(posh_sku_dict, jf)
    else:
        pass
# End for loop
if __name__ == "__main__":
    main()