From dcbdb7ced3bf3742695fe84c4c7e807294a10fde Mon Sep 17 00:00:00 2001
From: spbeach46
Date: Tue, 29 Dec 2020 00:20:55 -0700
Subject: [PATCH] need to fix data.update in conky to be list of dicts not dict of dicts

---
 ebay_api.py | 20 +++++++++++---------
 1 file changed, 11 insertions(+), 9 deletions(-)

diff --git a/ebay_api.py b/ebay_api.py
index 0b46b43..882ac29 100644
--- a/ebay_api.py
+++ b/ebay_api.py
@@ -59,7 +59,7 @@ class FindingApi:
 
             for future in executor.map(lambda p: self.get_data(*p), args):
                 data = future
-        try:
+        try: # TODO if conditions are not working due to each thread checking the same unedited item_id_results list
             training = pd.read_csv('training.csv')
             for item in data['findItemsByCategoryResponse'][0]['searchResult'][0]['item']:
                 if (item not in training.values) and (item not in itemid_results_list):
@@ -106,13 +106,15 @@ class ShoppingApi:
         For some reason item_id_results can only be passed as argument in executor.map
         if the variable is made within function
         '''
-        data = {}
+        data = {} # TODO I think you need to append a list of dictionaries rather than update a dictionary of dictionaries. Training var will require an updated dictionary though
         finding = FindingApi(4, 2)
         item_id_results = finding.get_ids_from_cats()
         with concurrent.futures.ThreadPoolExecutor() as executor:
             for future in executor.map(self.get_item_from_findItemsByCategory, item_id_results):
-                data.update(future)
-        return data
+                print(future)
+                # data.update(future)
+        return data # TODO each future is a list of dictionaries because the output of any multithreader in this method is a list.
+        # data dictionary can't update from list of dicts unless iterated over. Might need a different way to update.
 
 class CurateData:
     '''
@@ -185,12 +187,12 @@ def main():
     Main program creates/updates a csv file to use for ML training from live
     ebay listings
     '''
-    service, pageNumber = input('service and pageNumber:').split()
-    service = int(service)
-    pageNumber = int(pageNumber)
-    # finding = FindingApi(service, pageNumber)
+# service, pageNumber = input('service and pageNumber:').split()
+# service = int(service)
+# pageNumber = int(pageNumber)
+# finding = FindingApi(service, pageNumber)
 
-    # item_id_results = finding.get_ids_from_cats()
+# item_id_results = finding.get_ids_from_cats()
     shopping = ShoppingApi()
     data = shopping.conky()
     curate = CurateData()
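
Note (reviewer sketch, not part of the patch): the TODOs in conky() above point at one possible fix. Assuming each value yielded by executor.map(self.get_item_from_findItemsByCategory, item_id_results) really is a list of item dictionaries, the results can be accumulated with list.extend instead of dict.update; if the training step still needs a dictionary, it can be rebuilt afterwards keyed on an assumed per-item id field. A minimal sketch against the class names in this file, not a tested change:

    # concurrent.futures is assumed to be imported at module level in ebay_api.py,
    # as the with-block in the patch already uses it.
    import concurrent.futures

    def conky(self):
        '''
        Sketch of ShoppingApi.conky: collect a flat list of item dicts from the
        thread pool. executor.map yields return values directly, and calling
        dict.update on a list of dicts either raises or mangles the keys, so the
        per-thread lists are flattened with extend instead.
        '''
        data = []  # list of item dicts, not a dict of dicts
        finding = FindingApi(4, 2)
        item_id_results = finding.get_ids_from_cats()
        with concurrent.futures.ThreadPoolExecutor() as executor:
            for result in executor.map(self.get_item_from_findItemsByCategory, item_id_results):
                data.extend(result)  # each result is assumed to be a list of dicts
        # If training still needs a dict, build one keyed on an id field here;
        # 'ItemID' is an assumed field name, adjust to whatever the response uses:
        # training_dict = {item['ItemID']: item for item in data}
        return data

This keeps the return value a plain list, which downstream code can iterate or convert as needed, and avoids the partial state the commented-out data.update(future) left behind.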