diff --git a/ebay_api.py b/ebay_api.py
index 234c084..ddf9d40 100644
--- a/ebay_api.py
+++ b/ebay_api.py
@@ -72,9 +72,8 @@ class ShoppingApi:
         Gets raw JSON data from multiple live listings given multiple itemIds
         '''
-        with open('ids.txt') as f:
-            ids = json.load(f)
-        item_id_results = [','.join(ids[n:n+20]) for n in list(range(0, len(ids), 20))] # 20-ItemID list created to maximize dataset/decrease calls given call constraints
+        with open('item_id_results.txt') as f:
+            item_id_results = json.load(f)
 
         headers = {
             "X-EBAY-API-IAF-TOKEN":cfg.sec['X-EBAY-API-IAF-TOKEN'], # TODO implement auto oauth token renewal
@@ -114,9 +113,8 @@ class ShoppingApi:
         data = []
 
         try:
-            with open('ids.txt') as f:
-                ids = json.load(f)
-            item_id_results = [','.join(ids[n:n+20]) for n in list(range(0, len(ids), 20))] # 20-ItemID list created to maximize dataset/decrease calls given call constraints
+            with open('item_id_results.txt') as f:
+                item_id_results = json.load(f)
         except (FileNotFoundError, ValueError):
             item_id_results = scrape_ids.main()
 
diff --git a/scrape_ids.py b/scrape_ids.py
index 6a9ba64..8f0f931 100644
--- a/scrape_ids.py
+++ b/scrape_ids.py
@@ -79,17 +79,24 @@ def get_ids(url):
 
 def threaded_get_ids(urls):
-    ids = []
+    try:
+        with open('item_id_results.txt') as f:
+            ids = json.load(f)
+    except FileNotFoundError:
+        ids = []
     with concurrent.futures.ThreadPoolExecutor() as executor:
         for future in executor.map(get_ids, urls):
            ids.extend(future)
+    item_id_results = [','.join(ids[n:n+20]) for n in list(range(0, len(ids), 20))] # 20-ItemID list created to maximize dataset/decrease calls given call constraints
 
-    with open('ids.txt', 'w') as f:
-        json.dump(ids, f)
+
+    with open('item_id_results.txt', 'w') as f:
+        json.dump(item_id_results, f)
 
     return item_id_results
 
+
 def main():
     urls = threaded_urls()
     item_id_results = threaded_get_ids(urls)
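
Note: the list comprehension this diff moves into threaded_get_ids splits the scraped IDs into comma-joined batches of at most 20, which is presumably the "call constraint" the comment refers to (the Shopping API's GetMultipleItems call accepts a limited number of ItemIDs per request). A minimal, standalone sketch of that batching, using made-up sample IDs rather than anything from the project:

    # Sketch only: fake ItemIDs to illustrate the 20-per-batch grouping above.
    ids = [str(110000000000 + i) for i in range(45)]  # 45 hypothetical ItemIDs

    # Same expression as in threaded_get_ids: comma-joined groups of up to 20 IDs,
    # so each API call covers as many listings as allowed instead of one at a time.
    item_id_results = [','.join(ids[n:n+20]) for n in range(0, len(ids), 20)]

    print(len(item_id_results))               # 3 batches: 20 + 20 + 5
    print(item_id_results[0].count(',') + 1)  # 20 IDs in the first batch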