added loop revision for all listings
@@ -40,7 +40,7 @@
   "metadata": {},
   "outputs": [],
   "source": [
    "#image_faults.faulty_images() # removes faulty images\n",
    "image_faults.faulty_images() # removes faulty images\n",
    "df = pd.read_csv('expanded_class.csv', index_col=[0], low_memory=False)\n"
   ]
  },
@@ -60,8 +60,7 @@
    "        \n",
    "    dict_pics = {}\n",
    "    for k in temp_pics_source_list:\n",
    "        try:\n",
    "            \n",
    "        try:      \n",
    "             patt_1 = re.search(r'[^/]+(?=/\\$_|.(\\.jpg|\\.jpeg|\\.png))', k, re.IGNORECASE)\n",
    "             patt_2 = re.search(r'(\\.jpg|\\.jpeg|\\.png)', k, re.IGNORECASE)\n",
    "             if patt_1 and patt_2 is not None:\n",
@@ -110,8 +109,8 @@
    "        drop_row_vals.append(pic)\n",
    "\n",
    "df['PrimaryCategoryID'] = df['PrimaryCategoryID'].astype(str) # pandas thinks ids are ints\n",
    "ddf = df[df.PictureURL.isin(drop_row_vals)==False] # remove improperly named image files\n",
    "df = ddf[ddf.PrimaryCategoryID.isin(men_cats)==False] # removes rows of womens categories\n",
    "df = df[df.PictureURL.isin(drop_row_vals)==False] # remove improperly named image files\n",
    "df = df[ddf.PrimaryCategoryID.isin(men_cats)==False] # removes rows of womens categories\n",
    "\n",
    "blah = pd.Series(df.PictureURL)\n",
    "df = df.drop(labels=['PictureURL'], axis=1)\n",
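
(Aside: a self-contained pandas sketch of the filtering pattern this hunk edits, using toy stand-in values; the notebook itself builds drop_row_vals and men_cats earlier and reads expanded_class.csv.)

import pandas as pd

# Toy stand-ins for the notebook's DataFrame and filter lists (values are made up).
df = pd.DataFrame({
    'PictureURL': ['a.jpg', 'bad_name', 'c.png'],
    'PrimaryCategoryID': [11450, 57989, 11450],
})
drop_row_vals = ['bad_name']   # improperly named image files
men_cats = ['57989']           # category ids to exclude

df['PrimaryCategoryID'] = df['PrimaryCategoryID'].astype(str)  # keep ids as strings
df = df[df.PictureURL.isin(drop_row_vals) == False]            # same as df[~df.PictureURL.isin(...)]
df = df[df.PrimaryCategoryID.isin(men_cats) == False]          # drop rows in excluded categories
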
@@ -161,21 +160,21 @@
   "id": "4d72eb90",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Found 17660 validated image filenames belonging to 7 classes.\n",
      "Found 4414 validated image filenames belonging to 7 classes.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/usr/local/lib/python3.8/dist-packages/keras_preprocessing/image/dataframe_iterator.py:279: UserWarning: Found 1 invalid image filename(s) in x_col=\"PictureURL\". These filename(s) will be ignored.\n",
      "/usr/local/lib/python3.8/dist-packages/keras_preprocessing/image/dataframe_iterator.py:279: UserWarning: Found 5 invalid image filename(s) in x_col=\"PictureURL\". These filename(s) will be ignored.\n",
      "  warnings.warn(\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Found 49212 validated image filenames belonging to 7 classes.\n",
      "Found 12302 validated image filenames belonging to 7 classes.\n"
     ]
    }
   ],
   "source": [
@@ -194,7 +193,7 @@
    "    directory='./training_images',\n",
    "    x_col='PictureURL',\n",
    "    y_col='PrimaryCategoryID',\n",
    "    batch_size=64,\n",
    "    batch_size=32,\n",
    "    seed=42,\n",
    "    shuffle=True,\n",
    "    target_size=(299,299),\n",
@@ -205,7 +204,7 @@
    "    directory='./training_images',\n",
    "    x_col='PictureURL',\n",
    "    y_col='PrimaryCategoryID',\n",
    "    batch_size=64,\n",
    "    batch_size=32,\n",
    "    seed=42,\n",
    "    shuffle=True,\n",
    "    target_size=(299,299),\n",
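
(Aside: for orientation, a sketch of the kind of flow_from_dataframe call these two hunks edit. Only the argument names visible in the diff come from the notebook; the toy DataFrame, the ImageDataGenerator settings, subset, class_mode, and validate_filenames=False are assumptions added so the example runs without real image files.)

import pandas as pd
import tensorflow as tf

# Toy frame standing in for the notebook's df (the real code reads expanded_class.csv).
df = pd.DataFrame({
    'PictureURL': ['img_0001.jpg', 'img_0002.jpg'],
    'PrimaryCategoryID': ['11450', '57989'],
})

datagen = tf.keras.preprocessing.image.ImageDataGenerator(
    rescale=1./255,           # assumed preprocessing
    validation_split=0.2,     # assumed split
)

train_generator = datagen.flow_from_dataframe(
    dataframe=df,
    directory='./training_images',
    x_col='PictureURL',
    y_col='PrimaryCategoryID',
    subset='training',
    batch_size=32,            # the value this commit switches to (was 64)
    seed=42,
    shuffle=True,
    target_size=(299, 299),
    class_mode='categorical',
    validate_filenames=False, # toy filenames above do not exist on disk
)
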
@@ -754,28 +753,39 @@
     "output_type": "stream",
     "text": [
      "Epoch 1/6\n",
      "276/276 [==============================] - 254s 903ms/step - loss: 0.9800 - accuracy: 0.6524 - val_loss: 1.3301 - val_accuracy: 0.5258\n",
      "1538/1538 [==============================] - 665s 429ms/step - loss: 1.0036 - accuracy: 0.6370 - val_loss: 0.8648 - val_accuracy: 0.6866\n",
      "Epoch 2/6\n",
      "276/276 [==============================] - 246s 891ms/step - loss: 0.4296 - accuracy: 0.8554 - val_loss: 0.8291 - val_accuracy: 0.7175\n",
      "1538/1538 [==============================] - 685s 445ms/step - loss: 0.5250 - accuracy: 0.8150 - val_loss: 0.8666 - val_accuracy: 0.7095\n",
      "Epoch 3/6\n",
      "276/276 [==============================] - 245s 885ms/step - loss: 0.1091 - accuracy: 0.9716 - val_loss: 0.9532 - val_accuracy: 0.7288\n",
      "1538/1538 [==============================] - 690s 448ms/step - loss: 0.1942 - accuracy: 0.9359 - val_loss: 1.0513 - val_accuracy: 0.7043\n",
      "Epoch 4/6\n",
      "276/276 [==============================] - 248s 895ms/step - loss: 0.0216 - accuracy: 0.9971 - val_loss: 1.0324 - val_accuracy: 0.7331\n",
      "Epoch 5/6\n",
      "276/276 [==============================] - 249s 900ms/step - loss: 0.0072 - accuracy: 0.9993 - val_loss: 1.1318 - val_accuracy: 0.7295\n",
      "Epoch 6/6\n",
      "276/276 [==============================] - 249s 899ms/step - loss: 0.0032 - accuracy: 0.9997 - val_loss: 1.1304 - val_accuracy: 0.7388\n"
      " 899/1538 [================>.............] - ETA: 4:00 - loss: 0.0808 - accuracy: 0.9739"
     ]
    },
    {
     "data": {
      "text/plain": [
       "<keras.callbacks.History at 0x7fd5404d6580>"
      ]
     },
     "execution_count": 17,
     "metadata": {},
     "output_type": "execute_result"
     "ename": "KeyboardInterrupt",
     "evalue": "",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-17-d95c7325b16d>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m model.fit(x=train_generator,\n\u001b[0m\u001b[1;32m      2\u001b[0m           \u001b[0msteps_per_epoch\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrain_generator\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      3\u001b[0m           \u001b[0mvalidation_data\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mvalidation_generator\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      4\u001b[0m           \u001b[0mvalidation_steps\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalidation_generator\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      5\u001b[0m           \u001b[0mepochs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m6\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.8/dist-packages/keras/utils/traceback_utils.py\u001b[0m in \u001b[0;36merror_handler\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m     62\u001b[0m     \u001b[0mfiltered_tb\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     63\u001b[0m     \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 64\u001b[0;31m       \u001b[0;32mreturn\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     65\u001b[0m     \u001b[0;32mexcept\u001b[0m \u001b[0mException\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m  \u001b[0;31m# pylint: disable=broad-except\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     66\u001b[0m       \u001b[0mfiltered_tb\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_process_traceback_frames\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__traceback__\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.8/dist-packages/keras/engine/training.py\u001b[0m in \u001b[0;36mfit\u001b[0;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)\u001b[0m\n\u001b[1;32m   1219\u001b[0m               \u001b[0mlogs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtmp_logs\u001b[0m  \u001b[0;31m# No error, now safe to assign to logs.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1220\u001b[0m               \u001b[0mend_step\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mstep\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mdata_handler\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstep_increment\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1221\u001b[0;31m               \u001b[0mcallbacks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mon_train_batch_end\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mend_step\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlogs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1222\u001b[0m               \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstop_training\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1223\u001b[0m                 \u001b[0;32mbreak\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.8/dist-packages/keras/callbacks.py\u001b[0m in \u001b[0;36mon_train_batch_end\u001b[0;34m(self, batch, logs)\u001b[0m\n\u001b[1;32m    434\u001b[0m     \"\"\"\n\u001b[1;32m    435\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_should_call_train_batch_hooks\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 436\u001b[0;31m       \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_call_batch_hook\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mModeKeys\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTRAIN\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'end'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlogs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mlogs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    437\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    438\u001b[0m   \u001b[0;32mdef\u001b[0m \u001b[0mon_test_batch_begin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlogs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.8/dist-packages/keras/callbacks.py\u001b[0m in \u001b[0;36m_call_batch_hook\u001b[0;34m(self, mode, hook, batch, logs)\u001b[0m\n\u001b[1;32m    293\u001b[0m       \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_call_batch_begin_hook\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmode\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlogs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    294\u001b[0m     \u001b[0;32melif\u001b[0m \u001b[0mhook\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m'end'\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 295\u001b[0;31m       \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_call_batch_end_hook\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmode\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlogs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    296\u001b[0m     \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    297\u001b[0m       raise ValueError(\n",
      "\u001b[0;32m/usr/local/lib/python3.8/dist-packages/keras/callbacks.py\u001b[0m in \u001b[0;36m_call_batch_end_hook\u001b[0;34m(self, mode, batch, logs)\u001b[0m\n\u001b[1;32m    314\u001b[0m       \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_batch_times\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch_time\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    315\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 316\u001b[0;31m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_call_batch_hook_helper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mhook_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlogs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    317\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    318\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_batch_times\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m>=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_num_batches_for_timing_check\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.8/dist-packages/keras/callbacks.py\u001b[0m in \u001b[0;36m_call_batch_hook_helper\u001b[0;34m(self, hook_name, batch, logs)\u001b[0m\n\u001b[1;32m    352\u001b[0m     \u001b[0;32mfor\u001b[0m \u001b[0mcallback\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcallbacks\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    353\u001b[0m       \u001b[0mhook\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgetattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcallback\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mhook_name\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 354\u001b[0;31m       \u001b[0mhook\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlogs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    355\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    356\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_check_timing\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.8/dist-packages/keras/callbacks.py\u001b[0m in \u001b[0;36mon_train_batch_end\u001b[0;34m(self, batch, logs)\u001b[0m\n\u001b[1;32m   1030\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1031\u001b[0m   \u001b[0;32mdef\u001b[0m \u001b[0mon_train_batch_end\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlogs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1032\u001b[0;31m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_batch_update_progbar\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlogs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1033\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1034\u001b[0m   \u001b[0;32mdef\u001b[0m \u001b[0mon_test_batch_end\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlogs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.8/dist-packages/keras/callbacks.py\u001b[0m in \u001b[0;36m_batch_update_progbar\u001b[0;34m(self, batch, logs)\u001b[0m\n\u001b[1;32m   1102\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mverbose\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1103\u001b[0m       \u001b[0;31m# Only block async when verbose = 1.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1104\u001b[0;31m       \u001b[0mlogs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf_utils\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msync_to_numpy_or_python_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlogs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1105\u001b[0m       \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mprogbar\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mupdate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mseen\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlist\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlogs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mitems\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfinalize\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mFalse\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1106\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.8/dist-packages/keras/utils/tf_utils.py\u001b[0m in \u001b[0;36msync_to_numpy_or_python_type\u001b[0;34m(tensors)\u001b[0m\n\u001b[1;32m    552\u001b[0m     \u001b[0;32mreturn\u001b[0m \u001b[0mt\u001b[0m  \u001b[0;31m# Don't turn ragged or sparse tensors to NumPy.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    553\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 554\u001b[0;31m   \u001b[0;32mreturn\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnest\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmap_structure\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0m_to_single_numpy_or_python_type\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtensors\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    555\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    556\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.8/dist-packages/tensorflow/python/util/nest.py\u001b[0m in \u001b[0;36mmap_structure\u001b[0;34m(func, *structure, **kwargs)\u001b[0m\n\u001b[1;32m    867\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    868\u001b[0m   return pack_sequence_as(\n\u001b[0;32m--> 869\u001b[0;31m       \u001b[0mstructure\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mfunc\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mx\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mentries\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    870\u001b[0m       expand_composites=expand_composites)\n\u001b[1;32m    871\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.8/dist-packages/tensorflow/python/util/nest.py\u001b[0m in \u001b[0;36m<listcomp>\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m    867\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    868\u001b[0m   return pack_sequence_as(\n\u001b[0;32m--> 869\u001b[0;31m       \u001b[0mstructure\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mfunc\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mx\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mentries\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    870\u001b[0m       expand_composites=expand_composites)\n\u001b[1;32m    871\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.8/dist-packages/keras/utils/tf_utils.py\u001b[0m in \u001b[0;36m_to_single_numpy_or_python_type\u001b[0;34m(t)\u001b[0m\n\u001b[1;32m    548\u001b[0m   \u001b[0;32mdef\u001b[0m \u001b[0m_to_single_numpy_or_python_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mt\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    549\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mt\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTensor\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 550\u001b[0;31m       \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mt\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnumpy\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    551\u001b[0m       \u001b[0;32mreturn\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mitem\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mndim\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m0\u001b[0m \u001b[0;32melse\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    552\u001b[0m     \u001b[0;32mreturn\u001b[0m \u001b[0mt\u001b[0m  \u001b[0;31m# Don't turn ragged or sparse tensors to NumPy.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.8/dist-packages/tensorflow/python/framework/ops.py\u001b[0m in \u001b[0;36mnumpy\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m   1147\u001b[0m     \"\"\"\n\u001b[1;32m   1148\u001b[0m     \u001b[0;31m# TODO(slebedev): Consider avoiding a copy for non-CPU or remote tensors.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1149\u001b[0;31m     \u001b[0mmaybe_arr\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_numpy\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m  \u001b[0;31m# pylint: disable=protected-access\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1150\u001b[0m     \u001b[0;32mreturn\u001b[0m \u001b[0mmaybe_arr\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcopy\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmaybe_arr\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mndarray\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32melse\u001b[0m \u001b[0mmaybe_arr\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1151\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.8/dist-packages/tensorflow/python/framework/ops.py\u001b[0m in \u001b[0;36m_numpy\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m   1113\u001b[0m   \u001b[0;32mdef\u001b[0m \u001b[0m_numpy\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1114\u001b[0m     \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1115\u001b[0;31m       \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_numpy_internal\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1116\u001b[0m     \u001b[0;32mexcept\u001b[0m \u001b[0mcore\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_NotOkStatusException\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m  \u001b[0;31m# pylint: disable=protected-access\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1117\u001b[0m       \u001b[0;32mraise\u001b[0m \u001b[0mcore\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_status_to_exception\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;32mNone\u001b[0m  \u001b[0;31m# pylint: disable=protected-access\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
     ]
    }
   ],
   "source": [
@@ -789,19 +799,10 @@
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "execution_count": null,
   "id": "63f791af",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/usr/local/lib/python3.8/dist-packages/keras/engine/functional.py:1410: CustomMaskWarning: Custom mask layers require a config and must override get_config. When loading, the custom mask layer must be passed to the custom_objects argument.\n",
      "  layer_config = serialize_layer_fn(layer)\n"
     ]
    }
   ],
   "outputs": [],
   "source": [
    "model.save(\"Model_1.h5\")"
   ]

@@ -247,7 +247,7 @@ class ShoppingApi:
        except (requests.exceptions.RequestException, KeyError):
            print('connection error. IP limit possibly exceeded')
            print(response)
            return # returns NoneType. Handled at conky()
            return # this returns NoneType. Handled at conky()

        return item

@@ -7,10 +7,97 @@ from ebaysdk.finding import Connection as Finding
from ebaysdk.shopping import Connection as Shopping
import concurrent.futures
import config as cfg
import id_update
import store_ids
import ebay_api

# ids = id_update.main()
with open('ebay_ids.txt') as f:
    ids = json.load(f)
tapi = Trading(config_file='ebay.yaml')


def revised_price(id, original_prices):
    percent = (random.randint(95, 105))/100
    rev_price = original_prices[id]*percent
    rev_price = str(round(rev_price, 2))
    return rev_price

def revise_item(id, rev_price):
    response = tapi.execute(
        'ReviseItem', {
            'item': {
            'ItemID': id,
            'StartPrice':rev_price
            }

        }
    )

def revise_items():
    with open('original_prices.txt') as f:
        original_prices = json.load(f)

    for id in original_prices:
        rev_price = revised_price(id, original_prices)
        revise_item(id, rev_price)

def get_prices(twenty_id):

    '''
   Gets raw JSON data from multiple live listings given multiple itemIds
    '''

    with open('temp_oauth_token.txt') as f:
        access_token = json.load(f)

    headers = {
        "X-EBAY-API-IAF-TOKEN":access_token,
        "version":"671",
        }

    url = "https://open.api.ebay.com/shopping?&callname=GetMultipleItems&responseencoding=JSON&ItemID="+twenty_id

    try:

        response = requests.get(url, headers=headers,timeout=24)
        response.raise_for_status()
        response = response.json()
        item = response['Item']


    except (requests.exceptions.RequestException, KeyError):
        print('connection error. IP limit possibly exceeded')
        print(response)
        return # this returns NoneType. Handled at get_prices_thread

    id_price_dict = {item['ItemID']:item['ConvertedCurrentPrice']['Value'] for item in item}
    return id_price_dict

def get_prices_thread(twenty_ids_list):
    '''
    Runs get_prices in multiple threads
    '''

    id_price_dict = {}

    with concurrent.futures.ThreadPoolExecutor() as executor:
        for future in executor.map(get_prices, twenty_ids_list):
            if future is not None:
                id_price_dict.update(future)
            else:
                print('response is None')
                break
    return id_price_dict

def main():

    with open('ebay_ids.txt') as f:
        ids = json.load(f)

    twenty_id_list = [','.join(ids[n:n+20]) for n in list(range(0,
        len(ids), 20))]

    ids = store_ids.main() # gets your store ids for all listings
    ebay_api.getAuthToken() # updates your Oauth Token
    id_price_dict = get_prices_thread(twenty_id_list)
    return id_price_dict

if __name__=="__main__":
    main()
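
(Aside: a minimal sketch of how the pieces added in this commit fit together. The module name update_prices is a guess, and a prior run is assumed to have written ebay_ids.txt, temp_oauth_token.txt, and original_prices.txt.)

import update_prices  # hypothetical import name for the new file shown above

# Refresh store ids and the OAuth token, then fetch current prices for every
# listing in batches of 20 ItemIDs via GetMultipleItems, using threads.
id_price_dict = update_prices.main()

# Nudge each listing's StartPrice to a random 95-105% of its recorded original
# price by calling ReviseItem once per listing.
update_prices.revise_items()
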