projet-reseaux-profond/notebook.ipynb


{
"cells": [
{
"cell_type": "code",
2022-04-05 20:38:05 +00:00
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
"import PIL.Image\n",
"import glob\n",
"import os\n",
"\n",
"import matplotlib.pyplot as plt\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
2022-04-05 21:31:33 +00:00
"execution_count": null,
"metadata": {},
"outputs": [
{
2022-04-05 21:31:33 +00:00
"ename": "",
"evalue": "",
"output_type": "error",
"traceback": [
"\u001b[1;31mnotebook controller is DISPOSED. \n",
"View Jupyter <a href='command:jupyter.viewOutput'>log</a> for further details."
]
}
],
"source": [
"IMAGE_SIZE = (400, 150, 3)\n",
"RESIZED_SIZE = (100, 50, 3)\n",
"RESIZED_SIZE_PIL = (RESIZED_SIZE[1], RESIZED_SIZE[0], RESIZED_SIZE[2])\n",
2022-04-05 20:38:05 +00:00
"DATASET_PATH = \"./dataset_rot/\"\n",
"DATASET_PATH = os.path.abspath(DATASET_PATH)\n",
2022-04-05 20:38:05 +00:00
"CLASSES = next(os.walk(DATASET_PATH))[1]\n",
"\n",
"print(DATASET_PATH)\n",
"print(CLASSES)"
]
},
{
"cell_type": "code",
2022-04-05 21:31:33 +00:00
"execution_count": null,
"metadata": {},
2022-04-05 21:31:33 +00:00
"outputs": [
{
"ename": "",
"evalue": "",
"output_type": "error",
"traceback": [
"\u001b[1;31mnotebook controller is DISPOSED. \n",
"View Jupyter <a href='command:jupyter.viewOutput'>log</a> for further details."
]
}
],
"source": [
"def load_data():\n",
" # Récupération des fichiers\n",
" files = glob.glob(f\"{DATASET_PATH}/**/*.jpg\", recursive = True)\n",
"\n",
" # Initialise les structures de données\n",
" x = np.zeros((len(files), *RESIZED_SIZE_PIL))\n",
" y = np.zeros((len(files), 1))\n",
"\n",
" # print(f\"x.shape = {x.shape}\")\n",
"\n",
" for i, path in enumerate(files):\n",
" # Lecture de l'image\n",
" img = PIL.Image.open(path)\n",
"\n",
" # print(f\"img.size = {img.size}\")\n",
"\n",
" # Redimensionnement de l'image\n",
" img = img.resize(RESIZED_SIZE[:-1], PIL.Image.ANTIALIAS)\n",
"\n",
" # print(f\"img.size = {img.size}\")\n",
"\n",
" test = np.asarray(img)\n",
"\n",
" # print(f\"test.shape = {test.shape}\")\n",
"\n",
" # Remplissage de la variable x\n",
" x[i] = test\n",
"\n",
" # On récupère l'index dans le path\n",
" class_label = path.split(\"/\")[-2]\n",
"\n",
" # On récupère le numéro de la classe à partir du string\n",
" class_label = CLASSES.index(class_label)\n",
" \n",
" # Remplissage de la variable y\n",
" y[i] = class_label\n",
"\n",
" return x, y"
]
},
{
"cell_type": "code",
2022-04-05 21:31:33 +00:00
"execution_count": null,
"metadata": {},
2022-04-05 21:31:33 +00:00
"outputs": [
{
"ename": "",
"evalue": "",
"output_type": "error",
"traceback": [
"\u001b[1;31mnotebook controller is DISPOSED. \n",
"View Jupyter <a href='command:jupyter.viewOutput'>log</a> for further details."
]
}
],
"source": [
"x, y = load_data()\n",
"x = x / 255"
]
},
{
"cell_type": "code",
2022-04-05 21:31:33 +00:00
"execution_count": null,
"metadata": {},
"outputs": [
{
2022-04-05 21:31:33 +00:00
"ename": "",
"evalue": "",
"output_type": "error",
"traceback": [
"\u001b[1;31mnotebook controller is DISPOSED. \n",
"View Jupyter <a href='command:jupyter.viewOutput'>log</a> for further details."
]
}
],
"source": [
"# Randomisation des indices et affichage de 9 images alétoires de la base d'apprentissage\n",
"indices = np.arange(x.shape[0])\n",
"np.random.shuffle(indices)\n",
"\n",
"plt.figure(figsize=(12, 6))\n",
"\n",
"for i in range(0, 3*3):\n",
" plt.subplot(3, 3, i+1)\n",
" plt.title(CLASSES[int(y[indices[i]])])\n",
" plt.imshow(x[indices[i]])\n",
"\n",
"plt.tight_layout()\n",
"plt.show()"
]
},
{
"cell_type": "code",
2022-04-05 21:31:33 +00:00
"execution_count": null,
"metadata": {},
2022-04-05 21:31:33 +00:00
"outputs": [
{
"ename": "",
"evalue": "",
"output_type": "error",
"traceback": [
"\u001b[1;31mnotebook controller is DISPOSED. \n",
"View Jupyter <a href='command:jupyter.viewOutput'>log</a> for further details."
]
}
],
"source": [
"import tensorflow\n",
"from tensorflow.keras.models import Sequential\n",
"from tensorflow.keras.layers import InputLayer, Dense, Flatten, Conv2D, MaxPooling2D\n",
"from tensorflow.keras import optimizers"
]
},
{
"cell_type": "code",
2022-04-05 21:31:33 +00:00
"execution_count": null,
"metadata": {},
"outputs": [
2022-04-05 20:38:05 +00:00
{
2022-04-05 21:31:33 +00:00
"ename": "",
"evalue": "",
"output_type": "error",
"traceback": [
"\u001b[1;31mnotebook controller is DISPOSED. \n",
"View Jupyter <a href='command:jupyter.viewOutput'>log</a> for further details."
]
2022-04-05 20:38:05 +00:00
}
],
"source": [
"model = Sequential()\n",
"\n",
"model.add(InputLayer(input_shape=RESIZED_SIZE_PIL))\n",
"\n",
"model.add(Conv2D(32, 3, activation=\"relu\"))\n",
"model.add(MaxPooling2D(pool_size=(2, 2)))\n",
"\n",
"model.add(Conv2D(64, 3, activation=\"relu\"))\n",
"model.add(MaxPooling2D(pool_size=(2, 2)))\n",
"\n",
"model.add(Conv2D(92, 3, activation=\"relu\"))\n",
"model.add(MaxPooling2D(pool_size=(2, 2)))\n",
"\n",
"model.add(Flatten())\n",
"\n",
"model.add(Dense(250, activation=\"relu\"))\n",
"\n",
"model.add(Dense(4, activation=\"softmax\"))\n",
"\n",
"model.summary()\n",
"\n",
"adam = optimizers.Adam(learning_rate=7e-6)\n",
"model.compile(optimizer=adam, loss='sparse_categorical_crossentropy', metrics=['accuracy'])\n",
"history = model.fit(x, y, validation_split=0.15, epochs=10, batch_size=25)"
]
},
{
"cell_type": "code",
2022-04-05 21:31:33 +00:00
"execution_count": null,
2022-04-05 20:38:05 +00:00
"metadata": {},
"outputs": [
{
2022-04-05 21:31:33 +00:00
"ename": "",
"evalue": "",
"output_type": "error",
"traceback": [
"\u001b[1;31mnotebook controller is DISPOSED. \n",
"View Jupyter <a href='command:jupyter.viewOutput'>log</a> for further details."
]
2022-04-05 20:38:05 +00:00
}
],
"source": [
"def plot_training_analysis():\n",
" acc = history.history['accuracy']\n",
" val_acc = history.history['val_accuracy']\n",
" loss = history.history['loss']\n",
" val_loss = history.history['val_loss']\n",
"\n",
" epochs = range(len(acc))\n",
"\n",
" plt.plot(epochs, acc, 'b', linestyle=\"--\",label='Training acc')\n",
" plt.plot(epochs, val_acc, 'g', label='Validation acc')\n",
" plt.title('Training and validation accuracy')\n",
" plt.legend()\n",
"\n",
" plt.figure()\n",
"\n",
" plt.plot(epochs, loss, 'b', linestyle=\"--\",label='Training loss')\n",
" plt.plot(epochs, val_loss,'g', label='Validation loss')\n",
" plt.title('Training and validation loss')\n",
" plt.legend()\n",
"\n",
" plt.show()\n",
"\n",
"plot_training_analysis()"
]
},
{
"cell_type": "code",
2022-04-05 21:31:33 +00:00
"execution_count": 3,
2022-04-05 20:38:05 +00:00
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
2022-04-05 20:38:05 +00:00
"/tmp/deepl/data\n",
"[]\n"
]
2022-04-05 20:38:05 +00:00
}
],
"source": [
"IMAGE_SIZE = (400, 150, 3)\n",
"RESIZED_SIZE = (100, 50, 3)\n",
"RESIZED_SIZE_PIL = (RESIZED_SIZE[1], RESIZED_SIZE[0], RESIZED_SIZE[2])\n",
"DATASET_PATH = \"./data/\"\n",
"DATASET_PATH = os.path.abspath(DATASET_PATH)\n",
"CLASSES = next(os.walk(DATASET_PATH))[1]\n",
"\n",
"print(DATASET_PATH)\n",
"print(CLASSES)"
]
},
{
"cell_type": "code",
2022-04-05 21:31:33 +00:00
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"import tensorflow\n",
"from tensorflow.keras.models import Sequential\n",
"from tensorflow.keras.layers import InputLayer, Dense, Flatten, Conv2D, MaxPooling2D\n",
"from tensorflow.keras import optimizers"
]
},
{
"cell_type": "code",
"execution_count": 27,
2022-04-05 20:38:05 +00:00
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
2022-04-05 21:31:33 +00:00
"dataset_length = 10000\n",
"batch size = 32\n",
"number of batchs = 312\n",
2022-04-05 20:38:05 +00:00
"\n",
2022-04-05 21:31:33 +00:00
"train_size = 250\n",
"validation_size = 9750\n"
]
}
],
"source": [
"import tensorflow as tf\n",
"import tensorflow_addons as tfa\n",
"import sqlite3\n",
"\n",
"AUTOTUNE = tf.data.experimental.AUTOTUNE\n",
"BATCH_SIZE = 32\n",
"SHUFFLE_SIZE = 32\n",
"# Cap on the number of rows read from the sqlite index.\n",
"LIMIT = 10000\n",
"\n",
"def customGenerator():\n",
"    \"\"\"Yield (image, label) pairs for each row of the sqlite index.\"\"\"\n",
"    # Fetch all rows up front and close the connection instead of leaking it.\n",
"    conn = sqlite3.connect(f\"{DATASET_PATH}/index.db\")\n",
"    try:\n",
"        data = conn.execute(f\"SELECT uuid, model from data LIMIT {LIMIT}\").fetchall()\n",
"    finally:\n",
"        conn.close()\n",
"\n",
"    for uuid, model in data:\n",
"        # Decode the JPEG and resize it to the working resolution.\n",
"        img = tf.io.read_file(f\"{DATASET_PATH}/{uuid}.jpg\")\n",
"        img = tf.image.decode_jpeg(img, channels=IMAGE_SIZE[2])\n",
"        img = tf.image.convert_image_dtype(img, tf.float32)\n",
"        img = tf.image.resize(img, RESIZED_SIZE[:-1])\n",
"\n",
"        label = tf.convert_to_tensor(model, dtype=tf.uint8)\n",
"\n",
"        yield img, label\n",
"\n",
"def cutout(image, label):\n",
"    \"\"\"Augmentation: blank out a random 6x6 patch (expects batched input).\"\"\"\n",
"    img = tfa.image.random_cutout(image, (6, 6), constant_values=1)\n",
"    return (img, label)\n",
"\n",
"def rotate(image, label):\n",
"    \"\"\"Augmentation: rotate the image by pi radians (180 degrees).\"\"\"\n",
"    img = tfa.image.rotate(image, tf.constant(np.pi))\n",
"    return (img, label)\n",
"\n",
"def set_shapes(image, label):\n",
"    \"\"\"Give the generator output static shapes so Keras can build the model.\"\"\"\n",
"    image.set_shape(RESIZED_SIZE)\n",
"    label.set_shape([])\n",
"    return image, label\n",
"\n",
"dataset = tf.data.Dataset.from_generator(\n",
"    generator=customGenerator,\n",
"    output_types=(tf.float32, tf.uint8)\n",
")\n",
"\n",
"conn = sqlite3.connect(f\"{DATASET_PATH}/index.db\")\n",
"(dataset_length,) = conn.execute(\"SELECT count(uuid) from data\").fetchone()\n",
"conn.close()\n",
"dataset_length = min(dataset_length, LIMIT)\n",
"\n",
"print(f\"dataset_length = {dataset_length}\")\n",
"print(f\"batch size = {BATCH_SIZE}\")\n",
"print(f\"number of batchs = {dataset_length // BATCH_SIZE}\")\n",
"\n",
"print()\n",
"\n",
"# Number of BATCHES used for training (~80% of the data). The original\n",
"# report mixed units (batches for train, samples for validation).\n",
"train_size = int(0.8 * dataset_length / BATCH_SIZE)\n",
"print(f\"train_size = {train_size * BATCH_SIZE} samples ({train_size} batches)\")\n",
"print(f\"validation_size = {dataset_length - train_size * BATCH_SIZE} samples\")\n",
"\n",
"dataset = (\n",
"    dataset.map(set_shapes)\n",
"    .batch(BATCH_SIZE)\n",
"    # .map(cutout)\n",
")\n",
"\n",
"# Split BEFORE shuffling: take()/skip() applied after shuffle() re-draws the\n",
"# shuffle on each iteration, so train and validation examples would overlap\n",
"# and change between epochs. Only the training split needs shuffling.\n",
"dataset_train = dataset.take(train_size).shuffle(SHUFFLE_SIZE).prefetch(AUTOTUNE)\n",
"dataset_validate = dataset.skip(train_size).prefetch(AUTOTUNE)"
]
},
{
"cell_type": "code",
2022-04-05 21:31:33 +00:00
"execution_count": 30,
2022-04-05 20:38:05 +00:00
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
2022-04-05 21:31:33 +00:00
"Model: \"sequential_7\"\n",
2022-04-05 20:38:05 +00:00
"_________________________________________________________________\n",
" Layer (type) Output Shape Param # \n",
"=================================================================\n",
2022-04-05 21:31:33 +00:00
" conv2d_21 (Conv2D) (None, 98, 48, 32) 896 \n",
2022-04-05 20:38:05 +00:00
" \n",
2022-04-05 21:31:33 +00:00
" max_pooling2d_21 (MaxPoolin (None, 49, 24, 32) 0 \n",
" g2D) \n",
2022-04-05 20:38:05 +00:00
" \n",
2022-04-05 21:31:33 +00:00
" conv2d_22 (Conv2D) (None, 47, 22, 64) 18496 \n",
2022-04-05 20:38:05 +00:00
" \n",
2022-04-05 21:31:33 +00:00
" max_pooling2d_22 (MaxPoolin (None, 23, 11, 64) 0 \n",
" g2D) \n",
2022-04-05 20:38:05 +00:00
" \n",
2022-04-05 21:31:33 +00:00
" conv2d_23 (Conv2D) (None, 21, 9, 92) 53084 \n",
2022-04-05 20:38:05 +00:00
" \n",
2022-04-05 21:31:33 +00:00
" max_pooling2d_23 (MaxPoolin (None, 10, 4, 92) 0 \n",
" g2D) \n",
2022-04-05 20:38:05 +00:00
" \n",
2022-04-05 21:31:33 +00:00
" flatten_7 (Flatten) (None, 3680) 0 \n",
2022-04-05 20:38:05 +00:00
" \n",
2022-04-05 21:31:33 +00:00
" dense_14 (Dense) (None, 250) 920250 \n",
2022-04-05 20:38:05 +00:00
" \n",
2022-04-05 21:31:33 +00:00
" dense_15 (Dense) (None, 4) 1004 \n",
2022-04-05 20:38:05 +00:00
" \n",
"=================================================================\n",
"Total params: 993,730\n",
"Trainable params: 993,730\n",
"Non-trainable params: 0\n",
"_________________________________________________________________\n",
2022-04-05 21:31:33 +00:00
"Epoch 1/25\n",
"250/250 [==============================] - 53s 211ms/step - loss: 0.9718 - accuracy: 0.6635 - val_loss: 2.2070 - val_accuracy: 0.1400\n",
"Epoch 2/25\n",
"250/250 [==============================] - 52s 209ms/step - loss: 0.7459 - accuracy: 0.6416 - val_loss: 2.2988 - val_accuracy: 0.1400\n",
"Epoch 3/25\n",
"250/250 [==============================] - 53s 210ms/step - loss: 0.6746 - accuracy: 0.6664 - val_loss: 2.4333 - val_accuracy: 0.1400\n",
"Epoch 4/25\n",
"250/250 [==============================] - 53s 211ms/step - loss: 0.6437 - accuracy: 0.6920 - val_loss: 2.3739 - val_accuracy: 0.1400\n",
"Epoch 5/25\n",
"250/250 [==============================] - 53s 211ms/step - loss: 0.5982 - accuracy: 0.7157 - val_loss: 2.1336 - val_accuracy: 0.1400\n",
"Epoch 6/25\n",
"250/250 [==============================] - 53s 210ms/step - loss: 0.5497 - accuracy: 0.7380 - val_loss: 2.0098 - val_accuracy: 0.1400\n",
"Epoch 7/25\n",
"250/250 [==============================] - 53s 210ms/step - loss: 0.5108 - accuracy: 0.7561 - val_loss: 1.9775 - val_accuracy: 0.1400\n",
"Epoch 8/25\n",
"250/250 [==============================] - 53s 211ms/step - loss: 0.4761 - accuracy: 0.7725 - val_loss: 1.9294 - val_accuracy: 0.1400\n",
"Epoch 9/25\n",
"250/250 [==============================] - 53s 210ms/step - loss: 0.4304 - accuracy: 0.7940 - val_loss: 2.0166 - val_accuracy: 0.1400\n",
"Epoch 10/25\n",
"250/250 [==============================] - 53s 214ms/step - loss: 0.4140 - accuracy: 0.8075 - val_loss: 1.7652 - val_accuracy: 0.1400\n",
"Epoch 11/25\n",
"250/250 [==============================] - 53s 211ms/step - loss: 0.3672 - accuracy: 0.8279 - val_loss: 1.6594 - val_accuracy: 0.1400\n",
"Epoch 12/25\n",
"250/250 [==============================] - 53s 211ms/step - loss: 0.3285 - accuracy: 0.8512 - val_loss: 1.5178 - val_accuracy: 0.1400\n",
"Epoch 13/25\n",
"250/250 [==============================] - 52s 210ms/step - loss: 0.2867 - accuracy: 0.8702 - val_loss: 1.4753 - val_accuracy: 0.1400\n",
"Epoch 14/25\n",
"250/250 [==============================] - 52s 209ms/step - loss: 0.2500 - accuracy: 0.8905 - val_loss: 1.3835 - val_accuracy: 0.1400\n",
"Epoch 15/25\n",
"250/250 [==============================] - 53s 210ms/step - loss: 0.2186 - accuracy: 0.9100 - val_loss: 1.1579 - val_accuracy: 0.1405\n",
"Epoch 16/25\n",
"250/250 [==============================] - 53s 210ms/step - loss: 0.1810 - accuracy: 0.9309 - val_loss: 1.0902 - val_accuracy: 0.1510\n",
"Epoch 17/25\n",
"250/250 [==============================] - 52s 210ms/step - loss: 0.1555 - accuracy: 0.9451 - val_loss: 0.9250 - val_accuracy: 0.2240\n",
"Epoch 18/25\n",
"250/250 [==============================] - 53s 210ms/step - loss: 0.1280 - accuracy: 0.9626 - val_loss: 0.7926 - val_accuracy: 0.3720\n",
"Epoch 19/25\n",
"250/250 [==============================] - 53s 212ms/step - loss: 0.1065 - accuracy: 0.9728 - val_loss: 0.6717 - val_accuracy: 0.5145\n",
"Epoch 20/25\n",
"250/250 [==============================] - 53s 211ms/step - loss: 0.0878 - accuracy: 0.9816 - val_loss: 0.5564 - val_accuracy: 0.6530\n",
"Epoch 21/25\n",
"250/250 [==============================] - 53s 212ms/step - loss: 0.0714 - accuracy: 0.9901 - val_loss: 0.4285 - val_accuracy: 0.8095\n",
"Epoch 22/25\n",
"250/250 [==============================] - 53s 212ms/step - loss: 0.0567 - accuracy: 0.9942 - val_loss: 0.3829 - val_accuracy: 0.8545\n",
"Epoch 23/25\n",
"250/250 [==============================] - 53s 213ms/step - loss: 0.0473 - accuracy: 0.9975 - val_loss: 0.2913 - val_accuracy: 0.9285\n",
"Epoch 24/25\n",
"250/250 [==============================] - 53s 210ms/step - loss: 0.0383 - accuracy: 0.9990 - val_loss: 0.2352 - val_accuracy: 0.9615\n",
"Epoch 25/25\n",
"250/250 [==============================] - 53s 211ms/step - loss: 0.0311 - accuracy: 0.9996 - val_loss: 0.1907 - val_accuracy: 0.9815\n"
2022-04-05 20:38:05 +00:00
]
}
],
"source": [
"model = Sequential([\n",
" InputLayer(input_shape=RESIZED_SIZE),\n",
" \n",
" Conv2D(32, 3, activation=\"relu\"),\n",
" MaxPooling2D(pool_size=(2, 2)),\n",
" \n",
" Conv2D(64, 3, activation=\"relu\"),\n",
" MaxPooling2D(pool_size=(2, 2)),\n",
"\n",
" Conv2D(92, 3, activation=\"relu\"),\n",
" MaxPooling2D(pool_size=(2, 2)),\n",
"\n",
" Flatten(),\n",
"\n",
" Dense(250, activation=\"relu\"),\n",
" Dense(4, activation=\"softmax\")\n",
"])\n",
"\n",
"model.summary()\n",
"\n",
2022-04-05 20:38:05 +00:00
"adam = optimizers.Adam(learning_rate=7e-6)\n",
2022-04-05 21:31:33 +00:00
"model.compile(optimizer=adam, loss='sparse_categorical_crossentropy', metrics=['accuracy'])\n",
"history = model.fit(dataset_train, validation_data=dataset_validate, epochs=25, batch_size=BATCH_SIZE)"
2022-04-05 20:38:05 +00:00
]
},
{
"cell_type": "code",
2022-04-05 21:31:33 +00:00
"execution_count": 31,
2022-04-05 20:38:05 +00:00
"metadata": {},
"outputs": [
{
"data": {
2022-04-05 21:31:33 +00:00
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAEICAYAAACktLTqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAAzGklEQVR4nO3dd3hU1db48e9KIIQSem9SBAQMISGAl6JwUSkiiCAQEAiIoNjbffV6VS6Wnyh6fXmxXHqXIkVEEEVBsdNCB0EIEjqhE0La/v2xJxBCKkxyMjPr8zzz5Mw5Z86sMydZ2bP3PnuLMQallFLexc/pAJRSSrmfJnellPJCmtyVUsoLaXJXSikvpMldKaW8kCZ3pZTyQprcvZiILBeRQe7e10kiEi0id+bBcY2I3Oxa/kREXsnJvtfxPv1F5OvrjVOpnBLt516wiMj5NE+LAZeAZNfz4caYWfkfVcEhItHAUGPMSjcf1wD1jDF73LWviNQC9gGFjTFJbglUqRwq5HQA6mrGmBKpy1klMhEppAlDFRT6+1jwaLWMhxCRdiISIyL/IyJHgCkiUkZElorIcRE55VqunuY1q0VkqGs5UkR+FJExrn33iUjn69y3toj8ICLnRGSliHwoIjMziTsnMb4uIj+5jve1iJRPs32AiOwXkVgReTmLz6eliBwREf8063qIyGbXcgsR+UVETovIYREZJyIBmRxrqoi8keb5C67XHBKRIen2vUdENorIWRE5ICIj02z+wfXztIicF5G/pX62aV7fSkTWisgZ189WOf1scvk5lxWRKa5zOCUii9Ns6y4iUa5z+FNEOrnWX1UFJiIjU6+ziNRyVU89JCJ/Ad+51s93XYczrt+RxmleX1RE3nNdzzOu37GiIvKliDyR7nw2i0iPjM5V5Ywmd89SGSgL3AQMw16/Ka7nNYGLwLgsXt8S2AWUB94BJomIXMe+s4HfgXLASGBAFu+Zkxj7AYOBikAA8DyAiDQCPnYdv6rr/aqTAWPMb8AF4O/pjjvbtZwMPOM6n78BHYARWcSNK4ZOrnjuAuoB6ev7LwADgdLAPcCjInKfa9vtrp+ljTEljDG/pDt2WeBLYKzr3N4HvhSRcunO4ZrPJgPZfc4zsNV8jV3H+o8rhhbAdOAF1zncDkRn8h4ZuQNoCHR0PV+O/ZwqAhuAtNWIY4BmQCvs7/E/gBRgGvBg6k4iEgJUw3426noZY/RRQB/YP7I7XcvtgAQgMIv9mwKn0jxfja3WAYgE9qTZVgwwQOXc7ItNHElAsTTbZwIzc3hOGcX4rzTPRwBfuZZfBeak2Vbc9Rncmcmx3wAmu5aDsIn3pkz2fRpYlOa5AW52LU8F3nAtTwbeTrNf/bT7ZnDcD4D/uJZrufYtlGZ7JPCja3kA8Hu61/8CRGb32eTmcwaqYJNomQz2+29qvFn9/rmej0y9zmnOrU4WMZR27VMK+8/nIhCSwX6BwClsOwbYfwIf5cXflC89tOTuWY4bY+JTn4hIMRH5r+tr7llsNUDptFUT6RxJXTDGxLkWS+Ry36rAyTTrAA5kFnAOYzySZjkuTUxV0x7bGHMBiM3svbCl9PtFpAhwP7DBGLPfFUd9V1XFEVccb2FL8dm5KgZgf7rzaykiq1zVIWeAR3J43NRj70+3bj+21Joqs8/mKtl8zjWw1+xUBi+tAfyZw3gzcvmzERF/EXnbVbVzlivfAMq7HoEZvZfrd3ou8KCI+AER2G8a6gZocvcs6bs2PQc0AFoaY0pypRogs6oWdzgMlBWRYmnW1chi/xuJ8XDaY7ves1xmOxtjtmOTY2eurpIBW72zE1s6LAn883piwH5zSWs2sASoYYwpBXyS5rjZdUU7hK1GSasmcDAHcaWX1ed8AHvNSmfwugNA3UyOeQH7rS1V5Qz2SXuO/YDu2KqrUtjSfWoMJ4D4LN5rGtAfW10WZ9JVYanc0+Tu2YKwX3VPu+pvX8vrN3SVhNcBI0
UkQET+BtybRzF+BnQVkTauxs9RZP87Oxt4Cpvc5qeL4yxwXkRuAR7NYQzzgEgRaeT655I+/iBsqTjeVX/dL82249jqkDqZHHsZUF9E+olIIRHpAzQCluYwtvRxZPg5G2MOY+vCP3I1vBYWkdTkPwkYLCIdRMRPRKq5Ph+AKKCva/9woFcOYriE/XZVDPvtKDWGFGwV1/siUtVVyv+b61sWrmSeAryHltrdQpO7Z/sAKIotFf0KfJVP79sf2ygZi63nnov9o87IB1xnjMaYbcBj2IR9GFsvG5PNyz7FNvJ9Z4w5kWb989jEew6Y4Io5JzEsd53Dd8Ae18+0RgCjROQcto1gXprXxgFvAj+J7aVzW7pjxwJdsaXuWGwDY9d0cefUB2T9OQ8AErHfXo5h2xwwxvyObbD9D3AG+J4r3yZewZa0TwH/5upvQhmZjv3mdBDY7oojreeBLcBa4CQwmqtz0HQgGNuGo26Q3sSkbpiIzAV2GmPy/JuD8l4iMhAYZoxp43Qs3kBL7irXRKS5iNR1fY3vhK1nXexwWMqDuaq8RgDjnY7FW2hyV9ejMrab3nlsH+1HjTEbHY1IeSwR6YhtnzhK9lU/Koe0WkYppbyQltyVUsoLOTZwWPny5U2tWrWcenullPJI69evP2GMqZDdfo4l91q1arFu3Tqn3l4ppTySiKS/qzlDWi2jlFJeSJO7Ukp5IU3uSinlhQrUTEyJiYnExMQQHx+f/c7KEYGBgVSvXp3ChQs7HYpSKgsFKrnHxMQQFBRErVq1yHwOCeUUYwyxsbHExMRQu3Ztp8NRSmUh22oZEZksIsdEZGsm20VExorIHtfUWGHXG0x8fDzlypXTxF5AiQjlypXTb1ZKeYCc1LlPBTplsb0zdlqtetip3z6+kYA0sRdsen2U8gzZVssYY34QkVpZ7NIdmG7sOAa/ikhpEaniGkNaKaU8RlISxMXBpUtXP+rVg8KFYd8+2L376m1JSRARAQEB8MsvsGEDJCdfeaSkwAsvgAgsXQrx8dAru5Hx3cAdde7VuHoashjXumuSu4gMw5buqVkz/YQ2zouNjaVDhw4AHDlyBH9/fypUsDeC/f777wQEBGT62nXr1jF9+nTGjh2b5Xu0atWKn3/+2X1BK6UuS0iAI0fgxAk4eRJOn4YzZ+Dee6FiRfjxR5g40a5L+1ixAurWhbFj4bnnrj3ugQNQvTrMmAGvZTCwdffuNrkvWgTvvnvt9ueeA39/+PJLOHXKc5J7jhljxuMa0jM8PLzAjVhWrlw5oqKiABg5ciQlSpTg+eevTDaflJREoUIZf2Th4eGEh4dn+x6a2JXKOWMgJgZKloRSpWzJeeJEiI298jhxAsaNg9tvhy++yDhxfv+9Te6HD8N339ljlSoFlStDgwaQ+md9xx02ORcpAoGB9meRIlCmjN0+aBB06HBlfZEitkQfFGS3v/IKPP+8TeRpH36uCvCPPrIl+PzgjuR+kKvnmKzO9c0BWSBFRkYSGBjIxo0bad26NX379uWpp54iPj6eokWLMmXKFBo0aMDq1asZM2YMS5cuZeTIkfz111/s3buXv/76i6effponn3wSgBIlSnD+/HlWr17NyJEjKV++PFu3bqVZs2bMnDkTEWHZsmU8++yzFC9enNatW7N3716WLr165rXo6GgGDBjAhQsXABg3bhytWrUCYPTo0cycORM/Pz86d+7M22+/zZ49e3jkkUc4fvw4/v7+zJ8/n7p1M5vOUqn8ZYxNeidOwIcfwq5dsHMn/PEHXLgAU6ZAZCQcPQqjR0PZslCunH3UqWOTLEDz5jb5p24rXdr+Y6hSxW5/4AH7yEyzZvaRmZtuso/MBAVdSfQZyc8mK3ck9yXA4yIyB2gJnHFXfXu7dteu690bRoyw9WJduly7PTLSPk6cuPY/+OrV1xdHTEwMP//8M/7+/pw9e5Y1a9ZQqFAhVq5cyT//+U8WLFhwzWt27tzJqlWrOHfuHA0aNODRRx
+9pm/4xo0b2bZtG1WrVqV169b89NNPhIeHM3z4cH744Qdq165NREREhjFVrFiRb775hsDAQHbv3k1ERATr1q1j+fL
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAXgAAAEICAYAAABVv+9nAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAA6UklEQVR4nO3dd3gUVffA8e9JILRQpIhAQDpICS0B6UU6SBWlCNIFRV6wIK8oIK+8NkR+iMCrFGkKqIBUiXQQkCaEYpBikC5EKRECJLm/P+4GIpAQQpLJbs7nefbJ7szszJksnL25c+dcMcaglFLK83g5HYBSSqnkoQleKaU8lCZ4pZTyUJrglVLKQ2mCV0opD6UJXimlPJQmeHVXIrJCRJ5L6m2dJCKhItIwGfZrRKS46/lkEXkrIdsm4jhdRCQosXHGs996InIiqfernJfO6QBU0hGR8FgvMwPXgCjX6+eNMXMSui9jTLPk2NbTGWP6JcV+RKQw8BuQ3hgT6dr3HCDBn6FSmuA9iDHGN+a5iIQCvY0xq27fTkTSxSQNpZTn0i6aNCDmT3AReV1EzgDTReQhEVkqIudE5C/Xc79Y71knIr1dz7uLyCYRGePa9jcRaZbIbYuIyAYRuSwiq0TkUxGZHUfcCYnxPyLyo2t/QSKSO9b6riJyTETCRGRYPL+faiJyRkS8Yy1rKyLBrudVRWSLiFwQkdMiMkFEfOLY1xci8k6s16+53nNKRHretm0LEflZRC6JyHERGRlr9QbXzwsiEi4i1WN+t7HeX0NEtovIRdfPGgn93cRHRB5zvf+CiOwXkVax1jUXkQOufZ4UkVddy3O7Pp8LIvKniGwUEc0vDtMPIO14BMgJPAr0xX72012vCwFXgQnxvL8acBDIDXwATBURScS2XwLbgFzASKBrPMdMSIydgR7Aw4APEJNwygCTXPvP7zqeH3dhjPkJ+BtocNt+v3Q9jwIGu86nOvAE8EI8ceOKoakrnkZACeD2/v+/gW5ADqAF0F9E2rjW1XH9zGGM8TXGbLlt3zmBZcB417mNBZaJSK7bzuGO3809Yk4PLAGCXO97CZgjIqVcm0zFdvdlBcoBa1zLXwFOAHmAvMAbgNZBcZgm+LQjGhhhjLlmjLlqjAkzxnxrjLlijLkMjAbqxvP+Y8aYz40xUcAMIB/2P3KCtxWRQkAgMNwYc90YswlYHNcBExjjdGPMr8aYq8B8oKJr+VPAUmPMBmPMNeAt1+8gLl8BnQBEJCvQ3LUMY8xOY8xWY0ykMSYU+N9d4ribp13x7TPG/I39Qot9fuuMMXuNMdHGmGDX8RKyX7BfCIeMMbNccX0FhABPxtomrt9NfB4HfIH3XJ/RGmAprt8NcAMoIyLZjDF/GWN2xVqeD3jUGHPDGLPRaKErx2mCTzvOGWMiYl6ISGYR+Z+rC+MStksgR+xuituciXlijLnieup7n9vmB/6MtQzgeFwBJzDGM7GeX4kVU/7Y+3Yl2LC4joVtrbcTkQxAO2CXMeaYK46Sru6HM644/ottzd/LP2IAjt12ftVEZK2rC+oi0C+B+43Z97Hblh0DCsR6Hdfv5p4xG2NifxnG3m977JffMRFZLyLVXcs/BA4DQSJyVESGJuw0VHLSBJ923N6aegUoBVQzxmTjVpdAXN0uSeE0kFNEMsdaVjCe7R8kxtOx9+06Zq64NjbGHMAmsmb8s3sGbFdPCFDCFccbiYkB280U25fYv2AKGmOyA5Nj7fderd9T2K6r2AoBJxMQ1732W/C2/vOb+zXGbDfGtMZ23yzC/mWAMeayMeYVY0xRoBXwsog88YCxqAekCT7tyort077g6s8dkdwHdLWIdwAjRcTH1fp7Mp63PEiM3wAtRaSW64LoKO797/1L4F/YL5Kvb4vjEhAuIqWB/gmMYT7QXUTKuL5gbo8/K/YvmggRqYr9YolxDtulVDSOfS8HSopIZxFJJyLPAGWw3SkP4idsa3+IiK
QXkXrYz2iu6zPrIiLZjTE3sL+TaAARaSkixV3XWi5ir1vE1yWmUoAm+LRrHJAJOA9sBb5PoeN2wV6oDAPeAeZhx+vfzTgSGaMxZj/wIjZpnwb+wl4EjE9MH/gaY8z5WMtfxSbfy8DnrpgTEsMK1zmswXZfrLltkxeAUSJyGRiOqzXseu8V7DWHH10jUx6/bd9hQEvsXzlhwBCg5W1x3zdjzHVsQm+G/b1PBLoZY0Jcm3QFQl1dVf2wnyfYi8irgHBgCzDRGLP2QWJRD070OohykojMA0KMMcn+F4RSaY224FWKEpFAESkmIl6uYYStsX25SqkkpneyqpT2CLAAe8HzBNDfGPOzsyEp5Zm0i0YppTyUdtEopZSHcqyLJnfu3KZw4cJOHV4ppdzSzp07zxtj8iRkW8cSfOHChdmxY4dTh1dKKbckIrffwRyne3bRiEhB1+3UB1yV5f51l23quSra7XY9ht9v0EoppZJWQlrwkcArxphdriJMO0XkB9et3bFtNMa0TPoQlVJKJcY9W/DGmNMxFeNcFf1+4Z8FjZRSSqVC99UHL3YasUrYehW3qy4ie7DFil513Sp++/v7YmuRU6jQ7XWXlFLJ7caNG5w4cYKIiIh7b6wclTFjRvz8/EifPn2i95HgBC8ivsC3wCBjzKXbVu/C1oEOF5Hm2DsTS9y+D2PMZ8BnAAEBAToAX6kUduLECbJmzUrhwoWJe74W5TRjDGFhYZw4cYIiRYokej8JGgfvmuXlW2COMWbBXYK5ZIwJdz1fDqRP6PRgSqmUExERQa5cuTS5p3IiQq5cuR74L62EjKIR7DRdvxhjxsaxzSMxU7K5yp56Ef/kCkoph2hydw9J8TklpIumJrZE6F4R2e1a9gauyQuMMZOx06P1F5FIbP3uju48XZcxhhl7ZpA3S14aF2uMt1dckxwppVTqdc8E75o3M96vEmPMBOKfsNltXI+6Tu/FvZkVPAuAAlkL0L1id3pU7EGxnMUcjk4p9xYWFsYTT9iJns6cOYO3tzd58tibMrdt24aPj0+c792xYwczZ85k/Pjx8R6jRo0abN68+YFjXbduHWPGjGHp0gedQ8U5Wk0ylgsRF2g3rx1rQ9cyqt4oyuQpw7Td03h307uM3jiaeoXr0atSL9o91o7M6TPfe4dKqX/IlSsXu3fvBmDkyJH4+vry6quv3lwfGRlJunR3T0sBAQEEBATc8xhJkdw9hRYbczl24Rg1p9Vk0++bmNV2Fm/VfYv2ZdqzrPMyjg06xjv13+H3i7/TdWFX8n+Un/5L+7Pj1A7cuCdKqVShe/fu9OvXj2rVqjFkyBC2bdtG9erVqVSpEjVq1ODgwYOAbVG3bGnvpRw5ciQ9e/akXr16FC1a9B+tel9f35vb16tXj6eeeorSpUvTpUuXm/9fly9fTunSpalSpQoDBw68ud+4/Pnnn7Rp0wZ/f38ef/xxgoODAVi/fj0VK1akYsWKVKpUicuXL3P69Gnq1KlDxYoVKVeuHBs3bkzy31lCaQse2HV6Fy2+bMHVG1dZ+exK6hep/4/1ftn8GFZnGP+u/W82HNvA1J+n8sWeL5i8czLlHy5Pr0q9eNb/WXJljnNOZ6VSpXr17lz29NPwwgtw5Qo0b37n+u7d7eP8eXjqqX+uW7cucXGcOHGCzZs34+3tzaVLl9i4cSPp0qVj1apVvPHGG3z77bd3vCckJIS1a9dy+fJlSpUqRf/+/e8YM/7zzz+zf/9+8ufPT82aNfnxxx8JCAjg+eefZ8OGDRQpUoROnTrdM74RI0ZQqVIlFi1axJo1a+jWrRu7d+9mzJgxfPrpp9SsWZPw8HAyZszIZ599RpMmTRg2bBhRUVFcuXIlcb+UJJDmW/DLDy2nzvQ6+Hj78GPPH+9I7rF5iRf1CtdjVttZnH7lNJNaTCJDugwMWjmI/GPz0/nbzly6dvstAkqpe+nQoQPe3nYww8WLF+nQoQPlypVj8ODB7N9/xz2TALRo0Y
IMGTKQO3duHn74Yc6ePXvHNlWrVsXPzw8vLy8qVqxIaGgoISEhFC1a9Ob48oQk+E2bNtG1a1cAGjRoQFhYGJcuXaJ
2022-04-05 20:38:05 +00:00
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"def plot_training_analysis():\n",
2022-04-05 21:31:33 +00:00
" acc = history.history[\"accuracy\"]\n",
" val_acc = history.history[\"val_accuracy\"]\n",
" loss = history.history[\"loss\"]\n",
" val_loss = history.history[\"val_loss\"]\n",
2022-04-05 20:38:05 +00:00
"\n",
2022-04-05 21:31:33 +00:00
" epochs = range(len(loss))\n",
2022-04-05 20:38:05 +00:00
"\n",
2022-04-05 21:31:33 +00:00
" plt.plot(epochs, acc, \"b\", linestyle=\"--\", label=\"Training acc\")\n",
" plt.plot(epochs, val_acc, \"g\", label=\"Validation acc\")\n",
" plt.title(\"Training and validation accuracy\")\n",
" plt.legend()\n",
2022-04-05 20:38:05 +00:00
"\n",
2022-04-05 21:31:33 +00:00
" plt.figure()\n",
2022-04-05 20:38:05 +00:00
"\n",
2022-04-05 21:31:33 +00:00
" plt.plot(epochs, loss, \"b\", linestyle=\"--\", label=\"Training loss\")\n",
" plt.plot(epochs, val_loss, \"g\", label=\"Validation loss\")\n",
" plt.title(\"Training and validation loss\")\n",
" plt.legend()\n",
2022-04-05 20:38:05 +00:00
"\n",
2022-04-05 21:31:33 +00:00
" plt.show()\n",
2022-04-05 20:38:05 +00:00
"\n",
2022-04-05 21:31:33 +00:00
"\n",
"plot_training_analysis()\n"
]
}
],
"metadata": {
2022-04-05 20:38:05 +00:00
"interpreter": {
"hash": "e55666fbbf217aa3df372b978577f47b6009e2f78e2ec76a584f49cd54a1e62c"
},
"kernelspec": {
"display_name": ".env",
"language": "python",
"name": ".env"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.10"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}