projet-reseaux-profond/notebook.ipynb

{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
"import PIL.Image\n",
"import glob\n",
"import os\n",
"\n",
"import matplotlib.pyplot as plt\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"/tmp/deepl/dataset\n",
"['octane', 'werewolf', 'breakout', 'aftershock']\n"
]
}
],
"source": [
"IMAGE_SIZE = (400, 150, 3)\n",
"RESIZED_SIZE = (100, 50, 3)\n",
"RESIZED_SIZE_PIL = (RESIZED_SIZE[1], RESIZED_SIZE[0], RESIZED_SIZE[2])\n",
"DATASET_PATH = \"./dataset/\"\n",
"DATASET_PATH = os.path.abspath(DATASET_PATH)\n",
"CLASSES = glob.glob(f\"{DATASET_PATH}/*\")\n",
"CLASSES = list(map(lambda f: f.split(\"/\")[-1], CLASSES))\n",
"\n",
"print(DATASET_PATH)\n",
"print(CLASSES)"
]
},
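{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Note on conventions: PIL's Image.size is (width, height), while a NumPy array\n",
"# built from an image is indexed (rows, columns, channels). RESIZED_SIZE_PIL\n",
"# above swaps the first two axes accordingly; this cell just double-checks that.\n",
"assert RESIZED_SIZE_PIL == (50, 100, 3)\n",
"print(f\"PIL (width, height): {RESIZED_SIZE[:-1]} -> array shape: {RESIZED_SIZE_PIL}\")"
]
},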
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"def load_data():\n",
" # Récupération des fichiers\n",
" files = glob.glob(f\"{DATASET_PATH}/**/*.jpg\", recursive = True)\n",
"\n",
" # Initialise les structures de données\n",
" x = np.zeros((len(files), *RESIZED_SIZE_PIL))\n",
" y = np.zeros((len(files), 1))\n",
"\n",
" # print(f\"x.shape = {x.shape}\")\n",
"\n",
" for i, path in enumerate(files):\n",
" # Lecture de l'image\n",
" img = PIL.Image.open(path)\n",
"\n",
" # print(f\"img.size = {img.size}\")\n",
"\n",
" # Redimensionnement de l'image\n",
" img = img.resize(RESIZED_SIZE[:-1], PIL.Image.ANTIALIAS)\n",
"\n",
" # print(f\"img.size = {img.size}\")\n",
"\n",
" test = np.asarray(img)\n",
"\n",
" # print(f\"test.shape = {test.shape}\")\n",
"\n",
" # Remplissage de la variable x\n",
" x[i] = test\n",
"\n",
" # On récupère l'index dans le path\n",
" class_label = path.split(\"/\")[-2]\n",
"\n",
" # On récupère le numéro de la classe à partir du string\n",
" class_label = CLASSES.index(class_label)\n",
" \n",
" # Remplissage de la variable y\n",
" y[i] = class_label\n",
"\n",
" return x, y"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"x, y = load_data()\n",
"x = x / 255"
]
},
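{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sanity check on the loaded data: overall shapes and per-class image counts,\n",
"# using only what load_data() returned above.\n",
"print(f\"x.shape = {x.shape}, y.shape = {y.shape}\")\n",
"labels, counts = np.unique(y, return_counts=True)\n",
"for label, count in zip(labels, counts):\n",
"    print(f\"{CLASSES[int(label)]}: {count} images\")"
]
},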
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAxoAAAGoCAYAAADB3ZMFAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9adBtS5rfhf2ezFxr7eEdznjHunWrqqu61WoNLTFIYGEwxhiHBQ5/AXkIQrIDmQ84PAgDJgIbh40sY4QjHA5CSBFyOAIsKWwctiwIkBqhsCYaQWvoQd2qoe+tuuMZ3vMOe1hDZj7+kJlrrf2ec25Xdd861VLvp7vueffea16ZTz7//zOJqnKUoxzlKEc5ylGOcpSjHOUon6eYH/QFHOUoRznKUY5ylKMc5ShH+TtPjkDjKEc5ylGOcpSjHOUoRznK5y5HoHGUoxzlKEc5ylGOcpSjHOVzlyPQOMpRjnKUoxzlKEc5ylGO8rnLEWgc5ShHOcpRjnKUoxzlKEf53OUINI5ylKMc5ShHOcpRjnKUo3zucgQav8pERP53IvJERD55Bed6T0T+kc/5mP+qiPw7n+cxj3KUoxzlKEc5ylGO8refHIHGryIRkS8Cvw/49ar6hoj8bhH5Cz/o6zrKUY5ylBeJiHxJRFRE3A/6Wo5ylKP8nS1HffO3pxyBxq8u+SLwVFUffR4HO07GoxzlKEc5ylGOcpSj/KDkCDR+ACIi/5KIfFNEbkTk50Tkv51DmP4M8JaIbETkTwB/CPj78ufLvG8jIv+GiHxbRD4VkT8kIsv82z8kIh+IyL+YQ6/+ryLyQET+lIhcisiFiPx5EZm/9x8Xkb8hIlci8idEZDG7zn9GRL6R9/uTIvLW7LcfE5E/k3/7VET+5RfcZyUif0xE/j0Rqb8vD/MoRznK5yYi8qMi8ueyvvhZEfkn8vdLEfmDIvJ+1hV/Ieud/1/e9TLrqb9PRH5IRP6siDzNYaD/rojcmZ3jPRH55z9D7/xOEflr+Rr+koj8plf6EI5ylKO8Ejnqm18bcgQaPxj5JvAPAOfA/wb4d4CfBf4bwEeqeqKq/xTwzwJ/OX++k/f9A8APAz8OfBV4G/hfzY79BnAPeBf4vaRQrA+Ah8DrwL8M6Gz7fxL4x4AvA78J+N0AIvIPA//7/PubwPvAH8+/nQI/AfyHwFv5Ov7j+Q1mpfD/Bjrgn1TV/nt+Skc5ylFemYhIBfx/gT8NvAb8j4F/V0R+BPg3gL8L+PtJ+uVfACLwX86738l66i8DQtIdbwE/CrwD/Ku3TvcyvfNbgD8K/I+A+8C/DfxJEWk+9xs+ylGO8gOTo775tSNHoPEDEFX9f6jqR6oaVfVPAF8H/t5faj8RERJ4+J+p6oWq3gC/H/hds80i8L9W1U5V98BAAgrvquqgqn9eVedA4/+cr+WCNOl/PH//3wP+qKr+lKp2wP+S5F35EvA7gU9U9Q+qaquqN6r6k7NjnpFAyDeB36Oq4Xt7Qkc5ylF+APLbgRPgD6hqr6p/FvhTJF3wPwD+J6r6oaoGVf1LWS88J6r6DVX9M1kHPQb+TeAfvLXZy/TO7wX+bVX9yXye/xuJrPjtn/fNHuUoR/mBylHf/BqRI9D4AYiI/NMzV90l8BuAB9/Frg+BFfBfzPb9D/P3RR6rajv7/H8EvgH8aRH5loj8S7eOOa9utSNNfEjswPvlB1XdAE9JHpR3SCDiZfLbSazBH7gFao5ylKP86pW3gO+oapx99z5pvi/47Dk/ioi8LiJ/XEQ+FJFrksf2tn57md55F/h9Rb9lHfdOvrajHOUof+fIUd/8GpEj0HjFIiLvAn8E+OeA+zkk6mdI7r/bcttIfwLsgR9T1Tv5f+eqevKyfbK34fep6leAfwL4n4vIf/W7uNSPSJOwXPea5Fr8EPgO8JXP2PdPk1yZ/7GIvP5dnOsoRznKD14+At65lcP1RdJ8b4EfesE+LyISfn/+/jeq6hnw3+fF+u1F8h3gX5vptzuqulLVP/Zd38VRjnKUvx3kqG9+jcgRaLx6WZMmxWMAEfk9JI/Gi+RT4AslkToj/z8C/J9E5LW8/9si8l9/2clyotNXc9jVFRBI4VW/lPwx4PeIyI/neMXfD/ykqr5Hcm++KSL/U0nJ6aci8tvmO6vqvw7830lg47vx1hzlKEf5wcpPkti+f0FSIYd/CPjHSfP4jwL/poi8JSI2J2E2JD0WOSQeToENcCUibwP/i+/hGv4I8M+KyG+TJGsR+W/mvLCjHOUof+fIUd/8GpEj0HjFoqo/B/xB4C+TgMRvBP7iSzb/s6Qk8U9E5En+7l8khUL9p9lN+BPAj3zGKb+Wt9nkc/5bqvqffBfX+RPAvwL8e8DHJHbhd+XfboD/GkkpfELKMfmvvOAY/1tSQvhPiMi9X+qcRznKUX5wkgs2/OOkohRPgH8L+KdV9eeBfx74aeCvABfA/wEwqroD/jXgL+bQg99OKnDxW0nExr8P/L++h2v4z4F/Bvi/AM9Iuu53fx73d5SjHOVXjxz1za8dkWMI/VGOcpSjHOUoRznKUY5ylM9bjh6NoxzlKEc5ylGOcpSjHOUon7scgcZRjnKUoxzlKEc5ylGOcpTPXX5FQENE/jER+QVJ3aNvl009ylGOcpTPTY765ihHOcqrkKOuOcpRPj/5ZedoiIgF/hYpKfgDUtLOfycnOx/lKEc5yucmR31zlKMc5VXIUdcc5Sifr7hfwb5/L/ANVf0WgIj8ceC/Bbx0Mi6bha6WSy53LaqKEcnVjgvYmZc+lvL/828O/5r9KLeqJotI+k4EGX9PfyPld8GY5NTRGNNVvAh3Sdq/7PNdFWiW8T+3thdEp2PFGGjbPaimk6iiKDFEfAjp2qfHgWr6j6sbXFVR1TXWuXG/F53xlYsqMUZ22y0heBKY1XSLgJh079YUh5pirWWxWIIYxBiY7vyzTzXbav7qhBe/ymlLHbf7pc+TxpKgGAGR9K8VwYhgDIhRpuEs6Qwx3XOI6WwhKk8vr9ls2x/wC/rbUr4nfbNsGl0tl1zvWqKCMTK+9dsPfxwDz6uVaZuZgpHnlU3ePY+TcqhR9wiCYKxB8zxF09iYRMeTC4IYwYiZ/fayES0Huu9F2ulAb96+9Oe/eNGfs/1/JUP3eyO1yh3P//3ejzId6/kvX3LEFz7mfBX6kn1un+AFx9Dx+cdx2Stj72X3JDLpKVPWxKx7RAQrZZuk+2MeVDGmY7734aMnqvrwJYc/yovle7dtFo2uFkXfKEbMpG9eqFAO5+Vzm8j07W19I6O+SQcvNslzto01qJLWX4XDvnjlWDO75iVz+7NmfLGrXvb5hZ/k8PM0o16wgmfbAVW894QY2O87YgiYcp8zW05meyebQ/ExYq2lrmuMc7i6odgjt6/ze9U3n60Nb7+3+acX63J9wffz3RT57hWglH2KDtG8b1noZPa8ZvaQCCZfr8n/s0YwCGKymUqkPF9V0Ch8enHN9Wb/wkfyKwEab5OanRT5APhttzcSkd9LavPOyWrFP/oP/AP8Rz/zD
frBs3YViOIlAoIxdr4f8z4uVtKN5l8xxmDMZBlYm19k/spVFjEG6wRjhMrYZBTatF/dVFhnWS4XaFR8P6CaDPxJbSetbo1iLFTOUTl3CItmKGC8ulsgw0i6chWDqLCIFiOWetGw2Vzx3jd/Fo2RyhlCjAyDZ7vd8uzZMwxQAcalaw+9J/aB1774Lndee4PX3/4Cp3fvEr1HY0TM9J4FQVSmxVEOlZbmyXY4br/7ZdzkpzQO5PwCgh/ou46f/qmf5OrZU2LwRFX84BER6qbGOcvZySIpCQ2cnJzyzrtfxjVr6vVdRByYanrQReeUK3/O2HvxlWuMqOrsvrPikgCiiMr4nMo7FxQrCbZFEZyAM9BYpbGBZQWLSrnTVJxUjsWJUi+gcYo1EAdDjMJuH/Beud4HOq9ctwP/+h/+/3zXz/coB/JL6ptDXbPkH/kd/yX+3E9/i857mqpGBXw28sxcyWZwWz4bEewcWCR0iRGTdIiYRBJkFWEqgzEGZ9PvlTNJOVubdI2rcNayWi2IRIbYE4P
"text/plain": [
"<Figure size 864x432 with 9 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"# Randomisation des indices et affichage de 9 images alétoires de la base d'apprentissage\n",
"indices = np.arange(x.shape[0])\n",
"np.random.shuffle(indices)\n",
"\n",
"plt.figure(figsize=(12, 6))\n",
"\n",
"for i in range(0, 3*3):\n",
" plt.subplot(3, 3, i+1)\n",
" plt.title(CLASSES[int(y[indices[i]])])\n",
" plt.imshow(x[indices[i]])\n",
"\n",
"plt.tight_layout()\n",
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"import tensorflow\n",
"from tensorflow.keras.models import Sequential\n",
"from tensorflow.keras.layers import InputLayer, Dense, Flatten, Conv2D, MaxPooling2D\n",
"from tensorflow.keras import optimizers"
]
},
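{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Optional: fix the NumPy and TensorFlow seeds so shuffling and weight\n",
"# initialisation are reproducible across runs (the recorded run below did not\n",
"# set seeds, so its exact numbers will differ).\n",
"np.random.seed(42)\n",
"tensorflow.random.set_seed(42)"
]
},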
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Model: \"sequential\"\n",
"_________________________________________________________________\n",
" Layer (type) Output Shape Param # \n",
"=================================================================\n",
" conv2d (Conv2D) (None, 49, 99, 32) 416 \n",
" \n",
" max_pooling2d (MaxPooling2D (None, 24, 49, 32) 0 \n",
" ) \n",
" \n",
" conv2d_1 (Conv2D) (None, 23, 48, 64) 8256 \n",
" \n",
" max_pooling2d_1 (MaxPooling (None, 11, 24, 64) 0 \n",
" 2D) \n",
" \n",
" flatten (Flatten) (None, 16896) 0 \n",
" \n",
" dense (Dense) (None, 50) 844850 \n",
" \n",
" dense_1 (Dense) (None, 4) 204 \n",
" \n",
"=================================================================\n",
"Total params: 853,726\n",
"Trainable params: 853,726\n",
"Non-trainable params: 0\n",
"_________________________________________________________________\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2022-03-30 11:46:58.052420: I tensorflow/core/platform/cpu_feature_guard.cc:151] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: AVX2 FMA\n",
"To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.\n",
"2022-03-30 11:46:58.671001: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1525] Created device /job:localhost/replica:0/task:0/device:GPU:0 with 1538 MB memory: -> device: 0, name: Quadro K620, pci bus id: 0000:03:00.0, compute capability: 5.0\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/5\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2022-03-30 11:47:00.113976: I tensorflow/stream_executor/cuda/cuda_dnn.cc:368] Loaded cuDNN version 8100\n",
"2022-03-30 11:47:00.378978: W tensorflow/stream_executor/gpu/asm_compiler.cc:111] *** WARNING *** You are using ptxas 10.1.243, which is older than 11.1. ptxas before 11.1 is known to miscompile XLA code, leading to incorrect results or invalid-address errors.\n",
"\n",
"You may not need to update to CUDA 11.1; cherry-picking the ptxas binary is often sufficient.\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"320/320 [==============================] - 6s 15ms/step - loss: 1.2562 - accuracy: 0.4588 - val_loss: 2.1222 - val_accuracy: 0.0000e+00\n",
"Epoch 2/5\n",
"320/320 [==============================] - 5s 15ms/step - loss: 1.0819 - accuracy: 0.8850 - val_loss: 2.3470 - val_accuracy: 0.0000e+00\n",
"Epoch 3/5\n",
"320/320 [==============================] - 5s 15ms/step - loss: 0.9304 - accuracy: 0.9375 - val_loss: 2.3578 - val_accuracy: 0.0000e+00\n",
"Epoch 4/5\n",
"320/320 [==============================] - 5s 15ms/step - loss: 0.7820 - accuracy: 0.9375 - val_loss: 2.1716 - val_accuracy: 0.0000e+00\n",
"Epoch 5/5\n",
"320/320 [==============================] - 5s 15ms/step - loss: 0.6454 - accuracy: 0.9375 - val_loss: 2.0182 - val_accuracy: 0.0000e+00\n"
]
}
],
"source": [
"model = Sequential()\n",
"\n",
"model.add(InputLayer(input_shape=RESIZED_SIZE_PIL))\n",
"\n",
"model.add(Conv2D(32, 2, activation=\"relu\"))\n",
"model.add(MaxPooling2D(pool_size=(2, 2)))\n",
"\n",
"model.add(Conv2D(64, 2, activation=\"relu\"))\n",
"model.add(MaxPooling2D(pool_size=(2, 2)))\n",
"\n",
"model.add(Flatten())\n",
"\n",
"model.add(Dense(50, activation=\"relu\"))\n",
"\n",
"model.add(Dense(4, activation=\"softmax\"))\n",
"\n",
"model.summary()\n",
"\n",
"adam = optimizers.Adam(learning_rate=3e-6)\n",
"model.compile(optimizer=adam, loss='sparse_categorical_crossentropy', metrics=['accuracy'])\n",
"history = model.fit(x, y, validation_split=0.2, epochs=5, batch_size=10)"
]
}
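,
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Plot the training curves recorded in `history` by model.fit above; a minimal\n",
"# sketch using the already-imported matplotlib.\n",
"plt.figure(figsize=(12, 4))\n",
"\n",
"plt.subplot(1, 2, 1)\n",
"plt.plot(history.history[\"loss\"], label=\"train\")\n",
"plt.plot(history.history[\"val_loss\"], label=\"validation\")\n",
"plt.title(\"Loss\")\n",
"plt.xlabel(\"epoch\")\n",
"plt.legend()\n",
"\n",
"plt.subplot(1, 2, 2)\n",
"plt.plot(history.history[\"accuracy\"], label=\"train\")\n",
"plt.plot(history.history[\"val_accuracy\"], label=\"validation\")\n",
"plt.title(\"Accuracy\")\n",
"plt.xlabel(\"epoch\")\n",
"plt.legend()\n",
"\n",
"plt.tight_layout()\n",
"plt.show()"
]
}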
],
"metadata": {
"kernelspec": {
"display_name": ".env",
"language": "python",
"name": ".env"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.10"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}