mirror of https://github.com/Laurent2916/REVA-QCAV.git
synced 2024-11-09 15:02:03 +00:00

chore: move notebooks in parent folder

Former-commit-id: 56075c7a86305a7a9644dfaacd5bdcb62a863baf [formerly 35967dee0a3c159669b50e2823f36ff25e036f1e] Former-commit-id: b98a086c0ea8970562055cc3ad1a025a77b94b35

This commit is contained in:
parent b1d7369188
commit 5fe7ceb306
@@ -7,7 +7,7 @@ repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: "v4.1.0"
     hooks:
-      - id: check-added-large-files
+      # - id: check-added-large-files
       - id: check-executables-have-shebangs
       - id: check-merge-conflict
       - id: check-symlinks
@@ -1 +0,0 @@
-ecf0b9ce39e210bc605fd3eab9db8b1215c35fda
@@ -1,99 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from PIL import Image\n",
-    "\n",
-    "import albumentations as A\n",
-    "import torchvision.transforms as T\n",
-    "\n",
-    "import numpy as np\n",
-    "from utils import RandomPaste\n",
-    "from data.dataset import SyntheticDataset\n",
-    "\n",
-    "from pathlib import Path\n",
-    "\n",
-    "from joblib import Parallel, delayed\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "transform = A.Compose(\n",
-    "    [\n",
-    "        A.Resize(512, 512),\n",
-    "        A.Flip(),\n",
-    "        A.ColorJitter(),\n",
-    "        RandomPaste(5, \"/media/disk1/lfainsin/SPHERES/\"),\n",
-    "        A.GaussianBlur(),\n",
-    "        A.ISONoise(),\n",
-    "    ],\n",
-    ")\n",
-    "\n",
-    "dataset = SyntheticDataset(image_dir=\"/media/disk1/lfainsin/BACKGROUND/\", transform=transform)\n",
-    "transform = T.ToPILImage()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "def render(i, image, mask):\n",
-    "\n",
-    "    image = transform(image)\n",
-    "    mask = transform(mask)\n",
-    "\n",
-    "    path = f\"/media/disk1/lfainsin/TRAIN_prerender/{i}/\"\n",
-    "    Path(path).mkdir(parents=True, exist_ok=True)\n",
-    "    \n",
-    "    image.save(f\"{path}/image.jpg\")\n",
-    "    mask.save(f\"{path}/MASK.PNG\")\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "Parallel(n_jobs=-1)(delayed(render)(i, image, mask) for i, (image, mask) in enumerate(dataset))\n"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3.8.0 ('.venv': poetry)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.8.0"
-  },
-  "orig_nbformat": 4,
-  "vscode": {
-   "interpreter": {
-    "hash": "dc80d2c03865715c8671359a6bf138f6c8ae4e26ae025f2543e0980b8db0ed7e"
-   }
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
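For context, the four cells of the notebook removed above (and re-added further down as src/prerender.ipynb) amount to the following standalone script. This is only a sketch assembled from the notebook's own code: RandomPaste and SyntheticDataset are project-specific classes from utils and data.dataset, the /media/disk1/lfainsin/... paths are the machine-specific ones used in the notebook, and the notebook's second assignment to the name `transform` is renamed to to_pil here to avoid the name reuse.

# Sketch only: the removed notebook's cells combined into one script.
from pathlib import Path

import albumentations as A
import torchvision.transforms as T
from joblib import Parallel, delayed

from utils import RandomPaste               # project-specific albumentations transform
from data.dataset import SyntheticDataset   # project-specific dataset

# Augmentation pipeline applied by the dataset (as in the notebook).
transform = A.Compose(
    [
        A.Resize(512, 512),
        A.Flip(),
        A.ColorJitter(),
        RandomPaste(5, "/media/disk1/lfainsin/SPHERES/"),
        A.GaussianBlur(),
        A.ISONoise(),
    ],
)

dataset = SyntheticDataset(image_dir="/media/disk1/lfainsin/BACKGROUND/", transform=transform)
to_pil = T.ToPILImage()  # the notebook reuses the name `transform` for this


def render(i, image, mask):
    # Convert the augmented pair back to PIL images and write one folder per sample.
    image = to_pil(image)
    mask = to_pil(mask)

    path = f"/media/disk1/lfainsin/TRAIN_prerender/{i}/"
    Path(path).mkdir(parents=True, exist_ok=True)

    image.save(f"{path}/image.jpg")
    mask.save(f"{path}/MASK.PNG")


# Pre-render every sample in parallel; n_jobs=-1 uses all CPU cores.
Parallel(n_jobs=-1)(delayed(render)(i, image, mask) for i, (image, mask) in enumerate(dataset))

Each (image, mask) pair is augmented once and written to its own numbered folder, which is what makes the later training runs cheaper than augmenting on the fly.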
src/predict.ipynb.REMOVED.git-id (1 line) Normal file
@@ -0,0 +1 @@
+c1a69536ef50112fdf8384eed234407b31a1dfdb
@@ -1,80 +0,0 @@
-import argparse
-import logging
-
-import albumentations as A
-import numpy as np
-import onnx
-import onnxruntime
-from albumentations.pytorch import ToTensorV2
-from PIL import Image
-
-
-def get_args():
-    parser = argparse.ArgumentParser(
-        description="Predict masks from input images",
-    )
-    parser.add_argument(
-        "--model",
-        "-m",
-        default="model.pth",
-        metavar="FILE",
-        help="Specify the file in which the model is stored",
-    )
-    parser.add_argument(
-        "--input",
-        "-i",
-        metavar="INPUT",
-        help="Filenames of input images",
-        required=True,
-    )
-    parser.add_argument(
-        "--output",
-        "-o",
-        metavar="OUTPUT",
-        help="Filenames of output images",
-    )
-
-    return parser.parse_args()
-
-
-def sigmoid(x):
-    return 1 / (1 + np.exp(-x))
-
-
-if __name__ == "__main__":
-    args = get_args()
-
-    logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
-
-    onnx_model = onnx.load(args.model)
-    onnx.checker.check_model(onnx_model)
-
-    ort_session = onnxruntime.InferenceSession(args.model)
-
-    def to_numpy(tensor):
-        return tensor.detach().cpu().numpy() if tensor.requires_grad else tensor.cpu().numpy()
-
-    img = Image.open(args.input).convert("RGB")
-
-    logging.info(f"Preprocessing image {args.input}")
-    transform = A.Compose(
-        [
-            A.ToFloat(max_value=255),
-            ToTensorV2(),
-        ],
-    )
-    aug = transform(image=np.asarray(img))
-    img = aug["image"]
-
-    logging.info(f"Predicting image {args.input}")
-    img = img.unsqueeze(0)
-
-    # compute ONNX Runtime output prediction
-    ort_inputs = {ort_session.get_inputs()[0].name: to_numpy(img)}
-    ort_outs = ort_session.run(None, ort_inputs)
-
-    img_out_y = ort_outs[0]
-
-    img_out_y = Image.fromarray(np.uint8((img_out_y[0] * 255.0).clip(0, 255)[0]), mode="L")
-
-    img_out_y.save(args.output)
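As a reading aid, here is the inference path of the deleted predict.py wrapped in a reusable function. This is a sketch, not part of the commit: the original script also validates the model with onnx.checker before creating the session (omitted here), and it defines sigmoid() without ever calling it, so the call below is an assumption that only matters if the exported model returns logits rather than probabilities.

# Sketch only: the removed script's ONNX inference path as a function.
import albumentations as A
import numpy as np
import onnxruntime
from albumentations.pytorch import ToTensorV2
from PIL import Image


def sigmoid(x):
    return 1 / (1 + np.exp(-x))


def predict_mask(model_path, image_path):
    ort_session = onnxruntime.InferenceSession(model_path)

    # Same preprocessing as the removed script: scale to [0, 1], convert to CHW, add a batch axis.
    transform = A.Compose([A.ToFloat(max_value=255), ToTensorV2()])
    image = np.asarray(Image.open(image_path).convert("RGB"))
    batch = transform(image=image)["image"].unsqueeze(0)

    ort_inputs = {ort_session.get_inputs()[0].name: batch.numpy()}
    mask = ort_session.run(None, ort_inputs)[0]

    mask = sigmoid(mask)  # assumption: only needed if the model outputs logits
    return Image.fromarray(np.uint8((mask[0] * 255.0).clip(0, 255)[0]), mode="L")

A hypothetical call, with file names made up for illustration: predict_mask("model.onnx", "photo.jpg").save("mask.png"). The removed script exposed the same behaviour through its --model, --input and --output flags.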
src/prerender.ipynb (119 lines) Normal file
@@ -0,0 +1,119 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "/home/laurent_fainsin/unet/.venv/lib/python3.8/site-packages/tqdm/auto.py:22: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
+      " from .autonotebook import tqdm as notebook_tqdm\n"
+     ]
+    },
+    {
+     "ename": "ImportError",
+     "evalue": "cannot import name 'SyntheticDataset' from 'data.dataset' (/home/laurent_fainsin/unet/src/data/dataset.py)",
+     "output_type": "error",
+     "traceback": [
+      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
+      "\u001b[0;31mImportError\u001b[0m Traceback (most recent call last)",
+      "\u001b[1;32m/home/laurent_fainsin/unet/src/prerender.ipynb Cell 1\u001b[0m in \u001b[0;36m<cell line: 8>\u001b[0;34m()\u001b[0m\n\u001b[1;32m <a href='vscode-notebook-cell://ssh-remote%2Bdld2/home/laurent_fainsin/unet/src/prerender.ipynb#W0sdnNjb2RlLXJlbW90ZQ%3D%3D?line=5'>6</a>\u001b[0m \u001b[39mimport\u001b[39;00m \u001b[39mnumpy\u001b[39;00m \u001b[39mas\u001b[39;00m \u001b[39mnp\u001b[39;00m\n\u001b[1;32m <a href='vscode-notebook-cell://ssh-remote%2Bdld2/home/laurent_fainsin/unet/src/prerender.ipynb#W0sdnNjb2RlLXJlbW90ZQ%3D%3D?line=6'>7</a>\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mutils\u001b[39;00m \u001b[39mimport\u001b[39;00m RandomPaste\n\u001b[0;32m----> <a href='vscode-notebook-cell://ssh-remote%2Bdld2/home/laurent_fainsin/unet/src/prerender.ipynb#W0sdnNjb2RlLXJlbW90ZQ%3D%3D?line=7'>8</a>\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mdata\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mdataset\u001b[39;00m \u001b[39mimport\u001b[39;00m SyntheticDataset\n\u001b[1;32m <a href='vscode-notebook-cell://ssh-remote%2Bdld2/home/laurent_fainsin/unet/src/prerender.ipynb#W0sdnNjb2RlLXJlbW90ZQ%3D%3D?line=9'>10</a>\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mpathlib\u001b[39;00m \u001b[39mimport\u001b[39;00m Path\n\u001b[1;32m <a href='vscode-notebook-cell://ssh-remote%2Bdld2/home/laurent_fainsin/unet/src/prerender.ipynb#W0sdnNjb2RlLXJlbW90ZQ%3D%3D?line=11'>12</a>\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mjoblib\u001b[39;00m \u001b[39mimport\u001b[39;00m Parallel, delayed\n",
+      "\u001b[0;31mImportError\u001b[0m: cannot import name 'SyntheticDataset' from 'data.dataset' (/home/laurent_fainsin/unet/src/data/dataset.py)"
+     ]
+    }
+   ],
+   "source": [
+    "from PIL import Image\n",
+    "\n",
+    "import albumentations as A\n",
+    "import torchvision.transforms as T\n",
+    "\n",
+    "import numpy as np\n",
+    "from utils import RandomPaste\n",
+    "from data.dataset import SyntheticDataset\n",
+    "\n",
+    "from pathlib import Path\n",
+    "\n",
+    "from joblib import Parallel, delayed\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "transform = A.Compose(\n",
+    "    [\n",
+    "        A.Resize(512, 512),\n",
+    "        A.Flip(),\n",
+    "        A.ColorJitter(),\n",
+    "        RandomPaste(5, \"/media/disk1/lfainsin/SPHERES/\"),\n",
+    "        A.GaussianBlur(),\n",
+    "        A.ISONoise(),\n",
+    "    ],\n",
+    ")\n",
+    "\n",
+    "dataset = SyntheticDataset(image_dir=\"/media/disk1/lfainsin/BACKGROUND/\", transform=transform)\n",
+    "transform = T.ToPILImage()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 12,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def render(i, image, mask):\n",
+    "\n",
+    "    image = transform(image)\n",
+    "    mask = transform(mask)\n",
+    "\n",
+    "    path = f\"/media/disk1/lfainsin/TRAIN_prerender/{i}/\"\n",
+    "    Path(path).mkdir(parents=True, exist_ok=True)\n",
+    "    \n",
+    "    image.save(f\"{path}/image.jpg\")\n",
+    "    mask.save(f\"{path}/MASK.PNG\")\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 13,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "Parallel(n_jobs=-1)(delayed(render)(i, image, mask) for i, (image, mask) in enumerate(dataset))\n"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3.8.10 ('.venv': poetry)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.8.10"
+  },
+  "orig_nbformat": 4,
+  "vscode": {
+   "interpreter": {
+    "hash": "177da549c0efb5403fabf2463095e744d5ed7be0fe3e2a9b077a1765a2012433"
+   }
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
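The traceback captured in the cell output above shows the relocated notebook failing because data.dataset exported no SyntheticDataset at the time the cell ran. For readers of the diff, the skeleton below makes the interface the notebook expects explicit; it is purely hypothetical, inferred from how the class is used (constructed with image_dir= and transform=, iterated as (image, mask) pairs), and the repository's real implementation, including how masks are produced, is not shown here and certainly differs.

# Hypothetical skeleton, NOT the project's actual class: only the interface
# that src/prerender.ipynb expects from data.dataset.SyntheticDataset.
from pathlib import Path

import numpy as np
from PIL import Image
from torch.utils.data import Dataset


class SyntheticDataset(Dataset):
    def __init__(self, image_dir, transform=None):
        self.images = sorted(Path(image_dir).glob("**/*.jpg"))  # assumption: jpg backgrounds
        self.transform = transform

    def __len__(self):
        return len(self.images)

    def __getitem__(self, index):
        image = np.asarray(Image.open(self.images[index]).convert("RGB"))
        mask = np.zeros(image.shape[:2], dtype=np.uint8)  # placeholder: real masks come from RandomPaste

        if self.transform is not None:
            augmented = self.transform(image=image, mask=mask)
            image, mask = augmented["image"], augmented["mask"]

        return image, mask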
@@ -28,7 +28,7 @@ WORKERS:
   value: 16
 
 EPOCHS:
-  value: 50
+  value: 100
 TRAIN_BATCH_SIZE:
   value: 10
 VALID_BATCH_SIZE: