#
#
#      0=================================0
#      |    Kernel Point Convolutions    |
#      0=================================0
#
#
# ----------------------------------------------------------------------------------------------------------------------
#
#      Callable script to visualize the deformable kernels of a trained network
#
# ----------------------------------------------------------------------------------------------------------------------
#
#      Hugues THOMAS - 06/03/2020
#


# ----------------------------------------------------------------------------------------------------------------------
#
#           Imports and global variables
#       \**********************************/
#

# Common libs
import os
import time

import numpy as np

# Dataset
from datasets.ModelNet40 import *
from datasets.S3DIS import *
from torch.utils.data import DataLoader

from utils.config import Config
from utils.visualizer import ModelVisualizer
from models.architectures import KPCNN, KPFCNN


# ----------------------------------------------------------------------------------------------------------------------
#
#           Utility functions
#       \**********************/
#


def model_choice(chosen_log):

    ###########################
    # Call the test initializer
    ###########################

    # Automatically retrieve the last trained model
    if chosen_log in ["last_ModelNet40", "last_ShapeNetPart", "last_S3DIS"]:

        # Dataset name
        test_dataset = "_".join(chosen_log.split("_")[1:])

        # List all training logs
        logs = np.sort(
            [
                os.path.join("results", f)
                for f in os.listdir("results")
                if f.startswith("Log")
            ]
        )

        # Find the last log of the asked dataset
        for log in logs[::-1]:
            log_config = Config()
            log_config.load(log)
            if log_config.dataset.startswith(test_dataset):
                chosen_log = log
                break

        if chosen_log in ["last_ModelNet40", "last_ShapeNetPart", "last_S3DIS"]:
            raise ValueError('No log of the dataset "' + test_dataset + '" found')

    # Check if log exists
    if not os.path.exists(chosen_log):
        raise ValueError("The given log does not exist: " + chosen_log)

    return chosen_log
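

# A minimal usage sketch (hypothetical arguments, assuming training logs are stored under "results/"):
#   model_choice("last_S3DIS")                         # -> path of the most recent S3DIS log
#   model_choice("results/Log_2020-04-23_19-42-18")    # -> returned unchanged if that folder exists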


# ----------------------------------------------------------------------------------------------------------------------
#
#           Main Call
#       \***************/
#


if __name__ == "__main__":

    ###############################
    # Choose the model to visualize
    ###############################

    #   Here you can choose which model you want to visualize with the variable chosen_log. Here are the possible values:
    #
    #       > 'last_XXX': Automatically retrieve the last trained model on dataset XXX
    #       > 'results/Log_YYYY-MM-DD_HH-MM-SS': Directly provide the path of a trained model

    chosen_log = "results/Log_2020-04-23_19-42-18"

    # Choose the index of the checkpoint to load OR None if you want to load the current checkpoint
    chkp_idx = None
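    # e.g. chkp_idx = 1 would load the second saved checkpoint in sorted order (hypothetical index)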

    # Optionally, you can choose which feature is visualized (index of the deformable convolution in the network)
    deform_idx = 0

    # Deal with 'last_XXX' choices
    chosen_log = model_choice(chosen_log)

    ############################
    # Initialize the environment
    ############################

    # Set which gpu is going to be used
    GPU_ID = "0"

    # Set GPU visible device
    os.environ["CUDA_VISIBLE_DEVICES"] = GPU_ID

    ###############
    # Previous chkp
    ###############

    # Find all checkpoints in the chosen training folder
    chkp_path = os.path.join(chosen_log, "checkpoints")
    chkps = [f for f in os.listdir(chkp_path) if f[:4] == "chkp"]
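    # chkps would then list the saved snapshots, e.g. ["chkp_0050.tar", "chkp_0100.tar", ...]
    # (hypothetical file names, depending on what the training run saved)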

    # Find which snapshot to restore
    if chkp_idx is None:
        chosen_chkp = "current_chkp.tar"
    else:
        chosen_chkp = np.sort(chkps)[chkp_idx]
    chosen_chkp = os.path.join(chosen_log, "checkpoints", chosen_chkp)
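    # e.g. with the choices above (chkp_idx = None), chosen_chkp resolves to
    # "results/Log_2020-04-23_19-42-18/checkpoints/current_chkp.tar"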

    # Initialize configuration class
    config = Config()
    config.load(chosen_log)

    ##################################
    # Change model parameters for test
    ##################################

    # Change parameters for the test here. For example, you can stop augmenting the input data.

    config.augment_noise = 0.0001
    config.batch_num = 1
    config.in_radius = 2.0
    config.input_threads = 0
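
    # Other attributes of the Config class could be overridden the same way, e.g.
    # (assumed attribute names, check utils/config.py for the exact list):
    # config.augment_scale_min = 0.99
    # config.augment_scale_max = 1.01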

    ##############
    # Prepare Data
    ##############

    print()
    print("Data Preparation")
    print("****************")

    # Initiate dataset
    if config.dataset.startswith("ModelNet40"):
        test_dataset = ModelNet40Dataset(config, train=False)
        test_sampler = ModelNet40Sampler(test_dataset)
        collate_fn = ModelNet40Collate
    elif config.dataset == "S3DIS":
        test_dataset = S3DISDataset(config, set="validation", use_potentials=True)
        test_sampler = S3DISSampler(test_dataset)
        collate_fn = S3DISCollate
    else:
        raise ValueError("Unsupported dataset : " + config.dataset)

    # Data loader
    test_loader = DataLoader(
        test_dataset,
        batch_size=1,
        sampler=test_sampler,
        collate_fn=collate_fn,
        num_workers=config.input_threads,
        pin_memory=True,
    )

    # Calibrate samplers
    test_sampler.calibration(test_loader, verbose=True)

    print("\nModel Preparation")
    print("*****************")

    # Define network model
    t1 = time.time()
    if config.dataset_task == "classification":
        net = KPCNN(config)
    elif config.dataset_task in ["cloud_segmentation", "slam_segmentation"]:
        net = KPFCNN(config, test_dataset.label_values, test_dataset.ignored_labels)
    else:
        raise ValueError(
            "Unsupported dataset_task for deformation visu: " + config.dataset_task
        )

    # Define a visualizer class
    visualizer = ModelVisualizer(net, config, chkp_path=chosen_chkp, on_gpu=False)
    print("Done in {:.1f}s\n".format(time.time() - t1))

    print("\nStart visualization")
    print("*******************")

    # Visualization of the deformable kernels
    visualizer.show_deformable_kernels(net, test_loader, config, deform_idx)
|