commit aee5494f32 (parent d1bb1ca36e)
@@ -4,15 +4,14 @@
 ### Models
 
-We provide pretrained weights for S3DIS dataset. The raw weights come with a parameter file describing the architecture and network hyperparameters. THe code can thus load the network automatically.
-
+We provide pretrained weights for the S3DIS dataset. The raw weights come with a parameter file describing the architecture and network hyperparameters. The code can thus load the network automatically.
 
 The instructions to run these models are in the S3DIS documentation, section [Test the trained model](./doc/scene_segmentation_guide.md#test-the-trained-model).
 
 | Name (link) | KPConv Type | Description | Score |
 |:-------------|:-------------:|:-----|:-----:|
 | [Light_KPFCNN](https://drive.google.com/file/d/14sz0hdObzsf_exxInXdOIbnUTe0foOOz/view?usp=sharing) | rigid | A network with small `in_radius` for light GPU consumption (~8GB) | 65.4% |
 | [Heavy_KPFCNN](https://drive.google.com/file/d/1ySQq3SRBgk2Vt5Bvj-0N7jDPi0QTPZiZ/view?usp=sharing) | rigid | A network with better performance but needing a bigger GPU (>18GB). | 66.4% |
 | [Deform_KPFCNN](https://drive.google.com/file/d/1ObGr2Srfj0f7Bd3bBbuQzxtjf0ULbpSA/view?usp=sharing) | deform | Deformable convolution network needing a big GPU (>20GB). | 67.3% |
 
 
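Editor's note on the "parameter file" mentioned above: it is what `Config.load` reads (the same call that appears in the `compare_trainings` hunk below), so restoring a pretrained model starts from pointing a config at the unzipped model folder. A minimal sketch, with the folder path hypothetical:

```python
from utils.config import Config

# Hypothetical path to an unzipped pretrained-model folder
chosen_log = 'results/Light_KPFCNN'

# The saved parameter file describes the architecture and hyperparameters,
# which is how the code can rebuild the network automatically
config = Config()
config.load(chosen_log)
print(config.architecture)  # layer list, e.g. ['simple', 'resnetb', ...]
```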
@@ -47,6 +47,22 @@ from datasets.SemanticKitti import SemanticKittiDataset
 # \***********************/
 #
 
+def listdir_str(path):
+
+    # listdir can return binary strings instead of decoded strings sometimes.
+    # This function ensures a steady behavior
+
+    f_list = []
+    for f in listdir(path):
+        try:
+            f = f.decode()
+        except (UnicodeDecodeError, AttributeError):
+            pass
+        f_list.append(f)
+
+    return f_list
+
+
 def running_mean(signal, n, axis=0, stride=1):
     signal = np.array(signal)
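Background on the quirk this helper works around (my illustration, not part of the commit): `os.listdir` mirrors the type of its argument, returning `bytes` entries when given a `bytes` path. An equivalent guard using the standard library's `os.fsdecode`, which decodes `bytes` with the filesystem encoding and passes `str` through unchanged:

```python
from os import listdir, fsdecode

def listdir_str_alt(path):
    # fsdecode(b'foo') -> 'foo'; fsdecode('foo') -> 'foo'
    return [fsdecode(f) for f in listdir(path)]
```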
@@ -148,7 +164,7 @@ def load_single_IoU(filename, n_parts):
 
 def load_snap_clouds(path, dataset, only_last=False):
 
-    cloud_folders = np.array([join(path, f) for f in listdir(path) if f.startswith('val_preds')])
+    cloud_folders = np.array([join(path, f) for f in listdir_str(path) if f.startswith('val_preds')])
     cloud_epochs = np.array([int(f.split('_')[-1]) for f in cloud_folders])
     epoch_order = np.argsort(cloud_epochs)
     cloud_epochs = cloud_epochs[epoch_order]
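A note on why the swap matters at this call site (my illustration): with `bytes` entries, `f.startswith('val_preds')` would raise rather than silently filter, since `bytes.startswith` only accepts a `bytes` prefix:

```python
try:
    b'val_preds_100'.startswith('val_preds')
except TypeError as e:
    print(e)  # startswith first arg must be bytes or a tuple of bytes, not str
```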
@@ -165,7 +181,7 @@ def load_snap_clouds(path, dataset, only_last=False):
                 Confs[c_i] += np.loadtxt(conf_file, dtype=np.int32)
 
         else:
-            for f in listdir(cloud_folder):
+            for f in listdir_str(cloud_folder):
                 if f.endswith('.ply') and not f.endswith('sub.ply'):
                     data = read_ply(join(cloud_folder, f))
                     labels = data['class']
@@ -176,7 +192,7 @@ def load_snap_clouds(path, dataset, only_last=False):
 
         # Erase ply to save disk memory
         if c_i < len(cloud_folders) - 1:
-            for f in listdir(cloud_folder):
+            for f in listdir_str(cloud_folder):
                 if f.endswith('.ply'):
                     remove(join(cloud_folder, f))
@@ -221,7 +237,7 @@ def compare_trainings(list_of_paths, list_of_labels=None):
 
         print(path)
 
-        if ('val_IoUs.txt' in [f for f in listdir(path)]) or ('val_confs.txt' in [f for f in listdir(path)]):
+        if ('val_IoUs.txt' in [f for f in listdir_str(path)]) or ('val_confs.txt' in [f for f in listdir_str(path)]):
             config = Config()
             config.load(path)
         else:
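As an aside (not part of the commit), the list comprehensions on this line are redundant; membership can be tested on the listing directly:

```python
# Equivalent, without the pass-through comprehensions
if 'val_IoUs.txt' in listdir_str(path) or 'val_confs.txt' in listdir_str(path):
    config = Config()
    config.load(path)
```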
@@ -704,7 +720,7 @@ def experiment_name_1():
     res_path = 'results'
 
     # Gather logs and sort by date
-    logs = np.sort([join(res_path, l) for l in listdir(res_path) if start <= l <= end])
+    logs = np.sort([join(res_path, l) for l in listdir_str(res_path) if start <= l <= end])
 
     # Give names to the logs (for plot legends)
     logs_names = ['name_log_1',
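A note on the `start <= l <= end` filter (my reading, assuming the repository's usual `Log_YYYY-MM-DD_HH-MM-SS` folder naming): zero-padded timestamps sort lexicographically in chronological order, so plain string comparison selects a date range:

```python
start = 'Log_2020-04-03_11-12-07'
end = 'Log_2020-04-07_15-30-17'
print(start <= 'Log_2020-04-05_19-17-59' <= end)  # True
```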
@@ -733,7 +749,7 @@ def experiment_name_2():
     res_path = 'results'
 
     # Gather logs and sort by date
-    logs = np.sort([join(res_path, l) for l in listdir(res_path) if start <= l <= end])
+    logs = np.sort([join(res_path, l) for l in listdir_str(res_path) if start <= l <= end])
 
     # Optionally add a specific log at a specific place in the log list
     logs = logs.astype('<U50')
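For context on `astype('<U50')` (my sketch, not part of the commit): it widens the array to 50-character unicode strings, so a longer path inserted afterwards is not truncated to the width of the existing entries:

```python
import numpy as np

logs = np.array(['results/Log_1'])               # dtype '<U13'
print(np.insert(logs, 0, 'results/Log_extra_long'))
# ['results/Log_e' 'results/Log_1']  -- truncated to 13 chars

logs = logs.astype('<U50')                       # widen first
print(np.insert(logs, 0, 'results/Log_extra_long'))
# ['results/Log_extra_long' 'results/Log_1']
```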
@@ -66,30 +66,6 @@ class S3DISConfig(Config):
     #########################
 
-    # # Define layers
-    # architecture = ['simple',
-    #                 'resnetb',
-    #                 'resnetb_strided',
-    #                 'resnetb',
-    #                 'resnetb',
-    #                 'resnetb_strided',
-    #                 'resnetb_deformable',
-    #                 'resnetb_deformable',
-    #                 'resnetb_deformable_strided',
-    #                 'resnetb_deformable',
-    #                 'resnetb_deformable',
-    #                 'resnetb_deformable_strided',
-    #                 'resnetb_deformable',
-    #                 'resnetb_deformable',
-    #                 'nearest_upsample',
-    #                 'unary',
-    #                 'nearest_upsample',
-    #                 'unary',
-    #                 'nearest_upsample',
-    #                 'unary',
-    #                 'nearest_upsample',
-    #                 'unary']
-
     # Define layers
     architecture = ['simple',
                     'resnetb',
                     'resnetb_strided',
|
@ -99,11 +75,11 @@ class S3DISConfig(Config):
|
|||
'resnetb',
|
||||
'resnetb',
|
||||
'resnetb_strided',
|
||||
'resnetb',
|
||||
'resnetb',
|
||||
'resnetb_strided',
|
||||
'resnetb',
|
||||
'resnetb',
|
||||
'resnetb_deformable',
|
||||
'resnetb_deformable',
|
||||
'resnetb_deformable_strided',
|
||||
'resnetb_deformable',
|
||||
'resnetb_deformable',
|
||||
'nearest_upsample',
|
||||
'unary',
|
||||
'nearest_upsample',
|
||||
|
@ -113,6 +89,30 @@ class S3DISConfig(Config):
|
|||
'nearest_upsample',
|
||||
'unary']
|
||||
|
||||
# Define layers
|
||||
# architecture = ['simple',
|
||||
# 'resnetb',
|
||||
# 'resnetb_strided',
|
||||
# 'resnetb',
|
||||
# 'resnetb',
|
||||
# 'resnetb_strided',
|
||||
# 'resnetb',
|
||||
# 'resnetb',
|
||||
# 'resnetb_strided',
|
||||
# 'resnetb',
|
||||
# 'resnetb',
|
||||
# 'resnetb_strided',
|
||||
# 'resnetb',
|
||||
# 'resnetb',
|
||||
# 'nearest_upsample',
|
||||
# 'unary',
|
||||
# 'nearest_upsample',
|
||||
# 'unary',
|
||||
# 'nearest_upsample',
|
||||
# 'unary',
|
||||
# 'nearest_upsample',
|
||||
# 'unary']
|
||||
|
||||
###################
|
||||
# KPConv parameters
|
||||
###################
|
||||
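The net effect of these three architecture hunks (my summary): the deformable variant becomes the active `architecture` list and the rigid one is kept as a comment, which lines up with the Deform_KPFCNN weights added to the README above. Since the config is just a list of block-name strings, the block kind can be told apart by name; a hypothetical helper to illustrate:

```python
# Hypothetical helper, not from the repository
def is_deformable(block_name):
    return 'deformable' in block_name

print(is_deformable('resnetb_deformable_strided'))  # True
print(is_deformable('resnetb_strided'))             # False
```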
|
@ -121,7 +121,7 @@ class S3DISConfig(Config):
|
|||
num_kernel_points = 15
|
||||
|
||||
# Radius of the input sphere (decrease value to reduce memory cost)
|
||||
in_radius = 1.8
|
||||
in_radius = 1.2
|
||||
|
||||
# Size of the first subsampling grid in meter (increase value to reduce memory cost)
|
||||
first_subsampling_dl = 0.03
|
||||
|
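A rough sense of the saving from this change (my back-of-envelope, assuming the number of points in the input sphere scales with its footprint area in a mostly planar indoor scene):

```python
r_old, r_new = 1.8, 1.2
print((r_new / r_old) ** 2)  # ~0.44, i.e. roughly half the input points per sphere
```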
@@ -130,7 +130,7 @@ class S3DISConfig(Config):
     conv_radius = 2.5
 
     # Radius of deformable convolution in "number grid cell". Larger so that deformed kernels can spread out
-    deform_radius = 6.0
+    deform_radius = 5.0
 
     # Radius of the area of influence of each kernel point in "number grid cell". (1.0 is the standard value)
     KP_extent = 1.2
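If I read the units right (radii expressed in multiples of the subsampling grid cell), the metric radii at the first layer work out as follows; a sketch, not code from the repository:

```python
first_subsampling_dl = 0.03   # grid cell size in meters
conv_radius = 2.5             # rigid kernels
deform_radius = 5.0           # deformable kernels, after this commit

print(conv_radius * first_subsampling_dl)    # 0.075 m
print(deform_radius * first_subsampling_dl)  # 0.15 m
```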