relu at head softmax correction

HuguesTHOMAS 2021-07-30 20:46:01 +00:00
parent e13cc3874d
commit 3d683b6bd6
2 changed files with 5 additions and 3 deletions


@@ -116,7 +116,7 @@ class KPCNN(nn.Module):
block_in_layer = 0
self.head_mlp = UnaryBlock(out_dim, 1024, False, 0)
-        self.head_softmax = UnaryBlock(1024, config.num_classes, False, 0)
+        self.head_softmax = UnaryBlock(1024, config.num_classes, False, 0, no_relu=True)
################
# Network Losses
@@ -294,7 +294,7 @@ class KPFCNN(nn.Module):
out_dim = out_dim // 2
self.head_mlp = UnaryBlock(out_dim, config.first_features_dim, False, 0)
-        self.head_softmax = UnaryBlock(config.first_features_dim, self.C, False, 0)
+        self.head_softmax = UnaryBlock(config.first_features_dim, self.C, False, 0, no_relu=True)
################
# Network Losses

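The change passes no_relu=True to the final head block in both KPCNN and KPFCNN, so the class scores leave the network as raw logits instead of being clamped by an activation. Below is a minimal sketch (not the repository's actual UnaryBlock implementation, just a simplified stand-in with the same call signature) of why skipping the activation on the last block matters: a ReLU on the output would zero out negative logits and prevent the cross-entropy loss from expressing negative evidence for a class.

# Hedged sketch: a simplified UnaryBlock-like module illustrating the no_relu flag.
# The real block in KPConv-PyTorch may differ in details (batch-norm variant, forward args).
import torch
import torch.nn as nn

class UnaryBlock(nn.Module):
    def __init__(self, in_dim, out_dim, use_bn, bn_momentum, no_relu=False):
        super().__init__()
        self.mlp = nn.Linear(in_dim, out_dim, bias=not use_bn)
        self.bn = nn.BatchNorm1d(out_dim, momentum=bn_momentum) if use_bn else None
        # For the classification head, the activation must be skipped so that
        # the softmax/cross-entropy loss receives unrestricted logits.
        self.activation = None if no_relu else nn.LeakyReLU(0.1)

    def forward(self, x):
        x = self.mlp(x)
        if self.bn is not None:
            x = self.bn(x)
        if self.activation is not None:
            x = self.activation(x)
        return x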

@@ -707,7 +707,9 @@ def experiment_name_1():
logs = np.sort([join(res_path, l) for l in listdir(res_path) if start <= l <= end])
# Give names to the logs (for plot legends)
-    logs_names = ['name_log_1']
+    logs_names = ['name_log_1',
+                  'name_log_2',
+                  'name_log_3']
# safe check log names
logs_names = np.array(logs_names[:len(logs)])
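The "safe check" line keeps the legend list in sync with the log folders that were actually found. A standalone illustration (the folder and legend names here are made up, not from the repository):

# Hypothetical example of the safe check: legends are truncated to the number of logs.
import numpy as np

logs = ['Log_2021-07-29', 'Log_2021-07-30']               # two result folders found on disk
logs_names = ['name_log_1', 'name_log_2', 'name_log_3']   # three legend entries declared

logs_names = np.array(logs_names[:len(logs)])
print(logs_names)  # -> ['name_log_1' 'name_log_2']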