ReLU at head softmax correction
parent e13cc3874d
commit 3d683b6bd6
@@ -116,7 +116,7 @@ class KPCNN(nn.Module):
         block_in_layer = 0

         self.head_mlp = UnaryBlock(out_dim, 1024, False, 0)
-        self.head_softmax = UnaryBlock(1024, config.num_classes, False, 0)
+        self.head_softmax = UnaryBlock(1024, config.num_classes, False, 0, no_relu=True)

         ################
         # Network Losses
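The change above passes no_relu=True to the final head block so that it emits raw class logits instead of ReLU-clamped ones. A minimal sketch of how such a flag is typically wired into a pointwise block follows; the constructor signature, the LeakyReLU slope, and the batch-norm handling are assumptions for illustration, not the repository's exact code.

import torch.nn as nn

class UnaryBlock(nn.Module):
    # Hypothetical sketch of a pointwise (unary) block with a no_relu switch.
    def __init__(self, in_dim, out_dim, use_bn, bn_momentum, no_relu=False):
        super().__init__()
        self.mlp = nn.Linear(in_dim, out_dim, bias=not use_bn)
        self.bn = nn.BatchNorm1d(out_dim, momentum=bn_momentum) if use_bn else None
        # Head blocks pass no_relu=True so the output stays an unconstrained logit.
        self.activation = None if no_relu else nn.LeakyReLU(0.1)

    def forward(self, x):
        x = self.mlp(x)
        if self.bn is not None:
            x = self.bn(x)
        if self.activation is not None:
            x = self.activation(x)
        return x

With no_relu=True the classification head returns unrestricted logits, which is what a softmax/cross-entropy loss expects; an extra ReLU would zero every negative logit before normalization.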
@@ -294,7 +294,7 @@ class KPFCNN(nn.Module):
            out_dim = out_dim // 2

        self.head_mlp = UnaryBlock(out_dim, config.first_features_dim, False, 0)
-        self.head_softmax = UnaryBlock(config.first_features_dim, self.C, False, 0)
+        self.head_softmax = UnaryBlock(config.first_features_dim, self.C, False, 0, no_relu=True)

        ################
        # Network Losses
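The same correction is applied to the segmentation head. As a hedged illustration of why the trailing ReLU was harmful: PyTorch's cross-entropy applies log-softmax to raw logits internally, so clamping negatives first changes the predicted distribution. The values below are made up for the example.

import torch
import torch.nn.functional as F

# Made-up head outputs; note the negative logit that a trailing ReLU would clamp.
logits = torch.tensor([[2.0, -1.5, 0.3]])
labels = torch.tensor([0])

loss = F.cross_entropy(logits, labels)             # uses the raw logits
clamped = F.cross_entropy(F.relu(logits), labels)  # what an extra ReLU would give
print(loss.item(), clamped.item())                 # the two losses differ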
@@ -707,7 +707,9 @@ def experiment_name_1():
     logs = np.sort([join(res_path, l) for l in listdir(res_path) if start <= l <= end])

     # Give names to the logs (for plot legends)
-    logs_names = ['name_log_1']
+    logs_names = ['name_log_1',
+                  'name_log_2',
+                  'name_log_3']

     # safe check log names
     logs_names = np.array(logs_names[:len(logs)])
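For context on the safe check kept at the end of this hunk: slicing logs_names to len(logs) keeps the plot legends aligned with the logs actually found on disk, so listing more names than logs (as this commit now does) is harmless. A small self-contained illustration, with made-up paths:

import numpy as np

# Made-up example: three names declared, but only two log folders were found.
logs = np.array(['results/Log_A', 'results/Log_B'])
logs_names = ['name_log_1', 'name_log_2', 'name_log_3']

# The safe check truncates the names to the number of logs, keeping legends aligned.
logs_names = np.array(logs_names[:len(logs)])
print(logs_names)  # ['name_log_1' 'name_log_2']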