"../exp" -> "./exp"

Laurent FAINSIN 2023-04-07 13:33:06 +02:00
parent 7225eabb15
commit f79b9c697b
7 changed files with 18 additions and 19 deletions

View file

@@ -1,4 +1,4 @@
-bash_name: ../exp/tmp/2022_0407_0300_45.sh
+bash_name: ./exp/tmp/2022_0407_0300_45.sh
 clipforge:
   clip_model: ViT-B/32
   enable: 0
@@ -105,13 +105,13 @@ latent_pts:
   weight_kl_feat: 1.0
   weight_kl_glb: 1.0
   weight_kl_pt: 1.0
-log_dir: ../exp/0407/airplane/cb8fb3h_train_l2e-4GlobalP2048_vae_adainB20l1E3W8
-log_name: ../exp/0407/airplane/cb8fb3h_train_l2e-4GlobalP2048_vae_adainB20l1E3W8
+log_dir: ./exp/0407/airplane/cb8fb3h_train_l2e-4GlobalP2048_vae_adainB20l1E3W8
+log_name: ./exp/0407/airplane/cb8fb3h_train_l2e-4GlobalP2048_vae_adainB20l1E3W8
 model_config: default
 ngpu: 8
 num_ref: 0
 num_val_samples: 24
-save_dir: ../exp/0407/airplane/cb8fb3h_train_l2e-4GlobalP2048_vae_adainB20l1E3W8
+save_dir: ./exp/0407/airplane/cb8fb3h_train_l2e-4GlobalP2048_vae_adainB20l1E3W8
 sde:
   attn_mhead: 0
   attn_mhead_local: -1

View file

@@ -1,4 +1,4 @@
-bash_name: ../exp/tmp/2022_0407_1347_21.sh
+bash_name: ./exp/tmp/2022_0407_1347_21.sh
 clipforge:
   clip_model: ViT-B/32
   enable: 0
@@ -105,13 +105,13 @@ latent_pts:
   weight_kl_feat: 1.0
   weight_kl_glb: 1.0
   weight_kl_pt: 1.0
-log_dir: ../exp/0407/car/fbb941h_train_l2e-4GlobalP2048D03_vae_adainB20l1E3W8
-log_name: ../exp/0407/car/fbb941h_train_l2e-4GlobalP2048D03_vae_adainB20l1E3W8
+log_dir: ./exp/0407/car/fbb941h_train_l2e-4GlobalP2048D03_vae_adainB20l1E3W8
+log_name: ./exp/0407/car/fbb941h_train_l2e-4GlobalP2048D03_vae_adainB20l1E3W8
 model_config: default
 ngpu: 8
 num_ref: 0
 num_val_samples: 24
-save_dir: ../exp/0407/car/fbb941h_train_l2e-4GlobalP2048D03_vae_adainB20l1E3W8
+save_dir: ./exp/0407/car/fbb941h_train_l2e-4GlobalP2048D03_vae_adainB20l1E3W8
 sde:
   attn_mhead: 0
   attn_mhead_local: -1
@@ -195,7 +195,7 @@ sde:
   update_q_ema: false
   use_adam: true
   use_adamax: false
-  vae_checkpoint: ../exp/0326/car/f91abeh_hvae_kl0.5N32H1Anneall1_sumWlrInitScale_vae_adainB32l1E3W4/checkpoints/epoch_7999_iters_151999.pt
+  vae_checkpoint: ./exp/0326/car/f91abeh_hvae_kl0.5N32H1Anneall1_sumWlrInitScale_vae_adainB32l1E3W4/checkpoints/epoch_7999_iters_151999.pt
   warmup_epochs: 20
   weight_decay: 0.0003
   weight_decay_norm_dae: 0.0

View file

@@ -1,4 +1,4 @@
-bash_name: ../exp/tmp/2022_0416_1418_42.sh
+bash_name: ./exp/tmp/2022_0416_1418_42.sh
 clipforge:
   clip_model: ViT-B/32
   enable: 0
@@ -105,13 +105,13 @@ latent_pts:
   weight_kl_feat: 1.0
   weight_kl_glb: 1.0
   weight_kl_pt: 1.0
-log_dir: ../exp/0416/chair/afc967h_train_l2e-4GlobalP2048D04_vae_adainB20l1E3W8
-log_name: ../exp/0416/chair/afc967h_train_l2e-4GlobalP2048D04_vae_adainB20l1E3W8
+log_dir: ./exp/0416/chair/afc967h_train_l2e-4GlobalP2048D04_vae_adainB20l1E3W8
+log_name: ./exp/0416/chair/afc967h_train_l2e-4GlobalP2048D04_vae_adainB20l1E3W8
 model_config: default
 ngpu: 8
 num_ref: 0
 num_val_samples: 24
-save_dir: ../exp/0416/chair/afc967h_train_l2e-4GlobalP2048D04_vae_adainB20l1E3W8
+save_dir: ./exp/0416/chair/afc967h_train_l2e-4GlobalP2048D04_vae_adainB20l1E3W8
 sde:
   attn_mhead: 0
   attn_mhead_local: -1

View file

@@ -160,7 +160,7 @@ class Prior(nn.Module):
         #     self.mixing_logit = torch.nn.Parameter(init, requires_grad=False) # not update
         #     self.is_active = None
         # elif not args.learn_mixing_logit: # not learn, loaded from c04cd1h exp
-        #     init = torch.load('../exp/1110/chair/c04cd1h_hvae3s_390f8dhInitSepesTrainvae0_hvaeB72l1E4W1/mlogit.pt')
+        #     init = torch.load('./exp/1110/chair/c04cd1h_hvae3s_390f8dhInitSepesTrainvae0_hvaeB72l1E4W1/mlogit.pt')
         #     self.mixing_logit = torch.nn.Parameter(init, requires_grad=False)
         #     self.is_active = None
         # else:

View file

@@ -105,7 +105,7 @@ def main(args, config):
 def get_args():
     parser = argparse.ArgumentParser('encoder decoder examiner')
     # experimental results
-    parser.add_argument('--exp_root', type=str, default='../exp',
+    parser.add_argument('--exp_root', type=str, default='./exp',
                         help='location of the results')
     # parser.add_argument('--save', type=str, default='exp',
     #                     help='id used for storing intermediate results')
@@ -176,7 +176,7 @@ def get_args():
     config.merge_from_list(args.opt)
     # Create log_name
-    EXP_ROOT = args.exp_root  # os.environ.get('EXP_ROOT', '../exp/')
+    EXP_ROOT = args.exp_root  # os.environ.get('EXP_ROOT', './exp/')
     if config.exp_name == '' or config.exp_name == 'none':
         config.hash = io_helper.hash_str('%s' % config) + 'h'
         cfg_file_name = exp_helper.get_expname(config)

View file

@@ -8,7 +8,6 @@
 import os
 import json
 from comet_ml import Experiment, OfflineExperiment
-## import open3d as o3d
 import time
 import numpy as np
 import torch
@@ -324,7 +323,7 @@ def compute_score(output_name, ref_name, batch_size_test=256, device_str='cuda',
         gen_pcs.cpu().numpy(), ref_pcs.cpu().numpy())
     results['jsd'] = jsd
     msg = print_results(results, **print_kwargs)
-    # with open('../exp/eval_out.txt', 'a') as f:
+    # with open('./exp/eval_out.txt', 'a') as f:
     #     run_time = time.strftime('%m%d-%H%M-%S')
     #     f.write('<< date: %s >>\n' % run_time)
     #     f.write('%s\n%s\n' % (exp.url, msg))

View file

@@ -491,7 +491,7 @@ def common_init(rank, seed, save_dir, comet_key=''):
     if os.path.exists('.wandb_api'):
         wb_args = json.load(open('.wandb_api', 'r'))
-        wb_dir = '../exp/wandb/' if not os.path.exists(
+        wb_dir = './exp/wandb/' if not os.path.exists(
             '/workspace/result') else '/workspace/result/wandb/'
         if not os.path.exists(wb_dir):
             os.makedirs(wb_dir)