diff --git a/compare_samples.py b/compare_samples.py
index 9f186ab..e60ea1d 100644
--- a/compare_samples.py
+++ b/compare_samples.py
@@ -10,7 +10,9 @@ nominal = pv.read(VTKFILE_NOMINAL)
 
 # for each generated/sampled blade
 gen_files = Path("./output").glob("gen*.txt")
-for gen_file in gen_files:
+pc_files = Path("./output").glob("pc*.txt")
+files = list(gen_files) + list(pc_files)
+for gen_file in files:
     # load numpy txt
     blade = np.loadtxt(gen_file)
 
diff --git a/test_generation.py b/test_generation.py
index 8bf59cd..b018fb9 100644
--- a/test_generation.py
+++ b/test_generation.py
@@ -2,7 +2,6 @@ import argparse
 from pprint import pprint
 
 import datasets
-import pyvista as pv
 import torch
 import torch.nn as nn
 import torch.utils.data
@@ -502,8 +501,6 @@ def generate(model, opt):
     test_dataloader = torch.utils.data.DataLoader(
         test_dataset, batch_size=opt.batch_size, shuffle=False, num_workers=int(opt.workers), drop_last=False
     )
-    VTKFILE_NOMINAL = Path("~/data/stage-laurent-f/datasets/Rotor37/processed/nominal_blade_rotated.vtk")
-    nominal = pv.read(VTKFILE_NOMINAL)
 
     with torch.no_grad():
         samples = []
@@ -513,7 +510,8 @@ def generate(model, opt):
             x = data["positions"].transpose(1, 2)
             # m, s = data["mean"].float(), data["std"].float()
 
-            gen = model.gen_samples(x.shape, "cuda", clip_denoised=False).detach().cpu()
+            shape = torch.Size((*x.shape[:-1], 35000))
+            gen = model.gen_samples(shape, "cuda", clip_denoised=False).detach().cpu()
 
             gen = gen.transpose(1, 2).contiguous()
             x = x.transpose(1, 2).contiguous()
@@ -528,7 +526,6 @@ def generate(model, opt):
             pc = blade
 
             # unnormalize
-            pc = pc + nominal.points
             pc = pc * STD + MEAN
 
             print(f"Saving point cloud {idx}...")
diff --git a/train_generation.py b/train_generation.py
index 523361e..6d7587f 100644
--- a/train_generation.py
+++ b/train_generation.py
@@ -9,8 +9,6 @@ import torch.optim as optim
 import torch.utils.data
 from torch.distributions import Normal
 
-import pyvista as pv
-
 # from dataset.shapenet_data_pc import ShapeNet15kPointClouds
 from model.pvcnn_generation import PVCNN2Base
 from utils.file_utils import *
@@ -652,8 +650,6 @@ def train(gpu, opt, output_dir):
     """ data """
     train_dataset, _ = get_dataset(opt.dataroot, opt.npoints, opt.category)
     dataloader, _, train_sampler, _ = get_dataloader(opt, train_dataset, None)
-    VTKFILE_NOMINAL = Path("~/data/stage-laurent-f/datasets/Rotor37/processed/nominal_blade_rotated.vtk")
-    nominal = pv.read(VTKFILE_NOMINAL)
 
     """ create networks """
 
@@ -712,7 +708,6 @@ def train(gpu, opt, output_dir):
         lr_scheduler.step(epoch)
 
         for i, data in enumerate(dataloader):
-            x = data["positions"] - nominal.points
             x = data["positions"].transpose(1, 2)
 
             noises_batch = torch.randn_like(x)
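
For reference, a minimal sketch of the shape construction introduced in `generate` above. The batch size of 4 and the 2048 input points are illustrative assumptions; only the trailing 35000 and the `gen_samples(shape, device, clip_denoised=...)` call pattern come from the diff itself.

    import torch

    # stand-in for data["positions"].transpose(1, 2): (batch, channels, n_points)
    x = torch.randn(4, 3, 2048)

    # keep (batch, channels) from the input, but request a fixed 35000-point cloud
    shape = torch.Size((*x.shape[:-1], 35000))   # -> torch.Size([4, 3, 35000])

    # gen = model.gen_samples(shape, "cuda", clip_denoised=False).detach().cpu()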