# Evaluate a trained U-Net checkpoint against the MNIST-fashion flow generator.
# Experiment-name history kept for reference:
# #mnist-bg-fix_l2_20190125_220558_unet_adam_lr1e-05_wd0.0_batch12
#bn = 'mnist-fg-fix_l2_20190125_215745_unet_adam_lr1e-05_wd0.0_batch15'
#bn = 'mnist-fg-fix_l1smooth_20190127_000440_unet_adam_lr1e-05_wd0.0_batch12'
#bn = 'mnist-fg-fix_l1_20190126_093344_unet_adam_lr1e-05_wd0.0_batch12'
#bn = 'mnist-fg-fix_l1_20190126_093344_unet_adam_lr1e-05_wd0.0_batch12'
#bn = 'mnist-bg-fix_l2_20190125_220558_unet_adam_lr1e-05_wd0.0_batch12'
bn = 'mnist-fg-fix-v1_l2_20190128_165524_unet_adam_lr1e-05_wd0.0_batch48'
#bn = 'mnist-fg-fix-v2_l2_20190128_163917_unet_adam_lr1e-05_wd0.0_batch48'

# Load the trained checkpoint on CPU and switch to inference mode.
model_path = log_dir_root_dflt / bn / 'checkpoint.pth.tar'
model = UNet(n_channels=n_ch, n_classes=n_ch)
state = torch.load(model_path, map_location='cpu')
model.load_state_dict(state['state_dict'])
model.eval()
#%%
# FIX: `argkws` was referenced below via **argkws but every assignment was
# commented out, so running this as a script raised NameError (it presumably
# only worked with a stale value from a previous notebook cell). Restore the
# last-active setting.
argkws = dict(output_size=256)
# Previously explored generator settings kept for reference:
# argkws = dict(output_size = 256,
#               epoch_size = 10,
#               bg_n_range = (5, 25),
#               int_range = (1., 1.),
#               max_rotation = 45,
#               is_v_flip = False)
gen = MNISTFashionFlow(is_separate=True, fg_n_range=(1, 5), **argkws)
gen.test()
# -*- coding: utf-8 -*-
"""
Smoke test: pull one batch from CroppedFlow and run it through an
untrained UNet to verify the data pipeline and model wiring.

Created on Fri Aug 17 15:08:14 2018
@author: avelinojaver
"""
import sys
from pathlib import Path

# Make the package root (one level up from this script) importable.
dname = Path(__file__).resolve().parents[1]
sys.path.append(str(dname))

from noise2noise.flow import CroppedFlow
from noise2noise.models import UNet
from torch import nn

if __name__ == '__main__':
    from torch.utils.data import DataLoader
    import tqdm

    gen = CroppedFlow()
    loader = DataLoader(gen, batch_size=8)
    gen.train()

    # FIX: removed dead locals `tops = []` / `bots = []` — they were never
    # appended to or read anywhere in the script.
    mod = UNet(n_channels=1, n_classes=1)

    # A single forward pass is enough for the smoke test.
    for X, Y in tqdm.tqdm(loader):
        Xhat = mod(X)
        break
from noise2noise.models import UNet
from noise2noise.trainer import log_dir_root
from read_movies.moviereader import MovieReader

import numpy as np
import torch
import tqdm
import math
# FIX: `scipy.ndimage.filters` is a deprecated private module path;
# `median_filter` is publicly exposed from `scipy.ndimage`.
from scipy.ndimage import median_filter
import torch.nn.functional as F

if __name__ == '__main__':
    # Load the trained denoising U-Net checkpoint on CPU.
    model = UNet(n_channels=1, n_classes=1)
    #model_path = log_dir_root / 'l2_20180819_122435_unet_adam_lr0.0001_wd0.0_batch8' / 'checkpoint.pth.tar'
    model_path = log_dir_root / 'l1_20180819_122435_unet_adam_lr0.0001_wd0.0_batch8' / 'checkpoint.pth.tar'
    state = torch.load(model_path, map_location='cpu')
    model.load_state_dict(state['state_dict'])
    # FIX: switch to inference mode before using the network (the sibling
    # scripts in this project all call `model.eval()` after loading);
    # without it, any dropout/batch-norm layers stay in training behavior.
    model.eval()

    # Input movie selection — earlier choices kept for reference.
    #movie_name = Path.home() / 'workspace/Vesicles/data/22_09_16/ves5/ramp100.22Sep2016_17.49.11.movie'
    #frame_number = 200
    #movie_name = '/Users/avelinojaver/OneDrive - Nexus365/vesicle/data/script_ramp.08Dec2015_17.09.35.movie'
    movie_name = '/Users/avelinojaver/OneDrive - Nexus365/vesicle/data/script_ramp.08Dec2015_16.45.56.movie'
    frame_number = 1066  #tot-1#1761

    reader = MovieReader(str(movie_name))
        # Tail of a sliding-window routine whose definition begins above this
        # chunk: writes the network output `xhat` back into the pre-allocated
        # `block` buffer, filling the window that ends at index `ii`.
        # NOTE(review): `block`, `ii`, and `xhat` come from the unseen
        # enclosing function — indentation here is a best guess; confirm
        # against the full file.
        block[ii - xhat.shape[0] + 1:ii + 1] = xhat

if __name__ == '__main__':
    # --- configuration -----------------------------------------------------
    cuda_id = 0
    batch_size = 2
    # presumably the log-intensity normalization range — TODO confirm
    scale_log = (7, 11.1)

    #root_dir = Path('/Users/avelinojaver/OneDrive - Nexus365/microglia/hdf_movies/movies/2018.08.22_movies/180822_MicVid_20X_Dispense/180822_MicVid_20X_Dispense_J9-Media_J11-100uMATP_1/')
    save_dir = Path.home() / 'workspace/Vesicles/movies_cleaned/'
    root_dir = Path.home() / 'workspace/Vesicles/data/'

    # Load the trained denoising U-Net checkpoint on CPU first, then move it
    # to the chosen device below.
    model_path = log_dir_root / 'l1_20180819_122435_unet_adam_lr0.0001_wd0.0_batch8' / 'checkpoint.pth.tar'
    model = UNet(n_channels=1, n_classes=1)
    state = torch.load(str(model_path), map_location='cpu')
    model.load_state_dict(state['state_dict'])

    # Select GPU when available, otherwise fall back to CPU.
    if torch.cuda.is_available():
        print("THIS IS CUDA!!!!")
        dev_str = "cuda:" + str(cuda_id)
    else:
        dev_str = 'cpu'
    device = torch.device(dev_str)

    model = model.to(device)
    model.eval()

    # Collect every .movie file under the data root for processing.
    fnames = list(root_dir.rglob('*.movie'))