def infer(self):
    self.preprocess_data()
    # Load the args saved alongside the checkpoint; fall back to the
    # defaults from config.parser() if they are missing or unreadable.
    try:
        args = np.load(os.path.join(self.model_path, 'args.npy'),
                       allow_pickle=True).item()
    except (IOError, ValueError):
        args = config.parser()
    print(self.ckpt_file)
    args['model_ckpt'] = self.ckpt_file
    self.pred = infer.main(args, self.X_test)
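# For reference, a minimal sketch (not from this repo) of the training-side
# counterpart the try-block above expects: dumping the argument dict next to
# the checkpoint so inference can restore the exact configuration.
# `save_args` is a hypothetical helper name.
import os
import numpy as np

def save_args(model_path, args):
    # np.save wraps the dict in a 0-d object array; .item() unwraps it on load
    np.save(os.path.join(model_path, 'args.npy'), args)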
def main():
    opt = parser()
    test_dataset = SieveDataset(opt)

    # create dataloader
    test_loader = SieveDataLoader(opt, test_dataset)

    if opt.name == 'GMM':
        model = GMM(opt)

        # visualization
        if not os.path.exists(os.path.join(opt.tensorboard_dir, opt.name, opt.datamode)):
            os.makedirs(os.path.join(opt.tensorboard_dir, opt.name, opt.datamode))
        board = SummaryWriter(log_dir=os.path.join(opt.tensorboard_dir, opt.name, opt.datamode))

        checkpoint_path = osp.join(opt.checkpoint_dir, opt.name, 'gmm_final.pth')
        load_checkpoint(model, checkpoint_path)
        test_gmm(opt, test_loader, model, board)

    elif opt.name == 'TOM':
        model = UnetGenerator(26, 4, ngf=64)

        # visualization
        if not os.path.exists(os.path.join(opt.tensorboard_dir, opt.name, opt.datamode)):
            os.makedirs(os.path.join(opt.tensorboard_dir, opt.name, opt.datamode))
        board = SummaryWriter(log_dir=os.path.join(opt.tensorboard_dir, opt.name, opt.datamode))

        checkpoint_path = osp.join(opt.checkpoint_dir, opt.name, 'tom_final.pth')
        load_checkpoint(model, checkpoint_path)
        test_tom(opt, test_loader, model, board)
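# `load_checkpoint` is used throughout these entry points but defined
# elsewhere; a minimal sketch, assuming checkpoints are plain PyTorch state
# dicts (the actual implementation may differ):
import os
import torch

def load_checkpoint(model, checkpoint_path):
    if not os.path.exists(checkpoint_path):
        raise FileNotFoundError(checkpoint_path)
    # map_location lets a GPU-trained checkpoint load on a CPU-only machine
    model.load_state_dict(torch.load(checkpoint_path, map_location='cpu'))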
def main():
    opt = parser()

    im_path = opt.input_image_path  # person image path
    cloth_path = opt.cloth_image_path  # cloth image path
    pose_path = opt.input_image_path.replace('.jpg', '_keypoints.json')  # pose keypoints path
    generate_pose_keypoints(im_path)  # generate pose keypoints
    segm_path = opt.human_parsing_image_path  # segmented mask path
    img_name = im_path.split('/')[-1].split('.')[0] + '_'

    agnostic, c, im_ttp = generate_data(opt, im_path, cloth_path, pose_path, segm_path)
    agnostic = agnostic.to(device)
    c = c.to(device)
    im_ttp = im_ttp.to(device)

    gmm = GMM(opt)
    load_checkpoint(gmm, os.path.join(opt.checkpoint_dir, 'GMM', 'gmm_final.pth'))
    gmm.to(device)
    gmm.eval()

    unet_mask = UnetGenerator(25, 20, ngf=64)
    load_checkpoint(unet_mask, os.path.join(opt.checkpoint_dir, 'SEG', 'segm_final.pth'))
    unet_mask.to(device)
    unet_mask.eval()

    tom = UnetGenerator(26, 4, ngf=64)
    load_checkpoint(tom, os.path.join(opt.checkpoint_dir, 'TOM', 'tom_final.pth'))
    tom.to(device)
    tom.eval()

    with torch.no_grad():
        output_segm = unet_mask(torch.cat([agnostic, c], 1))
        grid_zero, theta, grid_one, delta_theta = gmm(agnostic, c)
        c_warp = F.grid_sample(c, grid_one, padding_mode='border')

    # turn the segmentation logits into a one-hot mask for TOM's input
    output_segm = F.log_softmax(output_segm, dim=1)
    output_argm = torch.max(output_segm, dim=1, keepdim=True)[1]
    final_segm = torch.zeros(output_segm.shape).to(device).scatter(1, output_argm, 1.0)
    input_tom = torch.cat([final_segm, c_warp, im_ttp], 1)

    with torch.no_grad():
        output_tom = tom(input_tom)

    person_r = torch.tanh(output_tom[:, :3, :, :])
    mask_c = torch.sigmoid(output_tom[:, 3:, :, :])
    mask_c = (mask_c >= 0.5).type(torch.float)
    img_tryon = mask_c * c_warp + (1 - mask_c) * person_r
    print('Output generated!')

    # map outputs from [-1, 1] back to [0, 1] for saving
    c_warp = c_warp * 0.5 + 0.5
    output_argm = output_argm.type(torch.float)
    person_r = person_r * 0.5 + 0.5
    img_tryon = img_tryon * 0.5 + 0.5

    tensortoimage(c_warp[0].cpu(), osp.join(opt.save_dir, img_name + 'w_cloth.png'))
    tensortoimage(output_argm[0][0].cpu(), osp.join(opt.save_dir, img_name + 'seg_mask.png'))
    tensortoimage(mask_c[0].cpu(), osp.join(opt.save_dir, img_name + 'c_mask.png'))
    tensortoimage(person_r[0].cpu(), osp.join(opt.save_dir, img_name + 'ren_person.png'))
    tensortoimage(img_tryon[0].cpu(), osp.join(opt.save_dir, img_name + 'final_output.png'))
    print('Output saved at {}'.format(opt.save_dir))
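# `tensortoimage` is defined elsewhere; a minimal sketch, assuming it takes an
# HxW or CxHxW float tensor with values in [0, 1] and writes it to disk. This
# torchvision-based version is an assumption, not necessarily the repo's own:
import torchvision.utils as vutils

def tensortoimage(tensor, path):
    # save_image accepts HxW, CxHxW, or BxCxHxW tensors and scales to 8-bit
    vutils.save_image(tensor, path)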
def load_from_mat3d(dataset_path):
    import h5py
    if dataset_path != '':
        # img = np.array(h5py.File(dataset_path)['img'])
        img = sio.loadmat(dataset_path)['img']
        sz = img.shape
        if img.ndim == 3:
            img = img.reshape(1, 1, sz[0], sz[1], sz[2])
        if img.ndim == 4:
            img = img.reshape(1, sz[0], sz[1], sz[2], sz[3])
        sz = img.shape
        print('(%d,%d,%d,%d,%d) tensor loaded.' % (sz[0], sz[1], sz[2], sz[3], sz[4]))

        # Normalize img by its RMS
        img = img.reshape(sz[0], np.prod(sz[1:]))
        img /= np.sqrt(np.mean(np.square(img), axis=1, keepdims=True))
        img = img.reshape(sz) * .8
        # img /= np.sqrt(np.mean(np.square(img)))
        # img /= np.linalg.norm(img, ord='fro', axis=(1,2), keepdims=True)
        # img = img.reshape(sz[0], np.prod(sz[1:]))
        # img /= np.linalg.norm(img, axis=1, keepdims=True)
        # img = img.reshape(sz)
        return img
    else:
        return None

# if __name__ == '__main__':
#     a = config.parser()
#     main(a)
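# Quick sanity check of the RMS normalization above: after dividing each
# sample by the root-mean-square of its own elements and scaling by 0.8, the
# per-sample RMS is exactly 0.8. The shapes below are illustrative only.
import numpy as np

x = np.random.randn(2, 3, 4, 5, 6).astype(np.float64)
sz = x.shape
flat = x.reshape(sz[0], -1)
flat /= np.sqrt(np.mean(np.square(flat), axis=1, keepdims=True))
x = flat.reshape(sz) * .8
print(np.sqrt(np.mean(np.square(x.reshape(sz[0], -1)), axis=1)))  # ~[0.8 0.8]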
def main(args):
    """
    Main function to read the graph list, extract features,
    learn the embedding and save it.
    :param args: Object with the arguments.
    """
    graphs = glob.glob(args.input_path + "*.json")

    # print("\nFeature extraction started.\n")
    document_collections = Parallel(n_jobs=args.workers)(
        delayed(feature_extractor)(g, args.wl_iterations) for g in tqdm(graphs))

    # print("\nOptimization started.\n")
    model = Doc2Vec(document_collections,
                    vector_size=args.dimensions,
                    window=0,
                    min_count=args.min_count,
                    dm=0,
                    sample=args.down_sampling,
                    workers=args.workers,
                    epochs=args.epochs,
                    alpha=args.learning_rate)

    save_embedding(args.output_path, model, graphs, args.dimensions)


if __name__ == "__main__":
    args = parser()
    main(args)
    print("ok")
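# `feature_extractor` and `save_embedding` live elsewhere in the project; a
# minimal sketch of `save_embedding`, assuming each TaggedDocument was tagged
# with its graph's base file name (the tag scheme is an assumption) and that
# gensim >= 4 exposes the trained vectors via `model.dv`:
import csv
import os

def save_embedding(output_path, model, files, dimensions):
    with open(output_path, "w", newline="") as out:
        writer = csv.writer(out)
        writer.writerow(["name"] + ["x_%d" % d for d in range(dimensions)])
        for path in files:
            name = os.path.basename(path).split(".")[0]
            # adjust the tag lookup to match whatever feature_extractor used
            writer.writerow([name] + list(model.dv[name]))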
        # dict(curve=curve, GT=GT_points, pred_mask_rle=annotation["pred_mask_rle"], img=img)
        # time3 = datetime.datetime.now()
        # print("curve shape:", curve.shape)
        # print("GT points shape:", GT_points.shape)
        return curve, GT_points

    def test_time(self, num=100):
        import random
        import pandas
        random.shuffle(self.ann)
        time_data = []
        for i in range(num):
            curve, GT_points = self.__getitem__(i)
            # curve, GT_points, time_dict = self.__getitem__(i)
            # time_data.append(time_dict)
        # time_data = pandas.DataFrame(time_data)
        # print(time_data.describe())

    def show_invalid(self):
        print("invalid index size:", len(self.invalid_ind))
        print(self.invalid_ind)


if __name__ == "__main__":
    config = parser()
    dataset = AnnPolygon(config, train=False)
    dataset.test_time()
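# If the commented-out timing path above is ever revived, a lighter-weight
# variant that needs no pandas; a sketch, assuming the cost of __getitem__ is
# what is being profiled:
import time

def time_getitem(dataset, num=100):
    start = time.perf_counter()
    for i in range(num):
        dataset[i]
    elapsed = time.perf_counter() - start
    print("%.4f s / item over %d items" % (elapsed / num, num))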
def main():
    opt = parser()
    train_dataset = SieveDataset(opt)

    # create dataloader
    train_loader = SieveDataLoader(opt, train_dataset)

    # create model & train & save the final checkpoint
    if opt.name == 'GMM':
        model = GMM(opt)

        # visualization
        if not os.path.exists(os.path.join(opt.tensorboard_dir, opt.name)):
            os.makedirs(os.path.join(opt.tensorboard_dir, opt.name))
        board = SummaryWriter(log_dir=os.path.join(opt.tensorboard_dir, opt.name))

        # for checkpoint saving
        if not os.path.exists(os.path.join(opt.checkpoint_dir, opt.name)):
            os.makedirs(os.path.join(opt.checkpoint_dir, opt.name))

        if opt.checkpoint != '' and os.path.exists(opt.checkpoint):
            load_checkpoint(model, opt.checkpoint)

        train_gmm(opt, train_loader, model, board)
        save_checkpoint(model, os.path.join(opt.checkpoint_dir, opt.name, 'gmm_final.pth'))

    elif opt.name == 'SEG':
        # input channels  = agnostic (22) + cloth (3)
        # output channels = segmentation output (20)
        model = UnetGenerator(25, 20, ngf=64)

        # visualization
        if not os.path.exists(os.path.join(opt.tensorboard_dir, opt.name)):
            os.makedirs(os.path.join(opt.tensorboard_dir, opt.name))
        board = SummaryWriter(log_dir=os.path.join(opt.tensorboard_dir, opt.name))

        # for checkpoint saving
        if not os.path.exists(os.path.join(opt.checkpoint_dir, opt.name)):
            os.makedirs(os.path.join(opt.checkpoint_dir, opt.name))

        if opt.checkpoint != '' and os.path.exists(opt.checkpoint):
            load_checkpoint(model, opt.checkpoint)
            print('Checkpoints loaded!')

        train_segm(opt, train_loader, model, board)
        save_checkpoint(model, os.path.join(opt.checkpoint_dir, opt.name, 'segm_final.pth'))

    elif opt.name == 'TOM':
        # input channels  = generated seg mask (20) + texture translation prior (3) + warped cloth (3)
        # output channels = cloth mask (1) + rendered person (3)
        model = UnetGenerator(26, 4, ngf=64)
        # second network for the duelling triplet loss strategy
        model_triloss = UnetGenerator(26, 4, ngf=64)

        # visualization
        if not os.path.exists(os.path.join(opt.tensorboard_dir, opt.name)):
            os.makedirs(os.path.join(opt.tensorboard_dir, opt.name))
        board = SummaryWriter(log_dir=os.path.join(opt.tensorboard_dir, opt.name))

        # for checkpoint saving
        if not os.path.exists(os.path.join(opt.checkpoint_dir, opt.name)):
            os.makedirs(os.path.join(opt.checkpoint_dir, opt.name))

        if opt.checkpoint != '' and os.path.exists(opt.checkpoint):
            load_checkpoint(model, opt.checkpoint)

        train_tom(opt, train_loader, model, model_triloss, board)
        save_checkpoint(model, os.path.join(opt.checkpoint_dir, opt.name, 'tom_final.pth'))
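# `save_checkpoint` is the counterpart of `load_checkpoint`; a minimal sketch,
# assuming checkpoints are plain state dicts and the parent directory already
# exists (the makedirs calls above guarantee that):
import torch

def save_checkpoint(model, save_path):
    torch.save(model.state_dict(), save_path)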
from ascavis_data import alcdef, sha
from ascavis_data.mpc import MpcSqlConnection, mpc_db_query
import httplib2
import simplejson as json
import glob
import os
from flask import Flask, Response, send_from_directory, request, redirect

import config

# Parse configuration
cfg_parser = config.parser()
cfg_parser.read(["server.cfg", os.path.expanduser("~/.config/ascavis/server.cfg")])
options = dict(cfg_parser.items("ascavis_server"))
ASSET_DIR = options["asset_dir"]
ALCDEF_DIR = options["alcdef_dir"]
API_MIME = options["api_mime"]

# Setup server
app = Flask(__name__)

# Setup SHA client
spitzer = sha.SpitzerHeritageArchive(httplib2.Http(".cache"))


@app.route("/")
def root():
    """Base route"""
    return redirect('/app/index.html', 302)
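# A plausible server.cfg for the configuration block above; the section name
# and keys follow the options[...] lookups, but the values are illustrative
# only:
#
#   [ascavis_server]
#   asset_dir = /srv/ascavis/assets
#   alcdef_dir = /srv/ascavis/alcdef
#   api_mime = application/json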
# ANU Library room booker
# Paul Apelt, u5568225

import datetime

import config
import network

parser = config.parser()
anulib = network.anulib()

print(parser.timetable('timetable.conf'))
login = parser.logins('login.conf')[0]
print(anulib.login(login))
print(anulib.init(datetime.datetime.now(), 'Hancock'))
print(anulib.logout())