def get_model(args):
    """Build the network named by ``args.arch`` and prepare it for training.

    The architecture module is resolved by name, replicated across the first
    ``args.num_gpu`` devices with DataParallel, and moved to CUDA. When the
    string flag ``args.resume`` equals ``'True'``, weights are restored from
    a checkpoint via ``restore``.

    NOTE(review): ``eval(args.arch)`` executes arbitrary code — ``args.arch``
    must come from a trusted source (e.g. a fixed CLI choice list).
    """
    # Resolve the architecture module by name and instantiate the model.
    net = eval(args.arch).model(num_classes=args.num_classes)
    # Replicate over GPUs 0 .. num_gpu-1.
    net = torch.nn.DataParallel(net, range(args.num_gpu))
    net.cuda()
    # args.resume is a string flag ('True'/'False'), not a bool.
    if args.resume == 'True':
        restore(args, net, None)
    return net
def get_model(args):
    """Instantiate ``args.arch`` for evaluation and move it onto the GPUs.

    Wraps the model in DataParallel over the explicit device list
    ``args.gpus`` and, when the string flag ``args.resume`` is ``'True'``,
    restores checkpoint weights with ``istrain=False``.

    NOTE(review): ``eval(args.arch)`` executes arbitrary code — ``args.arch``
    must be trusted.
    """
    # Build the network; the module's factory also receives the full args.
    net = eval(args.arch).model(num_classes=args.num_classes, args=args)
    # Replicate across the explicitly listed devices.
    net = torch.nn.DataParallel(net, args.gpus)
    net.cuda()
    # String flag, not a bool.
    if args.resume == 'True':
        restore(args, net, None, istrain=False)
    return net
def get_model(args):
    """Build the classifier named by ``args.arch`` plus its SGD optimizer.

    Parameters are split into four groups with scaled learning rates:
    plain weights (1x), plain biases (2x), and — when ``args.diff_lr`` is
    the string ``'True'`` — weights/biases of layers whose name contains one
    of the "added" tags get 10x/20x. The model is wrapped in DataParallel
    over ``args.gpus`` and a checkpoint is optionally restored.

    Returns:
        (model, optimizer) tuple.

    NOTE(review): ``eval(args.arch)`` executes arbitrary code — ``args.arch``
    must be trusted.
    """
    net = eval(args.arch).model(pretrained=True,
                                num_classes=args.num_classes,
                                args=args)
    net.to(args.device)

    base_lr = args.lr
    # Name fragments identifying freshly-added heads that get boosted LR.
    boosted_tags = ['cls', 'fpn', 'fc', 'classifier'] if args.diff_lr == 'True' else []

    plain_w, plain_b = [], []
    boost_w, boost_b = [], []
    print('\n following parameters will be assigned 10x learning rate:')
    for pname, param in net.named_parameters():
        # Route each parameter to the plain or boosted weight/bias bucket.
        dest_w, dest_b = plain_w, plain_b
        if any(tag in pname for tag in boosted_tags):
            print(pname)
            dest_w, dest_b = boost_w, boost_b
        if 'weight' in pname:
            dest_w.append(param)
        elif 'bias' in pname:
            dest_b.append(param)

    # Biases conventionally get 2x LR; boosted layers 10x (w) / 20x (b).
    optimizer = optim.SGD(
        [{'params': plain_w, 'lr': base_lr},
         {'params': plain_b, 'lr': base_lr * 2},
         {'params': boost_w, 'lr': base_lr * 10},
         {'params': boost_b, 'lr': base_lr * 20}],
        momentum=0.9, weight_decay=0.0005, nesterov=True)

    net = torch.nn.DataParallel(net, args.gpus)
    if args.resume == 'True':
        restore(args, net, optimizer, including_opt=False)
    return net, optimizer
def get_model(args):
    """Build the classifier named by ``args.arch`` plus its SGD optimizer.

    The model is moved to CUDA, wrapped in DataParallel over the first
    ``args.num_gpu`` devices, and its parameters are split into four LR
    groups: plain weights (1x), plain biases (2x), and — when
    ``args.diff_lr`` is the string ``'True'`` — weights/biases of layers
    whose name contains 'fc' or 'cls' at 10x/20x. A checkpoint is restored
    when ``args.resume == 'True'``.

    Returns:
        (model, optimizer) tuple.

    Fixed: the original used the Python 2 statement ``print name``, which is
    a SyntaxError under Python 3 (sibling code already uses ``print(...)``).

    NOTE(review): ``eval(args.arch)`` executes arbitrary code — ``args.arch``
    must be trusted.
    """
    model = eval(args.arch).model(pretrained=True,
                                  num_classes=args.num_classes,
                                  args=args)
    model.cuda()
    model = torch.nn.DataParallel(model, range(args.num_gpu))

    lr = args.lr
    # Name fragments identifying layers that get boosted learning rate.
    added_layers = ['fc', 'cls'] if args.diff_lr == 'True' else []

    weight_list = []
    bias_list = []
    added_weight_list = []
    added_bias_list = []
    print('\n following parameters will be assigned 10x learning rate:')
    for name, value in model.named_parameters():
        # DataParallel prefixes names with 'module.', but substring matching
        # on 'fc'/'cls' is unaffected.
        if any(x in name for x in added_layers):
            print(name)  # was Python 2 `print name` — SyntaxError on py3
            if 'weight' in name:
                added_weight_list.append(value)
            elif 'bias' in name:
                added_bias_list.append(value)
        else:
            if 'weight' in name:
                weight_list.append(value)
            elif 'bias' in name:
                bias_list.append(value)

    # Biases conventionally get 2x LR; added layers 10x (w) / 20x (b).
    optimizer = optim.SGD(
        [{'params': weight_list, 'lr': lr},
         {'params': bias_list, 'lr': lr * 2},
         {'params': added_weight_list, 'lr': lr * 10},
         {'params': added_bias_list, 'lr': lr * 20}],
        momentum=0.9, weight_decay=0.0005, nesterov=True)

    if args.resume == 'True':
        restore(args, model, optimizer, including_opt=False)
    return model, optimizer
net = network.Network(input_image) res = net.build_networks() init_op = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer()) # if delete this code, it will go wrong.I have no idea. saver = tf.train.Saver() with tf.Session() as sess: sess.run(init_op) restore(sess) for im_name in im_names: anno=cv2.imread(FLAGS.eval_dir+"Segmentation/"+im_name[:-1]+".png",cv2.IMREAD_GRAYSCALE) print ('-----------------------------------------') print("Evaluate pic name:%s"%(im_name[:-1]+".png")) test_imgs = [] test_imgs.append(preprocess_testimg(FLAGS.eval_dir+"JPEGImages/"+im_name[:-1]+".jpg")) test_res = sess.run(res, feed_dict={input_image: test_imgs}) test_res = np.argmax(test_res, axis=3) res_pic=test_res[0] #make the picture same size as original height_pads=res_pic.shape[0]-anno.shape[0] width_pads=res_pic.shape[1]-anno.shape[1] width_pads_left,width_pads_right,height_pads_top,height_pads_bottom=calculate_pads(width_pads,height_pads) for i in range(width_pads_left): res_pic=np.delete(res_pic,0,axis=1)