import os
import time
from uuid import uuid4

import torch
from torch.utils.tensorboard import SummaryWriter

# Project-local helpers (config, Logger, load, init, train, test, fstr,
# defaults, save_dict) are assumed to be imported elsewhere in this module.


def main(exp_id=None):
    # Generate a fresh experiment id per call; a str(uuid4()) default
    # argument would be evaluated once at definition time and then shared
    # across every call.
    if exp_id is None:
        exp_id = str(uuid4())
    state = dict(iter=0, start_time=time.time())
    args = config(exp_id=exp_id)

    # Try to make the store dir (if it doesn't exist).
    exp_dir = os.path.join(args.storedir, exp_id)
    try:
        os.makedirs(exp_dir)
    except OSError as e:
        print(f"Directory exists ({e})")

    writer = SummaryWriter(exp_dir)
    logger = Logger(writer)
    (train_loader, test_loader), device = load(args)
    models, optimizers, schedulers = init(args, device, shape=train_loader.shape,
                                          last_iter=args.last_iter)

    for epoch in range(1, args.epochs + 1):
        args.epoch = epoch
        args.last_iter = epoch * len(train_loader)
        args.loop_msg = fstr(defaults.LOOP_MSG, args=args)
        train(state, args, models, device, train_loader, optimizers, schedulers, logger)
        test(state, args, models[0], device, test_loader, logger)
        logger.to_pickle(os.path.join(exp_dir, 'store.pkl'))

    if args.save_model:
        for model, savefile in zip(models, defaults.SAVE_FILES):
            torch.save(model.state_dict(),
                       os.path.join(exp_dir, fstr(savefile, args=args)))
    save_dict(vars(args), os.path.join(exp_dir, defaults.ARG_FILE))
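# A minimal sketch of the fstr() helper used above, assuming it performs
# deferred f-string interpolation on templates such as defaults.LOOP_MSG;
# the project's real implementation may differ. Placeholders like
# "{args.epoch}" in the template resolve against the caller-supplied
# keyword arguments.
def fstr(template, **kwargs):
    # Evaluate the template as an f-string in a namespace limited to the
    # caller-supplied keyword arguments.
    return eval(f'f"""{template}"""', {}, kwargs)

# Example: fstr("epoch {args.epoch}", args=args) -> "epoch 3"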
import os
from ast import literal_eval

import numpy as np
import torch
import torchvision


def eval(exp_dir, epoch):  # note: shadows the builtin eval()
    # Recover the original experiment configuration from the saved args file.
    with open(os.path.join(exp_dir, 'args.txt'), 'r') as f:
        kwargs = literal_eval(f.readline())
    args = config(**kwargs)
    logger = Logger()
    print(f"lr1={args.lr1} lr2={args.lr2}")

    (train_loader, test_loader), device = load(args)
    # init() returns (models, optimizers, schedulers); only the models are
    # needed for evaluation.
    models, *_ = init(args, device, shape=train_loader.shape)
    state = {"iter": np.nan}

    save_model = os.path.join(exp_dir, f'save{epoch:03d}.pt')
    save_perturb = os.path.join(exp_dir, f'save_perturb{epoch:03d}.pt')
    out = {}
    try:
        models[0].load_state_dict(torch.load(save_model))
        models[1].load_state_dict(torch.load(save_perturb))
        out = test(state, args, models[0], device, test_loader, logger)
        # The perturbation model's first parameter tensor is the learned
        # delta; save it as an image grid for visual inspection.
        delta = next(models[1].parameters())
        img = torchvision.utils.make_grid(delta, normalize=True)
        torchvision.utils.save_image(
            img, os.path.join(exp_dir, f'perturb{epoch:03d}.png'))
    except FileNotFoundError:
        # No checkpoint exists for this epoch.
        print("model not found")
    return dict(lr1=args.lr1, lr2=args.lr2, **out)
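# Usage sketch for eval() (hypothetical; EXP_DIR and N_EPOCHS are assumed
# values, not part of the original code): sweep the saved checkpoints of one
# experiment and collect the per-epoch metric dicts that eval() returns.
if __name__ == '__main__':
    EXP_DIR = 'store/example-experiment'  # assumed experiment directory
    N_EPOCHS = 10                         # assumed number of saved epochs
    results = [eval(EXP_DIR, epoch) for epoch in range(1, N_EPOCHS + 1)]
    for epoch, metrics in enumerate(results, start=1):
        print(f"epoch {epoch}: {metrics}")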
import time

import matplotlib.pyplot as plt
import numpy as np
from tensorflow.keras.models import load_model  # assuming a tf.keras model

import loaders

# Tail of a confusion-matrix plot whose setup code is elided above.
plt.tight_layout()
plt.ylabel('True label')
plt.xlabel('Predicted label')
plt.show()

start = time.time()

# Load the built model
print("Loading model...")
model = load_model('models/diabetes_model.h5')
model.summary()

# Load data and split into input/output
print("Loading data...")
data = loaders.load('datasets/diabetes.csv')
data_input = data[:, :-1]
data_output = data[:, -1].reshape(-1, 1)

# Make predictions from data
print("Getting predictions from model...")
predictions = model.predict(x=data_input, batch_size=12, verbose=0)
rounded_predictions = np.argmax(predictions, axis=-1)

# Count how many predicted labels match the ground truth.
correct_count = 0
for i in range(len(rounded_predictions)):
    # print(data_output[i], rounded_predictions[i])
    if data_output[i] == rounded_predictions[i]:
        correct_count += 1
print(
    f"Accuracy: {correct_count} / {len(rounded_predictions)} = "
    f"{100 * correct_count / len(rounded_predictions):.2f}%"
)
import parser  # project-local module (shadows the deprecated stdlib `parser`)
import loaders
from network import Network

if __name__ == '__main__':
    arg = parser.parser.parse_args()
    print(arg)
    # Build the data loaders, then train and checkpoint the network.
    train_loader, valid_loader, train_dataset = loaders.load(arg.data_directory)
    n = Network(arg.arch, arg.hidden_units, arg.learning_rate)
    n.train(arg.gpu, arg.epochs, train_loader, valid_loader)
    n.save(arg.save_dir, train_dataset)
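# A plausible sketch of the project-local parser module used above. Only the
# argument names are taken from the calls in this script; the defaults and
# help strings are illustrative assumptions, not the original's.
#
# parser.py
import argparse

parser = argparse.ArgumentParser(description='Train an image classifier.')
parser.add_argument('data_directory', help='directory containing the training data')
parser.add_argument('--save_dir', default='.', help='where to save the checkpoint')
parser.add_argument('--arch', default='vgg16', help='model architecture to use')
parser.add_argument('--hidden_units', type=int, default=512, help='hidden layer size')
parser.add_argument('--learning_rate', type=float, default=0.001, help='optimizer learning rate')
parser.add_argument('--epochs', type=int, default=5, help='number of training epochs')
parser.add_argument('--gpu', action='store_true', help='train on GPU if available')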