Exemple #1
0
def create_dataset_before(args_file):
    """Create every dataset described in *args_file*.

    Each line of the file is one dataset argument string; any dataset that
    does not yet exist under ``data/`` is generated.
    """
    with open(args_file, 'r') as handle:
        for arg_line in handle:
            data_args = parse_data_args(arg_line)
            print(data_args)
            create_if_not_exist_dataset(root='data/', arg_str=data_args)
Exemple #2
0
def test_model(model, device, save_mcc=False):
    """Evaluate a trained iVAE or iFlow model on its own dataset.

    Recovers the dataset argument string, seed and epoch count from the
    model instance, reloads the dataset, runs inference, saves the
    estimated latents under ``z_est/`` and prints the MCC score against
    the ground-truth sources.

    model : trained iVAE or iFlow instance (dispatch is on the class name)
    device : torch.device the data is moved to for inference
    save_mcc : when True, append the MCC score to a results text file

    Raises ValueError for any other model class.
    """
    model.eval()

    # Grab data arguments from the dataset filename stored on the model.
    model_name = model.__class__.__name__
    if model_name == 'iFlow':
        data_args = model.args['file'].split('/')[-1][4:-4] + '_f'
        seed = model.args['seed']
        epochs = model.args['epochs']
    elif model_name == 'iVAE':
        data_args = model.file.split('/')[-1][4:-4] + '_f'
        seed = model.seed
        epochs = model.epochs
    else:
        # Fail early: otherwise data_args/seed/epochs (and z_est below)
        # are unbound and we crash later with a confusing NameError.
        raise ValueError("Unsupported model class: {}".format(model_name))

    data_file = create_if_not_exist_dataset(root='data/{}/'.format(seed), arg_str=data_args)
    A = np.load(data_file)

    x = torch.from_numpy(A['x']).to(device)
    print("x.shape ==", x.shape)

    s = A['s']  # ground-truth sources stay in numpy; mcc() consumes arrays
    print("s.shape ==", s.shape)

    u = torch.from_numpy(A['u']).to(device)
    print("u.shape ==", u.shape)

    if model_name == 'iVAE':
        _, z_est = model.elbo(x, u)
    elif model_name == 'iFlow':
        # First two fields of data_args are nps and ns; their product is
        # the total number of examples, needed by the flow's mask.
        total_num_examples = reduce(operator.mul, map(int, data_args.split('_')[:2]))
        model.set_mask(total_num_examples)
        z_est, nat_params = model.inference(x, u)

    z_est = z_est.cpu().detach().numpy()

    Z_EST_FOLDER = osp.join('z_est/', data_args + '_' + str(epochs))
    if not osp.exists(Z_EST_FOLDER):
        os.makedirs(Z_EST_FOLDER)
    np.save("{}/z_est_{}.npy".format(Z_EST_FOLDER, model_name), z_est)
    if model_name == 'iFlow':
        nat_params = nat_params.cpu().detach().numpy()
        np.save("{}/nat_params.npy".format(Z_EST_FOLDER), nat_params)
    print("z_est.shape ==", z_est.shape)

    perf = mcc(s, z_est)
    print("EVAL PERFORMANCE: {}".format(perf))

    if save_mcc:
        # Append the score for the current model / data configuration.
        # Saved as <model>_<nps>_<ns>_<prior>.txt; the original iVAE and
        # iFlow branches wrote the identical line, so write it once.
        split_args = data_args.split("_")
        out_name = "_".join([model_name, "_".join(split_args[:2]),
                             split_args[4]]) + '.txt'
        with open(osp.join('results', '2D_mcc_scores', out_name), 'a+') as f:
            f.write(", " + str(perf))

    print("DONE.")
Exemple #3
0
    # Per-experiment output folders: logs, tensorboard runs, checkpoints,
    # saved models and estimated latents.
    LOG_FOLDER = osp.join(EXPERIMENT_FOLDER, 'log/')
    TENSORBOARD_RUN_FOLDER = osp.join(EXPERIMENT_FOLDER, 'runs/')
    TORCH_CHECKPOINT_FOLDER = osp.join(EXPERIMENT_FOLDER, 'ckpt/')
    PT_MODELS_FOLDER = osp.join('pt_models/', args.data_args + '_' + args.i_what + '_' + str(args.epochs))
    Z_EST_FOLDER = osp.join('z_est/', args.data_args + '_' + str(args.epochs))

    os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu_id

    print(args)
    # Seed every RNG source for reproducibility; cudnn.deterministic /
    # benchmark=False trade speed for repeatable GPU results.
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed(args.seed)
        torch.cuda.manual_seed_all(args.seed)
        torch.backends.cudnn.deterministic = True
        torch.backends.cudnn.benchmark = False
    os.environ["PYTHONHASHSEED"] = str(args.seed)

    st = time.time()  # wall-clock start of the run

    # Generate the dataset on the fly when no file was given explicitly.
    if args.file is None:
        args.file = create_if_not_exist_dataset(root='data/{}/'.format(args.seed), arg_str=args.data_args)

    device = torch.device('cuda' if args.cuda else 'cpu')

    # Metadata = the argparse namespace as a plain dict, plus the device.
    metadata = vars(args).copy()
    metadata.update({"device": device})


    model = train_model(args, metadata, device=device)
    test_model(model, device=device, save_mcc=args.save_mcc)
Exemple #4
0
                        help='run without logging')
    parser.add_argument('-q',
                        '--log-freq',
                        type=int,
                        default=25,
                        help='logging frequency (default 25).')
    args = parser.parse_args()

    print(args)
    # Seed torch and numpy for reproducible runs.
    torch.manual_seed(args.seed)
    np.random.seed(args.seed)

    st = time.time()  # wall-clock start of the run

    # Generate the dataset on the fly when no file was given explicitly.
    if args.file is None:
        args.file = create_if_not_exist_dataset(root='data/',
                                                arg_str=args.data_args)
    # Metadata = argparse namespace without 'no_log' and 'data_args'.
    metadata = vars(args).copy()
    del metadata['no_log'], metadata['data_args']

    device = torch.device('cuda' if args.cuda else 'cpu')
    print('training on {}'.format(
        torch.cuda.get_device_name(device) if args.cuda else 'cpu'))

    # load data
    if not args.preload:
        dset = SyntheticDataset(args.file, 'cpu')
        # pin_memory only helps (and only applies) when copying to GPU.
        loader_params = {
            'num_workers': 1,
            'pin_memory': True
        } if args.cuda else {}
        train_loader = DataLoader(dset,
Exemple #5
0
def load_plot_2d(
        seeds,
        data_arguments='1000_5_2_2_$mixing-layers_$seed_gauss_xtanh_u_f',
        iFlow_results_file=None,
        iVAE_results_file=None,
        epochs=20,
        mixing_layers=3):
    """Plot 2D latent recoveries of iFlow and iVAE for several dataset seeds.

    seeds : list of dataset seeds for visualization
    data_arguments : arguments for the dataset, 'nps_ns_dl_dd_nl_s_p_a_u_n';
        the '$mixing-layers' and '$seed' placeholder fields are substituted
    iFlow_results_file : filename of corresponding iFlow results
    iVAE_results_file : filename of corresponding iVAE results
    epochs : number of training epochs
    mixing_layers : number of mixing layers used that generated the results

    NOTE(review): both results files must be provided — indexing
    iFlow_perfs/iVAE_perfs in the loop raises TypeError otherwise
    (unchanged from the original behavior).
    """
    iFlow_perfs = None
    iVAE_perfs = None
    data_arguments = data_arguments.split("_")
    # Field 4 of the arg string is the number of mixing layers.
    data_arguments = (data_arguments[:4] + [str(mixing_layers)]
                      + data_arguments[5:])

    print("Number of layers in dataset mixing MLP: ", mixing_layers)
    if iFlow_results_file:
        with open(iFlow_results_file) as f:
            # Results line looks like ", 0.93, 0.91, ..." — drop the empty
            # leading field and parse with float() instead of eval() (no
            # code execution on file contents).
            iFlow_perfs = [float(v) for v in f.readline().split(',')[1:]]
            print('iFlow mean = {:.4f}, std = {:.4f}'.format(
                np.mean(iFlow_perfs), np.std(iFlow_perfs)))

    if iVAE_results_file:
        with open(iVAE_results_file) as f:
            iVAE_perfs = [float(v) for v in f.readline().split(',')[1:]]
            print('iVAE mean = {:.4f}, std = {:.4f}'.format(
                np.mean(iVAE_perfs), np.std(iVAE_perfs)))

    for i, seed in enumerate(seeds):
        # Field 5 of the arg string is the dataset seed.
        data_arguments[5] = str(seed)
        data_file = create_if_not_exist_dataset(
            root='data/{}/'.format(1), arg_str="_".join(data_arguments))
        print('Dataset seed = {}'.format(seed))
        with np.load(data_file) as data:
            x = data['x']
            u = data['u']
            s = data['s']
        # Predictions were saved by test_model() under
        # z_est/<data_args>_<epochs>/ .
        path_to_z_est = "z_est/" + "_".join(
            data_arguments[:5]) + "_" + str(seed) + "_" + "_".join(
                data_arguments[6:]) + '_' + str(epochs) + "/"
        z_est_iFlow = np.load(path_to_z_est + "z_est_iFlow.npy")
        z_est_iVAE = np.load(path_to_z_est + "z_est_iVAE.npy")

        # Plotted figure is saved with data_args as filename in
        # results/2D_visualizations/.
        fig_name = "_".join([
            "_".join(data_arguments[:5]),
            str(seed), "_".join(data_arguments[6:]),
            str(epochs)
        ])
        # plot and save figure
        plot_2d(s,
                x,
                u,
                z_est_iFlow,
                z_est_iVAE,
                iFlow_perfs[i],
                iVAE_perfs[i],
                filename=fig_name)
Exemple #6
0
def load_model_from_checkpoint(ckpt_path, device, model_seed=1):
    """Rebuild an iFlow or iVAE model from a checkpoint path and load weights.

    The second path component is expected to be named
    ``<data_args>_<model_name>_<epochs>`` — dataset and model properties are
    recovered from it.

    ckpt_path : path to the torch checkpoint file
    device : torch.device the data and model are moved to
    model_seed : data/ sub-folder in which the dataset is (re)created

    Raises ValueError when the folder name encodes an unknown model name.
    """
    print('checkpoint path:', ckpt_path)
    # Folder name encodes "<data_args>_<model_name>_<epochs>".
    model_args = ckpt_path.split('/')[1].split('_')
    epochs = model_args[-1]
    model_name = model_args[-2]
    data_args = model_args[:-2]

    data_file = create_if_not_exist_dataset(root='data/{}/'.format(model_seed),
                                            arg_str="_".join(data_args))

    # Dataset properties from the arg string: nps_ns_dl_dd_...
    # aux_dim is read from the same field as ns (the number of segments).
    nps = int(data_args[0])
    ns = int(data_args[1])
    aux_dim = int(data_args[1])
    n = nps * ns
    latent_dim = int(data_args[2])
    data_dim = int(data_args[3])

    print('Loading model', model_name)

    print('Loading data', data_file)
    A = np.load(data_file)

    x = torch.from_numpy(A['x']).to(device)
    print("x.shape ==", x.shape)

    s = A['s']  # ground-truth sources, kept as numpy
    print("s.shape ==", s.shape)

    u = torch.from_numpy(A['u']).to(device)
    print("u.shape ==", u.shape)

    # map_location makes GPU-saved checkpoints loadable on CPU (and vice
    # versa); without it torch.load fails when CUDA is unavailable.
    checkpoint = torch.load(ckpt_path, map_location=device)

    # Arguments (metadata, from argparse in main.py), have to correspond to
    # selected dataset and model properties.
    # Hyperparameters and configuration as prescribed in the paper.
    metadata = {
        'file': data_file,
        'path': data_file,
        'batch_size': 64,
        'epochs': epochs,
        'device': device,
        'seed': 1,
        'i_what': model_name,
        'max_iter': None,
        'hidden_dim': 50,
        'depth': 3,
        'lr': 1e-3,
        'cuda': True,
        'preload': True,
        'anneal': False,
        'log_freq': 25,
        'flow_type': 'RQNSF_AG',
        'num_bins': 8,
        'nat_param_act': 'Softplus',
        'gpu_id': '0',
        'flow_length': 10,
        'lr_drop_factor': 0.25,
        'lr_patience': 10
    }

    # Get dataset properties
    metadata.update({
        'nps': nps,
        'ns': ns,
        'n': n,
        'latent_dim': latent_dim,
        'data_dim': data_dim,
        'aux_dim': aux_dim
    })

    if model_name == 'iFlow':
        model = iFlow(args=metadata).to(device)
    elif model_name == "iVAE":
        model = iVAE(
            latent_dim,  # latent_dim
            data_dim,  # data_dim
            aux_dim,  # aux_dim
            n_layers=metadata['depth'],
            activation='lrelu',
            device=device,
            hidden_dim=metadata['hidden_dim'],
            anneal=metadata['anneal'],  # False
            file=metadata['file'],
            seed=1)
    else:
        # Fail early instead of hitting an UnboundLocalError on `model`.
        raise ValueError("Unknown model name '{}' in checkpoint path".format(model_name))

    model.load_state_dict(checkpoint['model_state_dict'])
    return model