def estimation(dataset,
               boot_index,
               model_params,
               model_params_grad,
               num_obs_samples,
               num_future_steps,
               category_tt_split,
               num_mc_samples,
               output_file,
               true_model_params=None):
    y, x = dataset
    y_complete = y.clone().detach()
    y_complete = y_complete[0:-num_future_steps]  # drop the forecast horizon
    category_tt_split = 'session'  # overrides the passed-in split strategy
    y, x, y_future, x_future = train_future_split(y, x, num_future_steps)
    y_train, y_test, test_inds = train_test_split(y.cpu(),
                                                  x.cpu(),
                                                  cat=category_tt_split)
    x = x.clone().detach()
    y_train = y_train.clone().detach()
    y_test = torch.tensor(y_test, dtype=dtype, device=device)
    test_inds = torch.tensor(test_inds, dtype=torch.long, device=device)
    y_future = y_future.clone().detach()
    x_future = x_future.clone().detach()

    # note: y_train is then replaced by the full (pre-split) observation tensor
    y_train = y.clone().detach().to(device)
    data = [y_train, x, y_test, test_inds, y_future, x_future, y_complete]

    model = LearningDynamicsModel(dim=dim)

    boot_output_file = output_file + '/' + str(boot_index)
    os.makedirs(boot_output_file)
    os.makedirs(boot_output_file + '/model_structs')
    os.makedirs(boot_output_file + '/data')
    os.makedirs(boot_output_file + '/plots')

    inference = Inference(
        data=data,
        model=model,
        model_params=model_params,
        model_params_grad=model_params_grad,
        savedir=boot_output_file,
        num_obs_samples=num_obs_samples,
        num_future_steps=num_future_steps,
        num_mc_samples=num_mc_samples,
        ppc_window=50,
        z_true=z_true,
        true_model_params=true_model_params)  # pass in just for figures

    opt_params = inference.run()
    torch.save(opt_params, boot_output_file + '/model_structs/opt_params.npy')
    torch.save(dataset, boot_output_file + '/data/dataset.npy')
    torch.save(model_params,
               boot_output_file + '/model_structs/model_params.npy')
    return opt_params
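
A minimal usage sketch (not part of the source): one way estimation might be driven per bootstrap index, assuming dataset is a (y, x) pair already on the right device. The model_params keys mirror the (truncated) model_params_grad dict in Example 2; the run_bootstrap name, the parameter values, and the sample-count arguments are illustrative assumptions, not values from the original code.

def run_bootstrap(dataset, num_boot, output_file):
    # placeholder parameter dictionary; keys follow the grad-flag dict in Example 2,
    # values (0.0 = log(1.0)) are illustrative assumptions only
    model_params = {
        'init_latent_loc': [0.0],
        'init_latent_log_scale': [0.0],
        'transition_log_scale': [0.0],
        'log_gamma': [0.0],
    }
    # only log_gamma is optimized, matching the flags shown in Example 2
    model_params_grad = {k: (k == 'log_gamma') for k in model_params}
    results = []
    for boot_index in range(num_boot):
        opt_params = estimation(dataset,
                                boot_index,
                                model_params,
                                model_params_grad,
                                num_obs_samples=1,  # assumption
                                num_future_steps=1,
                                category_tt_split='session',
                                num_mc_samples=10,
                                output_file=output_file)
        results.append(opt_params)
    return results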
Example 2
        num_future_steps = 1
        category_tt_split = 'session'
        num_mc_samples = 10
        ppc_window = 50
        percent_test = .2
        features = [
            'Bias', 'X1', 'X2', 'Choice t-1', 'RW Side t-1', 'X1 t-1', 'X2 t-1'
        ]

        y_complete = torch.tensor(y.copy(), device=device)
        y_complete = y_complete[0:-num_future_steps]

        #y, x, y_future, x_future = train_future_split(y, x, num_future_steps)
        y_future = y
        x_future = x
        y_train, y_test, test_inds = train_test_split(y, x, category_tt_split,
                                                      percent_test)
        x = torch.tensor(x, dtype=dtype, device=device)
        y_train = torch.tensor(y_train, dtype=dtype, device=device)
        y_test = torch.tensor(y_test, dtype=dtype, device=device)
        test_inds = torch.tensor(test_inds, dtype=torch.long, device=device)
        y_future = torch.tensor(y_future, dtype=dtype, device=device)
        x_future = torch.tensor(x_future, dtype=dtype, device=device)
        #data = [y_train, x]
        data = [y_train, x, y_test, test_inds, y_future, x_future, y_complete]
        datasets.append(data)

    model_params_grad = {
        'init_latent_loc': False,
        'init_latent_log_scale': False,
        'transition_log_scale': False,
        'log_gamma': True,
Example 3
        # os.mkdir(savedir)
        # f = '../data/W066_short.csv'

        x, y, rw = read_and_process(num_obs_samples, f, savedir=savedir)
        rw = torch.tensor(rw, dtype=dtype, device=device)

        dim = x.shape[2]
        T = x.shape[0]
        # initial-latent prior given as (locations, log-scales); log(1.0) = 0, i.e. unit scale
        init_prior = ([0.0] * dim, [math.log(1.0)] * dim)
        transition_scale = [math.log(1.0)]  # * dim
    num_future_steps = 1
    category_tt_split = 'single'
    num_mc_samples = 10
    y, x, y_future, x_future = train_future_split(y, x, num_future_steps)
    y_train, y_test, test_inds = train_test_split(y, x, cat=category_tt_split)
    x = torch.tensor(x, dtype=dtype, device=device)
    y_train = torch.tensor(y_train, dtype=dtype, device=device)
    y_test = torch.tensor(y_test, dtype=dtype, device=device)
    test_inds = torch.tensor(test_inds, dtype=torch.long, device=device)
    y_future = torch.tensor(y_future, dtype=dtype, device=device)
    x_future = torch.tensor(x_future, dtype=dtype, device=device)
    #data = [y_train, x]
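    # data layout: training labels, inputs, held-out labels and their indices,
    # followed by the future-step labels and inputs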
    data = [y_train, x, y_test, test_inds, y_future, x_future]
    # declare model here
    init_transition_log_scale = [math.log(1.)]  # * dim
    model = LearningDynamicsModel(init_prior, init_transition_log_scale, dim=3)
    inference = Inference(data,
                          model,
                          savedir='',
                          num_obs_samples=num_obs_samples,