Example #1
def run_mlstm1900_example():
    # Set up an example
    sequence = "MRKGEELFTGVVPILVELDGDVNGHKFSVRGEGEGDATNGKLTLKFICTTGKLPVPWPTLVTTLTYGVQCFARYPDHMKQHDFFKSAMPEGYVQERTISFKDDGTYKTRAEVKFEGDTLVNRIELKGIDFKEDGNILGHKLEYNFNSHNVYITADKQKNGIKANFKIRHNVEDGSVQLADHYQQNTPIGDGPVLLPDNHYLSTQSVLSKDPNEKRDHMVLLEFVTAAGITHGMDELYK"
    print("sequence length: ", len(sequence))

    sequence = aa_seq_to_int(sequence)[:-1]

    embeddings = np.load("embed_matrix:0.npy")
    x = np.vstack([embeddings[i] for i in sequence])
    print("embedding shape: ", x.shape)

    # x = sliding_window(sequence, size=10)
    params = dict()
    params["gh"] = np.load("rnn_mlstm_mlstm_gh:0.npy")
    params["gmh"] = np.load("rnn_mlstm_mlstm_gmh:0.npy")
    params["gmx"] = np.load("rnn_mlstm_mlstm_gmx:0.npy")
    params["gx"] = np.load("rnn_mlstm_mlstm_gx:0.npy")

    params["wh"] = np.load("rnn_mlstm_mlstm_wh:0.npy")
    params["wmh"] = np.load("rnn_mlstm_mlstm_wmh:0.npy")
    params["wmx"] = np.load("rnn_mlstm_mlstm_wmx:0.npy")
    params["wx"] = np.load("rnn_mlstm_mlstm_wx:0.npy")

    params["b"] = np.load("rnn_mlstm_mlstm_b:0.npy")

    # Pass through mLSTM1900
    out = mlstm1900(params, x)
    print("output: ", out)
    print("reps: ", out.mean(axis=0))
    print("output shape: ", out.shape)
    assert out.shape == (x.shape[0], 1900)
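For context, `aa_seq_to_int` (a jax-unirep utility) maps the amino-acid string to integer indices into the embedding matrix, and the `[:-1]` drops the final (stop) token. A minimal sketch of the tokenize-then-embed step, using a hypothetical residue table in place of the real vocabulary:

import numpy as np

# Hypothetical residue table; the real vocabulary also includes
# start/stop and rare-residue tokens.
AA_TO_INT = {aa: i for i, aa in enumerate("ACDEFGHIKLMNPQRSTVWY")}

def embed_sequence(sequence, embeddings):
    # One embedding row per residue: shape (len(sequence), embed_dim).
    return np.vstack([embeddings[AA_TO_INT[aa]] for aa in sequence])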
Example #2
def load_params_1900(name: str = "uniref50") -> Dict:
    """Load pre-trained mLSTM1900 weights from the UniRep paper."""
    params = dict()
    params["gh"] = np.load(
        weights_1900_dir / name / "rnn_mlstm_mlstm_gh:0.npy"
    )
    params["gmh"] = np.load(
        weights_1900_dir / name / "rnn_mlstm_mlstm_gmh:0.npy"
    )
    params["gmx"] = np.load(
        weights_1900_dir / name / "rnn_mlstm_mlstm_gmx:0.npy"
    )
    params["gx"] = np.load(
        weights_1900_dir / name / "rnn_mlstm_mlstm_gx:0.npy"
    )

    params["wh"] = np.load(
        weights_1900_dir / name / "rnn_mlstm_mlstm_wh:0.npy"
    )
    params["wmh"] = np.load(
        weights_1900_dir / name / "rnn_mlstm_mlstm_wmh:0.npy"
    )
    params["wmx"] = np.load(
        weights_1900_dir / name / "rnn_mlstm_mlstm_wmx:0.npy"
    )
    params["wx"] = np.load(
        weights_1900_dir / name / "rnn_mlstm_mlstm_wx:0.npy"
    )

    params["b"] = np.load(weights_1900_dir / name / "rnn_mlstm_mlstm_b:0.npy")

    return params
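A usage sketch mirroring Example #1: the returned dictionary feeds directly into the mLSTM forward pass, and averaging the per-position hidden states yields the fixed-length UniRep representation:

params = load_params_1900("uniref50")
h = mlstm1900(params, x)  # x: embedded sequence, shape (seq_len, 10)
rep = h.mean(axis=0)      # (1900,) representation, the "reps" of Example #1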
Example #3
def load_dense_1900(name: str = "uniref50") -> Dict:
    """
    Load pre-trained dense layer weights from the UniRep paper.

    The dense layer weights are used to predict the next character
    from the output of the mLSTM1900.
    """
    w = np.load(weights_1900_dir / name / "fully_connected_weights:0.npy")
    b = np.load(weights_1900_dir / name / "fully_connected_biases:0.npy")
    return w, b
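A hedged sketch of how these weights would be applied: project each 1900-dimensional hidden state onto the output vocabulary and normalize with a softmax (the exact token count depends on the UniRep vocabulary):

import numpy as np

def next_char_probs(h, w, b):
    # h: (seq_len, 1900) mLSTM outputs; w: (1900, n_tokens); b: (n_tokens,)
    logits = h @ w + b
    z = logits - logits.max(axis=-1, keepdims=True)  # numerically stable softmax
    return np.exp(z) / np.exp(z).sum(axis=-1, keepdims=True)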
Example #4
def lhd_saddle_B(surface_data, NS):
	""" Only to be called once """
	r_surf, _, _ = surface_data
	fc_s = np.load("/Users/nmcgreiv/research/ad/FOCUSADD/focusadd/initFiles/lhd/lhd_fc_saddle.npy")
	I_s = np.load("/Users/nmcgreiv/research/ad/FOCUSADD/focusadd/initFiles/lhd/lhd_I_saddle.npy")
	theta = np.linspace(0, 2 * PI, NS + 1)
	NF = fc_s.shape[2]
	NC = fc_s.shape[1]
	coil_data = NC, NS, NF, 0, 0, 0, 0, 0, 0, 0
	r_s = CoilSet.compute_r_centroid(coil_data, fc_s, theta)
	dl_s = CoilSet.compute_x1y1z1(coil_data, fc_s, theta) * (2 * PI / NS)
	return LossFunction.biotSavart(r_surf, I_s, dl_s[:,:,None,None,:], r_s[:,:,None,None,:])
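For reference, `LossFunction.biotSavart` evaluates a discretized Biot–Savart law: each segment `dl` at position `r`, carrying current `I`, contributes `dl × (x − r) / |x − r|³` to the field at a point `x`. An illustrative single-point, single-coil version (the real call broadcasts over the whole surface grid via the `[:, :, None, None, :]` reshapes):

import numpy as np

MU_0 = 4e-7 * np.pi

def biot_savart_point(x, I, dl, r):
    # x: (3,) field point; dl, r: (n_segments, 3); I: scalar coil current
    d = x - r
    norm3 = np.linalg.norm(d, axis=-1, keepdims=True) ** 3
    return MU_0 * I / (4 * np.pi) * (np.cross(dl, d) / norm3).sum(axis=0)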
Example #5
def load_dense_1900(folderpath: Optional[str] = None) -> Tuple:
    """
    Load pre-trained dense layer weights from the UniRep paper.

    The dense layer weights are used to predict the next character
    from the output of the mLSTM1900.
    """
    weights_1900_dir = get_weights_dir(folderpath=folderpath)

    w = np.load(weights_1900_dir / "fully_connected_weights:0.npy")
    b = np.load(weights_1900_dir / "fully_connected_biases:0.npy")
    return w, b
Example #6
    def load_model(self):
        self.pcacomponents = np.load(
            self.prefix + "pcacomponents" + self.suffix + ".npy"
        )
        self.components_prior_params = np.load(
            self.prefix + "components_prior_params" + self.suffix + ".npy",
        )
        self.polynomials_prior_mean = np.load(
            self.prefix + "polynomials_prior_mean" + self.suffix + ".npy",
        )
        self.polynomials_prior_loginvvar = np.load(
            self.prefix + "polynomials_prior_loginvvar" + self.suffix + ".npy",
        )
Example #7
    def load_trajectory(self, reference_env, reference_action_fp):
        actions = np.load(reference_action_fp)
        self.reference_actions = np.repeat(actions[:, np.newaxis],
                                           self.nenvs,
                                           axis=1)
        self.reference_trajectory = evaluate_actions(reference_env,
                                                     self.reference_actions)
        self.flattened_reference = np.squeeze(self.reference_trajectory)
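The `np.repeat(actions[:, np.newaxis], self.nenvs, axis=1)` call simply tiles one action sequence across all parallel environments; a standalone illustration of the shape change:

import numpy as np

actions = np.arange(5)                                # (T,): one action per step
tiled = np.repeat(actions[:, np.newaxis], 8, axis=1)
print(tiled.shape)                                    # (5, 8): each step repeated for 8 envs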
Example #8
def get_initial_params(args):
    input_file = args.input_file
    if args.axis.lower() == "w7x":
        assert args.num_zeta == 150
        assert args.num_theta == 20
        assert args.num_coils == 50
        # need to assert that axis has right number of points
        r = np.load("initFiles/w7x/w7x_r_surf.npy")
        nn = np.load("initFiles/w7x/w7x_nn_surf.npy")
        sg = np.load("initFiles/w7x/w7x_sg_surf.npy")
        surface_data = (r, nn, sg)
        if input_file is None:
            fc = np.load("initFiles/w7x/w7x_fc.npy")
            fr = np.zeros((2, args.num_coils, args.num_fourier_rotate))
            params = (fc, fr)
            coil_data = get_coil_data(args)
        else:
            with tb.open_file(input_file, "r") as f:
                coil_data = f.root.metadata[0]
                fc = np.asarray(f.root.coilSeries[:, :, :])
                fr = np.asarray(f.root.rotationSeries[:, :, :])
                params = (fc, fr)
    elif args.axis.lower() == "lhd":
        assert args.num_zeta == 200
        assert args.num_theta == 40
        assert args.num_coils == 2
        assert args.num_fourier_coils == 10
        r = np.load("initFiles/lhd/lhd_r_surf.npy")
        nn = np.load("initFiles/lhd/lhd_nn_surf.npy")
        sg = np.load("initFiles/lhd/lhd_sg_surf.npy")
        surface_data = (r, nn, sg)
        if input_file is None:
            fc = np.load("initFiles/lhd/lhd_fc.npy")
            fr = np.zeros((2, args.num_coils, args.num_fourier_rotate))
            params = (fc, fr)
            coil_data = get_coil_data(args)
        else:
            raise NotImplementedError("Loading LHD coils from an input file is not supported.")
    else:
        filename = "./initFiles/axes/{}.txt".format(args.axis)
        surface = Surface(
            filename,
            args.num_zeta,
            args.num_theta,
            args.radius_surface,
            res=args.axis_resolution,
        )
        if input_file is not None:
            coil_data, params = CoilSet.get_initial_data(
                surface, input_file="{}.hdf5".format(input_file))
        else:
            coil_data, params = CoilSet.get_initial_data(
                surface, args_dict=create_args_dict(args))
        surface_data = (surface.get_r_central(), surface.get_nn(),
                        surface.get_sg())
    return coil_data, params, surface_data
Example #9
def main():
    smc = np.load('mc_ddpip_3d_smeared.npy')
    print(smc.shape)

    data = np.column_stack(get_vars(smc)[:3])
    print(data.shape)

    do_fit(data, 100)
Example #10
    def __init__(self):
        npz = np.load('data/100k_reviews_binary_10k_vocab.npz',
                      allow_pickle=True)
        num_reviews = npz['x_train'].shape[0] + npz['x_test'].shape[0]
        print("Number of reviews: %d,  vocab size: %d" %
              (num_reviews, npz['x_train'].shape[1]))
        self.data = (npz['x_train'], npz['y_train'], npz['x_test'],
                     npz['y_test'])
Example #11
    def get_estimate(self,
                     data,
                     kwargs,
                     fit_kwargs=None,
                     state=False,
                     validate=True,
                     fit=False,
                     set=True):
        _kwargs = copy.deepcopy(kwargs)
        _kwargs = self.preload(_kwargs, state=state, validate=validate)

        imnn = self.imnn(**_kwargs)

        if not set:
            with pytest.raises(ValueError) as info:
                imnn.get_estimate(data)
            assert info.match(
                re.escape(
                    "Fisher information has not yet been calculated. Please " +
                    "run `imnn.set_F_statistics({w}, {key}, {validate}) " +
                    "with `w = imnn.final_w`, `w = imnn.best_w`, " +
                    "`w = imnn.inital_w` or otherwise, `validate = True` " +
                    "should be set if not simulating on the fly."))
            return

        _fit_kwargs = copy.deepcopy(fit_kwargs)
        λ = _fit_kwargs.pop("λ")
        ϵ = _fit_kwargs.pop("ϵ")
        if fit:
            imnn.fit(λ, ϵ, **_fit_kwargs)
        else:
            imnn.set_F_statistics(key=self.stats_key)
        estimate = imnn.get_estimate(data)

        name = self.set_name(state, validate, fit, _kwargs["n_d"])
        if self.save:
            files = {f"{name}estimate": estimate}
            try:
                targets = np.load(f"test/{self.filename}.npz")
                files = {k: files[k] for k in files.keys() - targets.keys()}
                np.savez(f"test/{self.filename}.npz", **{**files, **targets})
            except Exception:
                np.savez(f"test/{self.filename}.npz", **files)
        targets = np.load(f"test/{self.filename}.npz")
        assert np.all(np.equal(estimate, targets[f"{name}estimate"]))
Example #12
    def load_data(self):
        npz = np.load('data/mnist.npz', allow_pickle=True)
        (x_train, y_train, x_test, y_test) = npz['mnist']
        y_train = np.eye(10)[y_train]
        y_test = np.eye(10)[y_test]
        x_train = x_train.reshape((60000, 28*28)) / 255.0
        x_test = x_test.reshape((10000, 28*28)) / 255.0

        return (x_train, y_train, x_test, y_test)
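The `np.eye(10)[y]` indexing above is a compact one-hot encoding: row `k` of the identity matrix is the one-hot vector for class `k`:

import numpy as np

labels = np.array([2, 0, 1])
one_hot = np.eye(3)[labels]
# [[0. 0. 1.]
#  [1. 0. 0.]
#  [0. 1. 0.]]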
Example #13
    @classmethod
    def load(cls, fp):
        z = jnp.load(fp).items()
        var = cls._read_npz(z)
        instance = cls()
        for k, v in var.items():
            params = cls._unflatten_weights(v)
            setattr(instance, k, params)
        logger.info(f"Successfully loaded parameters from {fp}")
        return instance
Example #14
def load_samples(filename):

    x = np.load(filename, allow_pickle=True)

    prior_samples = x['prior_samples'].item()
    mcmc_samples = x['mcmc_samples'].item()
    post_pred_samples = x['post_pred_samples'].item()
    forecast_samples = x['forecast_samples'].item()

    return prior_samples, mcmc_samples, post_pred_samples, forecast_samples
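The `.item()` calls are the standard way to recover Python dicts pickled into a `.npy` file: `np.load` returns a 0-d object array and `.item()` unwraps it. A minimal round trip:

import numpy as np

samples = {"mu": np.zeros(3), "sigma": np.ones(3)}
np.save("samples.npy", samples)  # the dict is pickled into a 0-d object array

loaded = np.load("samples.npy", allow_pickle=True).item()
assert set(loaded) == {"mu", "sigma"}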
Example #15
def load_dataset(filename, id):
    if not os.path.exists(filename):
        !gdown --id $id

    npz_data = np.load(filename)
    out = {
        "data_grid_search":npz_data['train_data'] / 255.,
        "data_test":npz_data['test_data'] / 255.,
    }
    return out
Example #16
    def test_bks(self, dtype):
        LATCON = 3.5660930663857577e+01
        displacement, shift = space.periodic(LATCON)
        dist_fun = space.metric(displacement)
        species = np.tile(np.array([0, 1, 1]), 1000)
        current_dir = os.getcwd()
        filename = os.path.join(current_dir, 'tests/data/silica_positions.npy')
        with open(filename, 'rb') as f:
            R_f = np.array(np.load(f))
        energy_fn = energy.bks_silica_pair(dist_fun, species=species)
        self.assertAllClose(-857939.528386092, energy_fn(R_f))
Example #17
def plot_loaded_points():
    onp.random.seed(0)
    params = np.array([0, 1, -1, 0.5, 2, 0, -1, -2])
    x, y = generate_data(params, 0.1)
    x = np.load('x.npy')
    y = np.load('y.npy')
    fig, ax = plt.subplots()
    for i in range(4):
        ax.plot(
            [-1, 2],
            [-1 * params[i] + params[i + 4], 2 * params[i] + params[i + 4]],
            'k-')
    ax.set_xlim([0, 1])
    ax.set_ylim([-3, 3])
    ax.plot(x, y, 'k+', markersize=10)
    ax.set_ylabel(r'y')
    ax.set_xlabel(r'x')
    ax.spines['right'].set_visible(False)
    ax.spines['top'].set_visible(False)
    plt.show()
Example #18
def load_data(file_results, N, l):
    if os.path.isfile(file_results):
        D = jnp.load(file_results, allow_pickle=True)
        Dtr = D.item()['Dtr']
        Dt = Data_nh3()
    else:
        Dtr, Dt = split_trainig_test_coup(N, l)
        Xtr, gXtr, gXctr, ytr = Dtr
        Dtr = (Xtr, gXtr, gXctr, ytr)

    return Dtr, Dt
Example #19
def load_params_1900(folderpath: Optional[str] = None) -> Dict:
    """Load pre-trained mLSTM1900 weights from the UniRep paper."""
    weights_1900_dir = get_weights_dir(folderpath=folderpath)

    params = dict()
    params["gh"] = np.load(weights_1900_dir / "rnn_mlstm_mlstm_gh:0.npy")
    params["gmh"] = np.load(weights_1900_dir / "rnn_mlstm_mlstm_gmh:0.npy")
    params["gmx"] = np.load(weights_1900_dir / "rnn_mlstm_mlstm_gmx:0.npy")
    params["gx"] = np.load(weights_1900_dir / "rnn_mlstm_mlstm_gx:0.npy")

    params["wh"] = np.load(weights_1900_dir / "rnn_mlstm_mlstm_wh:0.npy")
    params["wmh"] = np.load(weights_1900_dir / "rnn_mlstm_mlstm_wmh:0.npy")
    params["wmx"] = np.load(weights_1900_dir / "rnn_mlstm_mlstm_wmx:0.npy")
    params["wx"] = np.load(weights_1900_dir / "rnn_mlstm_mlstm_wx:0.npy")

    params["b"] = np.load(weights_1900_dir / "rnn_mlstm_mlstm_b:0.npy")

    return params
Example #20
    def __init__(self):
        """Initialize the distribution.

        Load the values, tangents, and x-coordinate scaling of a spline that
        approximates the partition function. The spline was produced by running
        the script in fit_partition_spline.py.
        """
        with get_resource_as_file(
                'robust_loss_jax/data/partition_spline.npz') as spline_file:
            with jnp.load(spline_file, allow_pickle=False) as f:
                self._spline_x_scale = f['x_scale']
                self._spline_values = f['values']
                self._spline_tangents = f['tangents']
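Given values, tangents, and an x-coordinate scale, such a spline is evaluated by rescaling the query point into knot coordinates and applying cubic Hermite interpolation. The sketch below is only an illustration under the assumption of unit knot spacing, not the actual routine from robust_loss_jax:

import jax.numpy as jnp

def eval_spline(x, x_scale, values, tangents):
    # Cubic Hermite interpolation with knots assumed at t = 0, 1, 2, ...
    t = x * x_scale
    i = jnp.clip(jnp.floor(t).astype(int), 0, values.shape[0] - 2)
    u = t - i
    h00 = 2 * u**3 - 3 * u**2 + 1
    h10 = u**3 - 2 * u**2 + u
    h01 = -2 * u**3 + 3 * u**2
    h11 = u**3 - u**2
    return (h00 * values[i] + h10 * tangents[i]
            + h01 * values[i + 1] + h11 * tangents[i + 1])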
Example #21
def load_templates(initialization, lamgrid, input_dir, subsampling):
    if initialization == "rrpca":
        files = ["rrtemplate-galaxy.fits"]
        n_archetypes = 1
        pcacomponents_init = load_redrock_templates(lamgrid, 16, files=files)
        n_components = pcacomponents_init.shape[0]
    elif initialization == "datapca":
        n_archetypes = 1
        pcacomponents_init = np.load(input_dir +
                                     "/pca_init.npy")[:, ::subsampling]
        # print("Loaded", input_dir + "/pca_init.npy")
        assert lamgrid.size == pcacomponents_init.shape[1]
        n_components = pcacomponents_init.shape[0]
    elif initialization == "rrarch":
        files = ["rrarchetype-galaxy.fits"]
        n_components = 1
        pcacomponents_init = load_redrock_templates(lamgrid, 200, files=files)
        n_archetypes = pcacomponents_init.shape[0]
    elif initialization == "rrarchpca":
        n_archetypes = 1
        temp_wave = np.load("data/rrarchetype-galaxy-wave.npy")
        temp_components_ = np.load("data/rrarchetype-galaxy-pca.npy")
        n_components = temp_components_.shape[0]
        pcacomponents_init = onp.zeros((n_components, lamgrid.size))
        for i in range(n_components):
            pcacomponents_init[i, :] = scipy.interpolate.interp1d(
                temp_wave,
                temp_components_[i, :],
                kind="linear",
                bounds_error=False,
                fill_value="extrapolate",
                assume_sorted=True,
            )(lamgrid)
    else:
        print("Invalid initialization name:", initialization)
        stop(1)
    pcacomponents_init = pcacomponents_init.reshape(
        (n_archetypes, n_components, lamgrid.size))
    return pcacomponents_init
Example #22
def main():

    radii = np.linspace(0.0, 0.45, 20)

    N = 50
    r = np.load("../initFiles/lhd/lhd_r_surf.npy")
    nn = np.load("../initFiles/lhd/lhd_nn_surf.npy")
    sg = np.load("../initFiles/lhd/lhd_sg_surf.npy")
    surface_data = (r, nn, sg)

    def get_all_coil_data(filename):
        with tb.open_file(filename, "r") as f:
            coil_data = f.root.metadata[0]
            fc = np.asarray(f.root.coilSeries[:, :, :])
            fr = np.asarray(f.root.rotationSeries[:, :])  # NEEDS TO BE EDITED
            params = (fc, fr)
        return coil_data, params

    coil_data_fil, coil_params_fil = get_all_coil_data(
        "../../tests/lhd/scan/lhd_l0.hdf5")
    rs, zs = Poincare.getPoincarePoints(N, 0.0, radii, False, coil_data_fil,
                                        coil_params_fil)
    npo.save("rs_LHD_fil.npy", npo.asarray(rs))
    npo.save("zs_LHD_fil.npy", npo.asarray(zs))

    coil_data_fb, coil_params_fb = get_all_coil_data(
        "../../tests/lhd/scan/lhd_l4.hdf5")
    rs, zs = Poincare.getPoincarePoints(N, 0.0, radii, False, coil_data_fb,
                                        coil_params_fb)
    npo.save("rs_LHD_fb4.npy", npo.asarray(rs))
    npo.save("zs_LHD_fb4.npy", npo.asarray(zs))

    coil_data_fil, coil_params_fil = get_all_coil_data(
        "../../tests/lhd/scan/lhd_l0.hdf5")
    rs, zs = Poincare.getPoincarePoints(N, 0.0, radii, False, coil_data_fil,
                                        coil_params_fil)
    npo.save("rs_LHD_fil4.npy", npo.asarray(rs))
    npo.save("zs_LHD_fil4.npy", npo.asarray(zs))
Example #23
def download(run_id):
    api = wandb.Api()
    run = api.run(f'IsingModel/{run_id}')
    config = argparse.Namespace(**run.config)
    resdir = Path(f'results_mri/{run.name}_{run_id}')
    ckpt_file = run.file('checkpoint_last.pkl').download(resdir, replace=True)
    with open(ckpt_file.name, 'rb') as f:
        checkpoint = pickle.load(f)
    history = checkpoint['history']

    best_file = run.file('params_best.npy').download(resdir, replace=True)
    opt_params = jnp.load(best_file.name)

    return resdir, config, history, opt_params
Example #24
def load_params():

    params = dict()
    params["gh"] = np.load(this_dir / "1900_weights/rnn_mlstm_mlstm_gh:0.npy")
    params["gmh"] = np.load(this_dir /
                            "1900_weights/rnn_mlstm_mlstm_gmh:0.npy")
    params["gmx"] = np.load(this_dir /
                            "1900_weights/rnn_mlstm_mlstm_gmx:0.npy")
    params["gx"] = np.load(this_dir / "1900_weights/rnn_mlstm_mlstm_gx:0.npy")

    params["wh"] = np.load(this_dir / "1900_weights/rnn_mlstm_mlstm_wh:0.npy")
    params["wmh"] = np.load(this_dir /
                            "1900_weights/rnn_mlstm_mlstm_wmh:0.npy")
    params["wmx"] = np.load(this_dir /
                            "1900_weights/rnn_mlstm_mlstm_wmx:0.npy")
    params["wx"] = np.load(this_dir / "1900_weights/rnn_mlstm_mlstm_wx:0.npy")

    params["b"] = np.load(this_dir / "1900_weights/rnn_mlstm_mlstm_b:0.npy")

    return params
Example #25
def plot_embeddings(embedding_paths, titles, save_path):
    def outlier_mask(data, m=5.0):
        return jnp.all((data - jnp.median(data, keepdims=True))**2 <
                       m * jnp.std(data, keepdims=True),
                       axis=1)

    # Load the embeddings
    ys, us = [], []
    for path in embedding_paths:
        with np.load(path) as data:
            z, y, u = data['z'], data['y'], data['u']
            mask = outlier_mask(u)
            # ys.append(y[mask])
            # us.append(u[mask])
            ys.append(y)
            us.append(u)

            # df1 = pd.DataFrame()

    fig, axes = plt.subplots(1, 2)
    axes = axes.ravel()
    fig.set_size_inches(10, 5)

    us = [us[0], us[3]]
    ys = [ys[0], ys[3]]

    for i, (ax, u, y, title) in enumerate(zip(axes, us, ys, titles)):
        scatter = ax.scatter(*u.T, s=3.0, c=y, cmap='Spectral', alpha=0.6)
        ax.set_title(title, fontsize=20)
        ax.set_yticklabels([])
        ax.set_xticklabels([])
        ax.tick_params(axis='both', which='both', length=0)
        if (i == 0):
            ax.set_xlim(-5.5, 4.8)
            ax.set_ylim(5, 15)
        else:
            ax.set_xlim(-4, 7)
            ax.set_ylim(-6, 8)

    plt.subplots_adjust(wspace=0, hspace=0, left=0, right=1, bottom=0, top=1)
    cbar = fig.colorbar(scatter, boundaries=jnp.arange(11) - 0.5)
    cbar.set_ticks(jnp.arange(10))
    cbar.ax.set_yticklabels([
        'Airplane', 'Automobile', 'Bird', 'Cat', 'Deer', 'Dog', 'Frog',
        'Horse', 'Ship', 'Truck'
    ])
    cbar.ax.tick_params(labelsize=12)
    plt.savefig(save_path, bbox_inches='tight', format='pdf')
    plt.close()
Example #26
def Data_nh3(bool_norm=False):
    X = jnp.load('Data_nh3/nh3_cgeom.npy')
    y = jnp.load('Data_nh3/nh3_energy.npy')
    gX0 = jnp.load('Data_nh3/nh3_grad_energy_state_0.npy')
    gX1 = jnp.load('Data_nh3/nh3_grad_energy_state_1.npy')

    gXc = jnp.load('Data_nh3/nh3_grad_energy_state_1.npy')

    # Remove a known bad data point (index 1824)
    j0 = 1824
    X = onp.delete(X, j0, 0)
    y = onp.delete(y, j0, 0)
    gX0 = onp.delete(gX0, j0, 0)
    gX1 = onp.delete(gX1, j0, 0)
    gXc = onp.delete(gXc, j0, 0)

    gX = jnp.concatenate((gX0[None], gX1[None]), axis=0)
    # (n_samples, n_states, n_coords); a plain reshape would scramble the pairing
    gX = jnp.transpose(gX, (1, 0, 2))

    if bool_norm:
        y = normalize(y, axis=0)
        print(y)

    return X, gX, gXc, y
Example #27
    def retrieve_ntt_loss(self):
        # Retrieve the NTT loss result.

        ntk_distance_dict = {}

        target_distance_dict = {}

        for nn_density_level in self.ntt_setup_dict['NN_DENSITY_LEVEL_LIST']:

            vali_loss_array_runs = []
            for run_index in range(1, self.ntt_setup_dict['NUM_RUNS'] + 1):

                density_run_run_dir = '/density_' + str(
                    round(nn_density_level, 2)) + '/' + 'run_' + str(run_index)

                loss_array_fileName = '/loss_array_' + self.model_str + density_run_run_dir.replace(
                    '/', '_') + '.npy'

                vali_list = list(
                    np.load(self.ntt_result_path + density_run_run_dir +
                            loss_array_fileName,
                            allow_pickle=True))

                vali_loss_array_runs.append(vali_list)

            min_length = min(map(len, vali_loss_array_runs))
            vali_loss_array_runs = [
                sublist[-min_length:] for sublist in vali_loss_array_runs
            ]

            vali_loss_array_runs = np.array(vali_loss_array_runs)

            vali_ntk_distances = vali_loss_array_runs[:, :, 1]
            vali_target_distances = vali_loss_array_runs[:, :, 2]

            av_ntk_distance, trial_variation_ntk_distance = mean_and_var_across_trials(
                vali_ntk_distances)
            av_target_distance, trial_variation_target_distance = mean_and_var_across_trials(
                vali_target_distances)

            ntk_distance_dict[str(nn_density_level)] = (
                av_ntk_distance, trial_variation_ntk_distance)
            target_distance_dict[str(nn_density_level)] = (
                av_target_distance, trial_variation_target_distance)
        return ntk_distance_dict, target_distance_dict
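The `min_length` truncation above keeps only the trailing entries of each run so that runs of unequal length can be stacked into a single array:

runs = [[1, 2, 3, 4], [5, 6, 7]]
min_length = min(map(len, runs))
aligned = [r[-min_length:] for r in runs]  # [[2, 3, 4], [5, 6, 7]]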
Example #28
def data_preprocessing():
    """Separates data (spin configurations) into test and training sets and generates labels."""
    rng = random.PRNGKey(0)

    temperatures = jnp.linspace(1.0, 4.0, 7)
    temperatures1 = [1.0, 1.5, 3.0, 3.5, 4.0]
    temperatures2 = [2.0, 2.5]

    x_train = []
    y_train = []
    x_test = []
    y_test = []
    for T in temperatures:
        configs = jnp.load('data/spins_T%s.npy' % T)
        magnetization_density = jnp.abs(
            jnp.array([jnp.sum(config) / config.size for config in configs]))
        labels = jnp.where(magnetization_density < 0.5, 0, 1)
        if T in temperatures2:
            x_test.append(configs)
            y_test.append(labels)
        else:
            indices = random.permutation(rng, labels.size)
            y_test.append(labels[indices[:int(0.2 * labels.size)]])
            y_train.append(labels[indices[int(0.2 * labels.size):]])
            x_test.append(configs[indices[:int(0.2 * labels.size)]])
            x_train.append(configs[indices[int(0.2 * labels.size):]])

    y_test_new = jnp.array(y_test[0])
    x_test_new = jnp.array(x_test[0])
    for i in range(len(y_test) - 1):
        y_test_new = jnp.concatenate((y_test_new, y_test[i + 1]))
        x_test_new = jnp.concatenate((x_test_new, x_test[i + 1]))

    L = jnp.array(x_train).shape[2]
    x_test = jnp.array(x_test_new).reshape((-1, L, L, 1)).astype(jnp.float64)
    y_test = jnp.array(y_test_new).reshape((-1, 1))
    x_train = jnp.array(x_train).reshape((-1, L, L, 1)).astype(jnp.float64)
    y_train = jnp.array(y_train).reshape((-1, 1))

    jnp.save('data/x_test.npy', x_test)
    jnp.save('data/y_test.npy', y_test)
    jnp.save('data/x_train.npy', x_train)
    jnp.save('data/y_train.npy', y_train)

    return x_train, y_train, x_test, y_test
Example #29
def bin2d(x, y, npix=10, v=None, w=None, size=None, verbose=False):

    # Compute weighted bin count map
    wmap, xbins, ybins = jnp.histogram2d(x,
                                         y,
                                         bins=npix,
                                         range=[[-size / 2, size / 2],
                                                [-size / 2, size / 2]],
                                         weights=w)
    # Handle division by zero (i.e., empty pixels)
    #wmap = jax.ops.index_update(wmap, jax.ops.index[jnp.where(wmap==0)], jnp.inf)
    contours = jnp.load('../data/COSMOS/contours.npy')
    wmap = wmap + contours
    # Compute mean values per pixel
    result = (jnp.histogram2d(
        x, y, bins=npix,
        range=[[-size / 2, size / 2], [-size / 2, size / 2]],
        weights=(v * w))[0] / wmap).T

    return result
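The commented-out `jax.ops.index_update` line hints at the original guard against empty pixels; `jax.ops` has since been removed from JAX, and the idiomatic replacement is `jnp.where`. A hedged sketch of restoring that guard, assuming empty bins should divide to zero:

safe_wmap = jnp.where(wmap == 0, jnp.inf, wmap)  # empty pixels -> inf -> 0 after division
result = (jnp.histogram2d(x, y, bins=npix,
                          range=[[-size / 2, size / 2], [-size / 2, size / 2]],
                          weights=(v * w))[0] / safe_wmap).T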
Example #30
def load_supervised_training_result(supervised_result_file, wiring_str,
                                    nn_density_list, num_plots=-1,
                                    supervised_result_path='/tungstenfs/scratch/gzenke/liutian/nt_transfer/saved_data/supervised_results/'):
    result_dict = {}
    final_result_dict = {}

    # Note: for random wiring, layerwise and global pruning are equivalent,
    # since each parameter is pruned with an identical chance.

    for density_level in nn_density_list:

        model_path = supervised_result_path + supervised_result_file
        model_wiring_dir = supervised_result_file + '_' + wiring_str

        loadFile = (model_path + '/' + model_wiring_dir + '/density_' +
                    str(density_level) + '/supervised_trained_' +
                    supervised_result_file + '_' + 'density_' +
                    str(density_level))

        density_result = list(np.load(loadFile + '.npy', allow_pickle='TRUE'))

        density_str = str(density_result[0])

        av_train, trial_variation_train = mean_and_var_across_trials(
            density_result[1][:, :num_plots])
        av_test, trial_variation_test = mean_and_var_across_trials(
            density_result[2][:, :num_plots])

        result_density_dict = {
            'train': {'av': av_train, 'var': trial_variation_train},
            'test': {'av': av_test, 'var': trial_variation_test},
        }
        final_result_density_dict = {
            'train': {'av': av_train[-1], 'var': trial_variation_train[-1]},
            'test': {'av': av_test[-1], 'var': trial_variation_test[-1]},
        }

        result_dict[str(density_level)] = result_density_dict
        final_result_dict[str(density_level)] = final_result_density_dict

    return result_dict, final_result_dict