Example #1
    def test_simulation_gen(self):
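        # Simulate a trajectory with a fixed RNG seed, then check the final
        # timestamp and the per-axis mean of the absolute trajectory against
        # the stored reference values.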
        
        random.seed(123)
        time, trap, trajectory = datagen.generate(
            self.num_points, 0.005, self.k, (1., 1.), (1e-5, 2e-5),
            1e-6, 8.9e-4, self.phi, self.position, 293, 1, True)
        mean_traj = np.mean(np.fabs(trajectory), axis=0)

        self.assertTrue(np.isclose(time[self.num_points - 1], self.last_time, rtol=1e-05, atol=1e-08))
        self.assertTrue(np.allclose(mean_traj, self.expected_mean_traj_prob, rtol=1e-05, atol=1e-08))
Example #2
    def test_force_single(self):
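        # Generate a seeded test trajectory, then check the mean forces
        # computed by forcecalc.calculate against the stored reference values.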
        random.seed(123)

        time, traps, trajectories = datagen.generate(1000, 0.005, self.k,
                                                     (2, 1), (1e-6, 1e-6),
                                                     0.5e-6, 9.7e-4, self.phi,
                                                     (0., 0.), 300, 1, False)

        _, means = forcecalc.calculate(time, trajectories[:, 0:2],
                                       traps[:, 0:2], self.k, self.phi)

        self.assertTrue(
            np.allclose(means,
                        self.expected_means_calc,
                        rtol=1e-05,
                        atol=1e-08))
Example #3
import os
import torch
import pickle

from MeLU import MeLU
from options import config
from model_training import training
from data_generation import generate
from evidence_candidate import selection

if __name__ == "__main__":
    master_path = "/home/sunpeijie/files/task/out/melu/ml"
    if not os.path.exists("{}/".format(master_path)):
        os.mkdir("{}/".format(master_path))
        # preparing dataset. It needs about 22GB of your hard disk space.
        generate(master_path)

    # training model.
    melu = MeLU(config)
    model_filename = "{}/models.pkl".format(master_path)
    if not os.path.exists(model_filename):
        # Load training dataset.
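        # warm_state presumably stores four pickles per task (support x/y,
        # query x/y), hence the division by 4 below.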
        training_set_size = int(
            len(os.listdir("{}/warm_state".format(master_path))) / 4)
        supp_xs_s = []
        supp_ys_s = []
        query_xs_s = []
        query_ys_s = []
        for idx in range(training_set_size):
            supp_xs_s.append(
                pickle.load(
                    # NOTE: the pickle filenames below are assumed, following
                    # the warm_state support/query naming convention.
                    open("{}/warm_state/supp_x_{}.pkl".format(master_path, idx),
                         "rb")))
            supp_ys_s.append(
                pickle.load(
                    open("{}/warm_state/supp_y_{}.pkl".format(master_path, idx),
                         "rb")))
            query_xs_s.append(
                pickle.load(
                    open("{}/warm_state/query_x_{}.pkl".format(master_path, idx),
                         "rb")))
            query_ys_s.append(
                pickle.load(
                    open("{}/warm_state/query_y_{}.pkl".format(master_path, idx),
                         "rb")))
Example #4
import data_generation as dg
import draw as d
from algorithm.km_smote import Over_Sample
from algorithm.under_sample import Under_Sample


if __name__ == "__main__":
    generator = dg.DataGenerator(1000, 0.9)
    data, label = generator.generate()
    di = {"n_clusters": 10}
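    # Oversample with KM-SMOTE; the extra kwargs in `di` (presumably the
    # KMeans cluster count) are forwarded to the sampler.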
    osampler = Over_Sample(data=data, label=label, n=3, categorical_features=[], **di)
    syth = osampler.do_synthetic()
    print("syth leng", len(syth))
    syth_label = [1.0] * len(syth)
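    # Plot the original data and the synthetic samples as separate scatter plots.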
    dr = d.Drawer(data, label)
    dr.plot_scatter()
    dr1 = d.Drawer(syth, syth_label)
    dr1.plot_scatter()
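    # Undersample the majority class (the first 900 points, label 0.0) against
    # the synthetics, then plot the combined set.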
    under_sample = Under_Sample(major=data[0:900].tolist(), major_label=[0.] * 900,
                                synthetics=syth, synthetics_label=syth_label,
                                categorical_features=[], rate=0.5, **di)
    under = under_sample.do_undersample()
    print("under length", len(under))
    under_label = [0.0] * len(under)
    x = under + syth
    y = under_label + syth_label
    dr2 = d.Drawer(x, y)
    dr2.plot_scatter()