Example #1
def sample_trajectory_loglin(dataframe,
                             n_particles,
                             n_samples,
                             pseudo_obs=1.0,
                             alpha=0.5,
                             beta=0.5,
                             radius=None,
                             reset_cache=True,
                             **args):
    p = dataframe.shape[1]
    if radius is None:
        radius = p

    # The second level of the column MultiIndex holds the number of categories of each variable.
    n_levels = np.array(dataframe.columns.get_level_values(1), dtype=int)
    levels = np.array([range(l) for l in n_levels])

    sd = seqdist.LogLinearJTPosterior()
    sd.init_model(dataframe.get_values(), pseudo_obs, levels, {})
    return sample_trajectory(n_particles,
                             alpha,
                             beta,
                             radius,
                             n_samples,
                             sd,
                             reset_cache=reset_cache)
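
The dataframe is expected to carry the number of categories of each variable in the second level of its column MultiIndex, which is what get_level_values(1) reads above. A minimal usage sketch under that assumption (all argument values are illustrative, and sample_trajectory_loglin plus its module-level dependencies are assumed to be in scope):

import numpy as np
import pandas as pd

# Three binary variables; the second column-index level gives the number of levels per variable.
data = np.random.randint(0, 2, size=(200, 3))
df = pd.DataFrame(data,
                  columns=pd.MultiIndex.from_tuples([("x1", 2), ("x2", 2), ("x3", 2)]))

graph_traj = sample_trajectory_loglin(df,
                                      n_particles=50,
                                      n_samples=1000,
                                      pseudo_obs=1.0,
                                      alpha=0.5,
                                      beta=0.5)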
Example #2
def sample_trajectories_loglin_parallel(dataframe,
                                        n_samples,
                                        randomize=[1000],
                                        pseudo_obs=[1.0],
                                        reps=1,
                                        output_directory=".",
                                        **args):

    n_levels = np.array(dataframe.columns.get_level_values(1), dtype=int)
    levels = np.array([range(l) for l in n_levels])
    queue = multiprocessing.Queue()
    processes = []
    rets = []

    for _ in range(reps):
        for r in randomize:
            for T in n_samples:
                sd = seqdist.LogLinearJTPosterior()
                sd.init_model(dataframe.get_values(), pseudo_obs, levels)
                print("Starting: " + str((T, r, str(sd), True)))

                proc = Process(target=trajectory_to_queue,
                               args=(T, r, sd, queue, True))
                proc.start()
                processes.append(proc)
                time.sleep(2)  # stagger worker start-up

    for _ in processes:
        ret = queue.get()  # blocks until a worker reports; drain before join() so the children can exit
        rets.append(ret)
    for p in processes:
        p.join()

    return rets
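
A hedged usage sketch for this parallel variant, with df built as in the sketch under Example #1 and purely illustrative grids. One worker process is started per (rep, randomize, n_samples) combination, and the results are drained from the queue before the processes are joined:

results = sample_trajectories_loglin_parallel(df,
                                              n_samples=[1000, 5000],
                                              randomize=[100, 1000],
                                              pseudo_obs=[1.0],
                                              reps=2)
# 2 reps x 2 randomize values x 2 sample counts = 8 worker processes / trajectories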
Example #3
def sample_trajectory_loglin(dataframe, n_samples, pseudo_obs=1.0,
                             randomize=1000, cache={}, **args):

    n_levels = np.array(dataframe.columns.get_level_values(1), dtype=int)
    levels = np.array([range(l) for l in n_levels])
    sd = seqdist.LogLinearJTPosterior()
    sd.init_model(dataframe.get_values(), pseudo_obs, levels, cache_complete_set_prob=cache)

    return sample_trajectory(n_samples, randomize, sd)
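
This variant exposes the dictionary used to cache complete-set probabilities, so repeated calls on the same data can share it. A hedged sketch (df as in the earlier sketch; sharing one dictionary across calls is only an illustration of how the parameter could be used):

shared_cache = {}
traj_a = sample_trajectory_loglin(df, n_samples=1000, pseudo_obs=1.0,
                                  randomize=100, cache=shared_cache)
traj_b = sample_trajectory_loglin(df, n_samples=1000, pseudo_obs=1.0,
                                  randomize=1000, cache=shared_cache)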
Example #4
def sample_trajectories_loglin_parallel(dataframe,
                                        n_particles,
                                        n_samples,
                                        pseudo_observations=[1.0],
                                        alphas=[0.5],
                                        betas=[0.5],
                                        radii=[None],
                                        reset_cache=True,
                                        reps=1,
                                        output_directory=".",
                                        **args):
    p = dataframe.shape[1]
    if radii == [None]:
        radii = [p]

    n_levels = np.array(dataframe.columns.get_level_values(1), dtype=int)
    levels = np.array([range(l) for l in n_levels])
    queue = multiprocessing.Queue()
    processes = []
    rets = []

    cache = {}
    for _ in range(reps):
        for N in n_particles:
            for T in n_samples:
                for rad in radii:
                    for alpha in alphas:
                        for beta in betas:
                            for pseudo_obs in pseudo_observations:
                                sd = seqdist.LogLinearJTPosterior()
                                sd.init_model(dataframe.get_values(),
                                              pseudo_obs,
                                              levels,
                                              cache_complete_set_prob=cache)
                                print("Starting: " +
                                      str((N, T, alpha, beta, rad, str(sd),
                                           reset_cache, output_directory,
                                           True)))

                                proc = Process(target=trajectory_to_queue,
                                               args=(N, T, alpha, beta, rad,
                                                     sd, queue, reset_cache,
                                                     True))
                                proc.start()
                                processes.append(proc)
                                time.sleep(2)

    for _ in processes:
        ret = queue.get()  # will block
        rets.append(ret)
    for p in processes:
        p.join()

    return rets
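
A hedged usage sketch for this grid version; every combination of the supplied lists spawns its own worker process (reps x particles x samples x radii x alphas x betas x pseudo-observations), each staggered by the two-second sleep above, so the grids are best kept small. All values are illustrative:

results = sample_trajectories_loglin_parallel(df,
                                              n_particles=[50, 100],
                                              n_samples=[1000],
                                              alphas=[0.2, 0.5],
                                              betas=[0.5, 0.8],
                                              pseudo_observations=[1.0],
                                              reps=1)
# 1 x 2 x 1 x 1 x 2 x 2 x 1 = 8 worker processes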
Example #5
    def from_json(self, mcmc_json):
        # Rebuild the graph trajectory from networkx node-link JSON data.
        graphs = [json_graph.node_link_graph(js_graph)
                  for js_graph in mcmc_json["trajectory"]]

        self.set_trajectory(graphs)
        self.set_time(mcmc_json["run_time"])
        self.optional = mcmc_json["optional"]
        self.sampling_method = mcmc_json["sampling_method"]
        if mcmc_json["model"]["name"] == "ggm_jt_post":
            self.seqdist = sd.GGMJTPosterior()
        elif mcmc_json["model"]["name"] == "loglin_jt_post":
            self.seqdist = sd.LogLinearJTPosterior()

        self.seqdist.init_model_from_json(mcmc_json["model"])
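
A hedged sketch of how from_json might be driven, assuming a trajectory was previously serialized in this JSON layout. The Trajectory class name and the file name are hypothetical placeholders; only the method above comes from the source:

import json

with open("trajectory.json") as f:   # hypothetical file produced by a matching serializer
    mcmc_json = json.load(f)

traj = Trajectory()                  # hypothetical container class defining from_json
traj.from_json(mcmc_json)
print(traj.sampling_method, traj.optional)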
Example #6
def sample_trajectories_loglin_to_file(dataframe, n_samples, randomize=[1000], pseudo_obs=[1.0],
                                       reps=1, output_directory=".", **args):

    n_levels = np.array(dataframe.columns.get_level_values(1), dtype=int)
    levels = np.array([range(l) for l in n_levels])

    graph_trajectories = []
    for _ in range(reps):
        for r in randomize:
            for T in n_samples:
                sd = seqdist.LogLinearJTPosterior()
                sd.init_model(dataframe.get_values(), pseudo_obs, levels)

                graph_trajectory = trajectory_to_file(T, r, sd, dir=output_directory)
                graph_trajectories.append(graph_trajectory)
    return graph_trajectories
Example #7
def sample_trajectories_loglin_to_file(dataframe,
                                       n_particles,
                                       n_samples,
                                       pseudo_observations=[1.0],
                                       alphas=[0.5],
                                       betas=[0.5],
                                       radii=[None],
                                       reset_cache=True,
                                       reps=1,
                                       output_directory=".",
                                       output_filename="trajectory.json",
                                       **args):
    p = dataframe.shape[1]
    if radii == [None]:
        radii = [p]

    n_levels = np.array(dataframe.columns.get_level_values(1), dtype=int)
    levels = np.array([range(l) for l in n_levels])
    graph_trajectories = []
    cache = {}
    for _ in range(reps):
        for N in n_particles:
            for T in n_samples:
                for rad in radii:
                    for alpha in alphas:
                        for beta in betas:
                            for pseudo_obs in pseudo_observations:
                                sd = seqdist.LogLinearJTPosterior()
                                sd.init_model(dataframe.get_values(),
                                              pseudo_obs,
                                              levels,
                                              cache_complete_set_prob=cache)
                                graph_trajectory = trajectory_to_file(
                                    N,
                                    T,
                                    alpha,
                                    beta,
                                    rad,
                                    sd,
                                    reset_cache=reset_cache,
                                    output_filename=output_filename,
                                    dir=output_directory)

                                graph_trajectories.append(graph_trajectory)
    return graph_trajectories
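
A hedged sketch for this file-writing variant; each grid combination is run sequentially, written under output_directory via trajectory_to_file, and also returned. Values are illustrative:

trajs = sample_trajectories_loglin_to_file(df,
                                           n_particles=[100],
                                           n_samples=[5000],
                                           pseudo_observations=[0.5, 1.0],
                                           reps=2,
                                           output_directory="./results",
                                           output_filename="trajectory.json")
# 2 reps x 2 pseudo-observation values = 4 trajectories written and returned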