Example #1
def individuals_location(j):
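    # os, config, no_of_runs and the build_* dependency jobs are imported or
    # defined at module level in the source project.
    # Generator: a Product over ten alpha_v values, repeated no_of_runs times
    # by RepeatG.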
    j.generator(
        RepeatG(
            Product(
                alpha_v=[0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.99]),
            no_of_runs,
        ))

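    # Processor: execute the generated runs in parallel, one thread per CPU
    # core unless "cpu_count" is overridden in the user config.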
    j.processor(Threading(config.User.get("cpu_count", os.cpu_count())))

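    # Runner: call the individuals_location executable produced by the build
    # dependency, passing alpha_v as a process argument and capturing stdout.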
    j.runner(
        SubprocessRunner.factory(
            j.dependencies[build_individuals_location].artefacts["executable"],
            input=ProcessArgs(P("alpha_v")),
            output=Stdout(),
        ))

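    # Collector: demux the captured output by alpha_v and concatenate each
    # group into its own individuals_location.out.<alpha_v>.tsv file.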
    coll = j.collector(
        Demux(["alpha_v"],
              Factory(
                  Concatenate,
                  file_path=Join("individuals_location.out.", P("alpha_v"),
                                 ".tsv"),
              )))

    j.artefact(result=coll.aggregate)

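    # Post hooks: report the written result files, then close them.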
    j.after(
        lambda j: print("Wrote results to",
                        list(map(lambda f: f.name, j.artefacts["result"]))))
    j.after(lambda j: list(map(lambda f: f.close(), j.artefacts["result"])))
Example #2
def gomoryhu_aggregation(j):
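    # Single sweep over alpha_v; every other parameter is pinned to one value,
    # and each combination runs exactly once (no RepeatG here).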
    j.generator(
        Product(
            k=[240],
            hhc=[0.01],
            k_over_l=[3],
            n=[10000],
            alpha_v=[0.7, 0.8, 0.85, 0.9, 0.93, 0.95, 0.96, 0.97, 0.98, 0.99],
            timesteps=[100],
            d_timesteps=[300]))

    j.processor(Threading(config.User.get("cpu_count", os.cpu_count())))

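    # The third process argument, l, is derived as k // k_over_l
    # (240 // 3 == 80); the same expression recurs below in the output
    # file name.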
    j.runner(
        SubprocessRunner.factory(
            j.dependencies[build_gomoryhu_aggregation].artefacts["executable"],
            input=ProcessArgs(P("k"), P("hhc"),
                              Function(lambda p: p["k"] // p["k_over_l"]),
                              P("n"), P("alpha_v"), P("timesteps"),
                              P("d_timesteps")),
            output=Stdout(),
        ))

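    # Every swept parameter is part of the Demux key, so each parameter
    # combination is concatenated into its own .out file.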
    coll = j.collector(
        Demux([
            "k", "hhc", "k_over_l", "n", "alpha_v", "timesteps", "d_timesteps"
        ],
              Factory(
                  Concatenate,
                  file_path=Join("gomoryhu_aggregation", "_k", P("k"), "_c",
                                 P("hhc"), "_l",
                                 Function(lambda p: p["k"] // p["k_over_l"]),
                                 "_n", P("n"), "_av", P("alpha_v"), "_t",
                                 P("timesteps"), "_d", P("d_timesteps"),
                                 ".out"),
              )))

    j.artefact(result=coll.aggregate)

    j.after(
        lambda j: print("Wrote results to",
                        list(map(lambda f: f.name, j.artefacts["result"]))))
    j.after(lambda j: list(map(lambda f: f.close(), j.artefacts["result"])))
Example #3
def cluster_impurity(j):
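    # Sweep hhc for fixed k and k_over_l (the commented-out lists are the
    # larger sweeps), repeating each combination no_of_runs times; results are
    # collected per (k, k_over_l, hhc) into space-delimited .tsv files.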
    j.generator(
        RepeatG(
            Product(
                # k=[30, 100, 500, 1000],
                k=[500],
                # k_over_l=[3, 5, 10, 20],
                k_over_l=[20],
                hhc=[0.1, 0.3, 0.5, 0.7, 0.9],
            ),
            no_of_runs,
        ))

    j.processor(Threading(config.User.get("cpu_count", os.cpu_count())))

    j.runner(
        SubprocessRunner.factory(
            j.dependencies[build_cluster_impurity].artefacts["executable"],
            input=ProcessArgs(P("k"),
                              Function(lambda p: p["k"] // p["k_over_l"]),
                              P("hhc")),
            output=Stdout(),
        ))

    coll = j.collector(
        Demux(["k", "k_over_l", "hhc"],
              Factory(
                  CSV,
                  file_path=Join("cluster_impurity.out.", P("k"), ".",
                                 P("k_over_l"), ".", P("hhc"), ".tsv"),
                  input_csv_args=dict(delimiter=' '),
                  output_csv_args=dict(delimiter=' '),
              )))

    j.artefact(result=coll.aggregate)

    j.after(
        lambda j: print("Wrote results to",
                        list(map(lambda f: f.name, j.artefacts["result"]))))
    j.after(lambda j: list(map(lambda f: f.close(), j.artefacts["result"])))
Example #4
def subcluster_individuals(j):
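    # Sweep alpha_v for fixed k and n, repeated no_of_runs times; results are
    # collected per (k, n, alpha_v) into space-delimited .tsv files.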
    j.generator(
        RepeatG(
            Product(
                # k=[30, 100, 500, 1000],
                k=[500],
                # n=[500, 1000, 2000, 5000],
                n=[5000],
                alpha_v=[0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.99]),
            no_of_runs,
        ))

    j.processor(Threading(config.User.get("cpu_count", os.cpu_count())))

    j.runner(
        SubprocessRunner.factory(
            j.dependencies[build_subcluster_individuals].
            artefacts["executable"],
            input=ProcessArgs(P("k"), P("n"), P("alpha_v")),
            output=Stdout(),
        ))

    coll = j.collector(
        Demux(["k", "n", "alpha_v"],
              Factory(
                  CSV,
                  file_path=Join("subcluster_individuals.out.", P("k"), ".",
                                 P("n"), ".", P("alpha_v"), ".tsv"),
                  input_csv_args=dict(delimiter=' '),
                  output_csv_args=dict(delimiter=' '),
              )))

    j.artefact(result=coll.aggregate)

    j.after(
        lambda j: print("Wrote results to",
                        list(map(lambda f: f.name, j.artefacts["result"]))))
    j.after(lambda j: list(map(lambda f: f.close(), j.artefacts["result"])))