Example 1
import random
import time

import progressbar


def multi_progress_bar_example(left=True):
    jobs = [
        # Each job takes between 1 and 10 steps to complete
        [0, random.randint(1, 10)] for i in range(25)  # 25 jobs total
    ]

    widgets = [
        progressbar.Percentage(),
        ' ',
        progressbar.MultiProgressBar('jobs', fill_left=left),
    ]

    max_value = sum(total for progress, total in jobs)
    with progressbar.ProgressBar(widgets=widgets, max_value=max_value) as bar:
        while True:
            incomplete_jobs = [
                idx for idx, (progress, total) in enumerate(jobs)
                if progress < total
            ]
            if not incomplete_jobs:
                break
            which = random.choice(incomplete_jobs)
            jobs[which][0] += 1
            progress = sum(progress for progress, total in jobs)

            bar.update(progress, jobs=jobs, force=True)
            time.sleep(0.02)
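
For reference, here is a minimal sketch of the same widget driven by a fixed round-robin schedule instead of random job selection. It only assumes the progressbar2 API already used above; the two-job layout is illustrative.

import time

import progressbar

# Two jobs, each tracked as a [progress, total] pair; the bar's value is
# the sum of all job progress, so max_value is the sum of all totals.
jobs = [[0, 5], [0, 5]]
widgets = [
    progressbar.Percentage(),
    ' ',
    progressbar.MultiProgressBar('jobs'),
]
with progressbar.ProgressBar(widgets=widgets, max_value=10) as bar:
    for step in range(10):
        jobs[step % 2][0] += 1  # advance the two jobs in alternation
        bar.update(step + 1, jobs=jobs, force=True)
        time.sleep(0.05)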
Example 2
import progressbar
import pytest


def test_multi_progress_bar_out_of_range():
    widgets = [
        progressbar.MultiProgressBar('multivalues'),
    ]

    bar = progressbar.ProgressBar(widgets=widgets, max_value=10)
    with pytest.raises(ValueError):
        bar.update(multivalues=[123])

    with pytest.raises(ValueError):
        bar.update(multivalues=[-1])
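
For contrast, here is a hypothetical companion test sketching values the widget does accept. It assumes progressbar2 treats bare per-job values as fractions in [0, 1], while (progress, total) pairs as in Example 1 are normalized internally; the test name is illustrative.

def test_multi_progress_bar_in_range():
    widgets = [
        progressbar.MultiProgressBar('multivalues'),
    ]

    bar = progressbar.ProgressBar(widgets=widgets, max_value=10)
    # (progress, total) pairs, as used in Example 1, are accepted...
    bar.update(1, multivalues=[[3, 10], [7, 10]])
    # ...and so are bare fractions in [0, 1] (assumed semantics).
    bar.update(2, multivalues=[0.0, 0.5, 1.0])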
Example 3
import logging
import os
import pickle

import numpy as np
import progressbar

# FileInput, PathwayMap, ExprMatrix, JoinedMatrix, JoinedOrthoMatrix,
# RunDict, SeidrWrapper, get_persistent_tempfile and
# remove_persistent_tempfile are helpers from this example's own project.


def main(args):
    if args.threads:
        os.environ["OMP_NUM_THREADS"] = str(args.threads)

    files = FileInput(
        args.first_expr,
        args.second_expr,
        args.first_header,
        args.second_header,
        args.map,
    )
    logging.info("Validating files...")
    files.validate()

    logging.info("Parsing pathway map {}".format(files.pw_map))
    pw_map = PathwayMap(files.pw_map, args.min_pathway_membership,
                        args.max_pathway_membership)
    logging.info("Have {} pathways".format(len(pw_map.map)))

    logging.info("Reading expression/headers of {} / {}".format(
        files.first, files.first_headers))
    X1 = ExprMatrix(files.first, files.first_headers)

    logging.info("Reading expression/headers of {} / {}".format(
        files.second, files.second_headers))
    X2 = ExprMatrix(files.second, files.second_headers)

    if args.ortho:
        X = JoinedOrthoMatrix(X1.data, X2.data, X1.headers, X2.headers,
                              args.ortho)
    else:
        X = JoinedMatrix(X1.data, X2.data, X1.headers, X2.headers)

    if args.pathway:
        pathways = args.pathway.split(",")
    else:
        pathways = list(pw_map.map)

    out_data = {}

    for pw in pathways:

        tmp = os.path.join(args.tempdir, pw)

        logging.info("Starting pathway {}".format(pw))
        logging.info("{} genes in this pathway".format(len(pw_map.map[pw])))
        X1_, X2_, header = X.get_subset(pw_map.map[pw])
        logging.info("{} genes with expression data\n".format(len(header)))

        if len(header) < args.min_pathway_membership:
            logging.warning(
                "Only {} genes with expression data in pathway {}. Skipping...\n"
                .format(len(header), pw))
            continue
        if len(header) > args.max_pathway_membership:
            logging.warning(
                "More than {} ({}) genes with expression data in pathway {}. Skipping...\n"
                .format(args.max_pathway_membership, len(header), pw))
            continue

        handle_g = get_persistent_tempfile(return_open_handle=True,
                                           n_dir=tmp,
                                           prefix="genes")
        for g in header:
            handle_g.write("{}\n".format(g))
        handle_g.close()

        nboot = args.permutations
        nref = args.reference_runs
        p = args.p_norm

        rd = RunDict(header, pw, nboot)

        # Number of inference algorithms run per expression matrix: 4 with
        # --fastest, 6 with --faster, 11 for the full ensemble, plus 2 for
        # the aggregate and adjacency steps.
        na = 4
        if not args.fastest:
            na = 6
            if not args.faster:
                na = 11
        na += 2

        widgets = [
            progressbar.Counter(), "/",
            str((nboot + nref) * na * 2), "|",
            progressbar.Percentage(), "|",
            progressbar.MultiProgressBar("jobs"), "|",
            progressbar.Timer(), "|",
            progressbar.ETA()
        ]
        jobs = [[0, na * 2] for _ in range(nboot + nref)]
        with progressbar.ProgressBar(max_value=(nboot + nref) * na * 2,
                                     widgets=widgets) as bar:
            for i in range(nboot + nref):
                X.row_index = X.row_index_ori.copy()
                if i >= nref:
                    X.permute()
                else:
                    X.reset()

                tmp = os.path.join(args.tempdir, pw, str(i))

                if args.keep_input:
                    row_index = X.row_index
                    handle_ind = get_persistent_tempfile(
                        return_open_handle=True, n_dir=tmp, prefix="row_index")
                    for row_i in row_index:
                        handle_ind.write(str(row_i) + "\n")
                    handle_ind.close()
                    # The second argument is the keep flag, so the written
                    # row_index file is preserved on disk.
                    remove_persistent_tempfile(handle_ind.name, True)

                X1_, X2_, header = X.get_subset(pw_map.map[pw])
                logging.debug("Shapes after subset: {} || {}".format(
                    X1_.shape, X2_.shape))
                handle_1 = get_persistent_tempfile(return_open_handle=True,
                                                   n_dir=tmp,
                                                   prefix="expr1")
                handle_2 = get_persistent_tempfile(return_open_handle=True,
                                                   n_dir=tmp,
                                                   prefix="expr2")

                np.savetxt(handle_1, X1_, delimiter="\t")
                np.savetxt(handle_2, X2_, delimiter="\t")

                handle_1.close()
                handle_2.close()

                wrapper_1 = SeidrWrapper(
                    handle_1.name,
                    handle_g.name,
                    str(args.threads),
                    X1_.shape,
                    args.ensemble,
                    fail_on_error=args.strict,
                    tmpdir=os.path.join(tmp, "left"),
                    keep_adjacencies=args.keep_adjacencies,
                    keep_aggregate=args.keep_aggregate,
                    keep_seidrfiles=args.keep_seidrfiles,
                    keep_result_files=args.keep_results,
                )
                wrapper_2 = SeidrWrapper(
                    handle_2.name,
                    handle_g.name,
                    str(args.threads),
                    X2_.shape,
                    args.ensemble,
                    fail_on_error=args.strict,
                    tmpdir=os.path.join(tmp, "right"),
                    keep_adjacencies=args.keep_adjacencies,
                    keep_aggregate=args.keep_aggregate,
                    keep_seidrfiles=args.keep_seidrfiles,
                    keep_result_files=args.keep_results,
                )

                def run_step(step):
                    # Run one inference step, then advance this job's
                    # progress and redraw the multi-progress bar.
                    step()
                    jobs[i][0] += 1
                    bar.update(sum(x[0] for x in jobs), jobs=jobs,
                               force=True)

                for wrapper in (wrapper_1, wrapper_2):
                    for step in (wrapper.pearson, wrapper.spearman,
                                 wrapper.pcor, wrapper.tomsimilarity):
                        run_step(step)
                    if not args.fastest:
                        run_step(wrapper.mi)
                        run_step(wrapper.narromi)
                        if not args.faster:
                            for step in (wrapper.plsnet, wrapper.llr,
                                         wrapper.tigress, wrapper.genie3,
                                         wrapper.elnet):
                                run_step(step)
                    run_step(wrapper.aggregate)
                    run_step(wrapper.adjacency)

                rd.update(wrapper_1, wrapper_2, nref, p, i)

                remove_persistent_tempfile(handle_1.name, args.keep_input)
                remove_persistent_tempfile(handle_2.name, args.keep_input)
                wrapper_1.clean()
                wrapper_2.clean()
            bar.finish()
            remove_persistent_tempfile(handle_g.name, args.keep_input)
            # Calculate P values as in the paper
            rd.calculate_p()
            rd.print()
            out_data[pw] = rd

    with open("out_data.pickle", "wb") as od:
        pickle.dump(out_data, od)
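
To inspect the results later, the pickle can be read back in a separate session. A minimal sketch, assuming the project's RunDict class is importable so unpickling succeeds:

import pickle

# RunDict must be importable for pickle to reconstruct the objects.
with open("out_data.pickle", "rb") as fh:
    out_data = pickle.load(fh)

for pw, rd in out_data.items():
    print("Pathway:", pw)
    rd.print()  # RunDict.print() is used above to report a finished run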