Example #1
0
def _run_merge_jobs(tasks):
    """Run each LAmerge job serially and collect the merged *.las paths.

    tasks -- dict{p_id: (merge_args, las_bfn)}; each merge_args must contain
             'merge_subdir' (consumed here) and is otherwise forwarded to
             support.run_las_merge(**merge_args).
    Returns a list of '<run_dir>/<las_bfn>' paths, one per partition.

    Fixes over the original: PEP 8 4-space loop indentation (body was
    over-indented to 8), consistent spacing around the % operator, and a
    list literal instead of list().
    """
    fns = []
    for p_id, (merge_args, las_bfn) in tasks.items():
        run_dir = merge_args['merge_subdir']
        job_done = "merge_%05d_done" % p_id
        script_fn = os.path.join(run_dir, "merge_%05d.sh" % p_id)
        merge_args['job_done'] = job_done
        merge_args['script_fn'] = script_fn
        del merge_args['merge_subdir']  # was just a temporary hack
        support.run_las_merge(**merge_args)
        # NOTE(review): a string command with shell=False relies on run_cmd's
        # own argument handling — presumably it splits; confirm in run_cmd.
        run_cmd('bash %s' % script_fn, sys.stdout, sys.stderr, shell=False)
        fns.append(os.path.join(run_dir, las_bfn))
    return fns  # *.las
Example #2
0
def _run_merge_jobs(tasks):
    """Execute every merge job in *tasks* and return the resulting las paths.

    tasks -- dict{p_id: (merge_args, las_bfn)}; 'merge_subdir' is popped from
             merge_args here, the rest is passed to support.run_las_merge.
    Returns one '<run_dir>/<las_bfn>' path per partition id.
    """
    merged_paths = []
    for part_id, (args, las_basename) in tasks.items():
        work_dir = args['merge_subdir']
        args['job_done'] = "merge_%05d_done" % part_id
        args['script_fn'] = os.path.join(work_dir, "merge_%05d.sh" % part_id)
        del args['merge_subdir']  # was just a temporary hack
        support.run_las_merge(**args)
        run_cmd('bash %s' % args['script_fn'], sys.stdout, sys.stderr,
                shell=False)
        merged_paths.append(os.path.join(work_dir, las_basename))
    return merged_paths  # *.las
Example #3
0
def _run_merge_jobs(tasks, dry_run=False, merge_json_fn=None):
    """Script (and optionally run) the per-partition LAmerge jobs.

    tasks         -- dict{p_id: (merge_args, las_bfn)}
    dry_run       -- if True, generate the scripts but do not run them
    merge_json_fn -- if not None, write dict{p_id->merge_args} to it;
                     otherwise 'merge_jobs.json' in the cwd is used

    Returns a list of '<run_dir>/<las_bfn>' paths, one per partition.

    BUGFIX: json_fn was previously assigned inside the loop body, so an
    empty *tasks* dict raised NameError at the open() below. It is now
    computed once before the loop.
    """
    fns = list()
    json_data = dict()
    json_fn = 'merge_jobs.json' if merge_json_fn is None else merge_json_fn
    for p_id, (merge_args, las_bfn) in tasks.items():
        run_dir = merge_args['merge_subdir']
        job_done = "merge_%05d_done" % p_id
        script_fn = os.path.join(run_dir, "merge_%05d.sh" % p_id)
        merge_args['job_done'] = job_done
        merge_args['script_fn'] = script_fn
        del merge_args['merge_subdir']  # was just a temporary hack
        # merge_args <- dict{
        # 'job_done'  : 'merge_00001_done',
        # 'script_fn' : 'merge_00001.sh',
        # 'script'    : 'LAmerge -v ...',
        # 'config'    : config}
        support.run_las_merge(**merge_args)
        mkdir(run_dir)
        with cd(run_dir):
            if dry_run is False:
                run_cmd('bash %s' % os.path.basename(script_fn),
                        sys.stdout,
                        sys.stderr,
                        shell=False)
        fns.append(os.path.join(run_dir, las_bfn))

        # add script_dir to args for scattered tasks to work in the correct dir
        json_data[p_id] = {
            'script_dir': os.path.join(os.getcwd(),
                                       run_dir),  # e.g. '/pbi/.../m_00001'
            'script_fn': os.path.basename(script_fn)  # e.g. 'merge_00001.sh'
        }
    # Write dict{p_id: dict{'script_fn':script_fn, 'script_dir':script_dir}} to a json file
    with open(json_fn, 'w') as writer:
        writer.write(json.dumps(json_data) + "\n")

    return fns  # *.las, e.g., ['m_00001/raw_reads.1.las', 'm_00002/raw_reads.2.las', 'm_00003/raw_reads.3.las']
def _run_merge_jobs(tasks, dry_run=False, merge_json_fn=None):
    """Script (and optionally run) the per-partition LAmerge jobs.

    tasks         -- dict{p_id: (merge_args, las_bfn)}
    dry_run       -- if True, generate the scripts but do not run them
    merge_json_fn -- if not None, write dict{p_id->merge_args} to it;
                     otherwise 'merge_jobs.json' in the cwd is used

    Returns a list of '<run_dir>/<las_bfn>' paths, one per partition.

    BUGFIX: json_fn was previously assigned inside the loop body, so an
    empty *tasks* dict raised NameError at the open() below. It is now
    computed once before the loop.
    """
    fns = list()
    json_data = dict()
    json_fn = "merge_jobs.json" if merge_json_fn is None else merge_json_fn
    for p_id, (merge_args, las_bfn) in tasks.items():
        run_dir = merge_args["merge_subdir"]
        job_done = "merge_%05d_done" % p_id
        script_fn = os.path.join(run_dir, "merge_%05d.sh" % p_id)
        merge_args["job_done"] = job_done
        merge_args["script_fn"] = script_fn
        del merge_args["merge_subdir"]  # was just a temporary hack
        # merge_args <- dict{
        # 'job_done'  : 'merge_00001_done',
        # 'script_fn' : 'merge_00001.sh',
        # 'script'    : 'LAmerge -v ...',
        # 'config'    : config}
        support.run_las_merge(**merge_args)
        mkdir(run_dir)
        with cd(run_dir):
            if dry_run is False:
                run_cmd("bash %s" % os.path.basename(script_fn), sys.stdout, sys.stderr, shell=False)
        fns.append(os.path.join(run_dir, las_bfn))

        # add script_dir to args for scattered tasks to work in the correct dir
        json_data[p_id] = {
            "script_dir": os.path.join(os.getcwd(), run_dir),  # e.g. '/pbi/.../m_00001'
            "script_fn": os.path.basename(script_fn),  # e.g. 'merge_00001.sh'
        }
    # Write dict{p_id: dict{'script_fn':script_fn, 'script_dir':script_dir}} to a json file
    with open(json_fn, "w") as writer:
        writer.write(json.dumps(json_data) + "\n")

    return fns  # *.las, e.g., ['m_00001/raw_reads.1.las', 'm_00002/raw_reads.2.las', 'm_00003/raw_reads.3.las']