Example #1
def run_spades(configs_dir, execution_home, cfg, dataset_data,
               ext_python_modules_home, log):
    if not isinstance(cfg.iterative_K, list):
        cfg.iterative_K = [cfg.iterative_K]
    cfg.iterative_K = sorted(cfg.iterative_K)
    used_K = []

    # checking and removing conflicting K-mer directories
    if options_storage.restart_from and (options_storage.restart_k_mers !=
                                         options_storage.original_k_mers):
        processed_K = []
        for k in range(options_storage.MIN_K, options_storage.MAX_K, 2):
            cur_K_dir = os.path.join(cfg.output_dir, "K%d" % k)
            if os.path.isdir(cur_K_dir) and os.path.isfile(
                    os.path.join(cur_K_dir, "final_contigs.fasta")):
                processed_K.append(k)
        if processed_K:
            RL = get_read_length(cfg.output_dir, processed_K[0],
                                 ext_python_modules_home, log)
            needed_K = update_k_mers_in_special_cases(cfg.iterative_K,
                                                      RL,
                                                      log,
                                                      silent=True)
            needed_K = [k for k in needed_K if k < RL]
            original_K = reveal_original_k_mers(RL)

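            # keep the longest prefix of already-processed Ks that matches the
            # new K list; everything after the first mismatch must be redone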
            k_to_delete = []
            for idx, k in enumerate(needed_K):
                if len(processed_K) == idx:
                    # the last K in the original run was processed in "last_one" mode
                    if processed_K[-1] == original_K[-1]:
                        k_to_delete = [original_K[-1]]
                    break
                if processed_K[idx] != k:
                    k_to_delete = processed_K[idx:]
                    break
            if not k_to_delete and (len(processed_K) > len(needed_K)):
                k_to_delete = processed_K[len(needed_K) - 1:]
            if k_to_delete:
                log.info(
                    "Restart mode: removing previously processed directories for K=%s "
                    "to avoid conflicts with K specified with --restart-from" %
                    (str(k_to_delete)))
                for k in k_to_delete:
                    shutil.rmtree(os.path.join(cfg.output_dir, "K%d" % k))

    bin_reads_dir = os.path.join(cfg.output_dir, ".bin_reads")
    if os.path.isdir(bin_reads_dir) and not options_storage.continue_mode:
        shutil.rmtree(bin_reads_dir)
    cfg.tmp_dir = support.get_tmp_dir(prefix="spades_")

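    # run one assembly iteration per K value; the final iteration is flagged
    # as "last_one" and additionally performs repeat resolution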
    finished_on_stop_after = False
    K = cfg.iterative_K[0]
    if len(cfg.iterative_K) == 1:
        run_iteration(configs_dir, execution_home, cfg, log, K, None, True)
        used_K.append(K)
    else:
        run_iteration(configs_dir, execution_home, cfg, log, K, None, False)
        used_K.append(K)
        if options_storage.stop_after == "k%d" % K:
            finished_on_stop_after = True
        else:
            prev_K = K
            RL = get_read_length(cfg.output_dir, K, ext_python_modules_home,
                                 log)
            cfg.iterative_K = update_k_mers_in_special_cases(
                cfg.iterative_K, RL, log)
            if len(cfg.iterative_K) < 2 or cfg.iterative_K[1] + 1 > RL:
                if cfg.rr_enable:
                    if len(cfg.iterative_K) < 2:
                        log.info(
                            "== Rerunning for the first value of K (%d) with Repeat Resolving"
                            % cfg.iterative_K[0])
                    else:
                        support.warning(
                            "Second value of iterative K (%d) exceeded estimated read length (%d). "
                            "Rerunning for the first value of K (%d) with Repeat Resolving"
                            % (cfg.iterative_K[1], RL, cfg.iterative_K[0]),
                            log)
                    run_iteration(configs_dir, execution_home, cfg, log,
                                  cfg.iterative_K[0], None, True)
                    used_K.append(cfg.iterative_K[0])
                    K = cfg.iterative_K[0]
            else:
                rest_of_iterative_K = cfg.iterative_K
                rest_of_iterative_K.pop(0)
                count = 0
                for K in rest_of_iterative_K:
                    count += 1
                    last_one = count == len(cfg.iterative_K) or (
                        rest_of_iterative_K[count] + 1 > RL)
                    run_iteration(configs_dir, execution_home, cfg, log, K,
                                  prev_K, last_one)
                    used_K.append(K)
                    prev_K = K
                    if last_one:
                        break
                    if options_storage.stop_after == "k%d" % K:
                        finished_on_stop_after = True
                        break
                if count < len(cfg.iterative_K) and not finished_on_stop_after:
                    support.warning(
                        "Iterations stopped. Value of K (%d) exceeded estimated read length (%d)"
                        % (cfg.iterative_K[count], RL), log)

    if options_storage.stop_after and options_storage.stop_after.startswith(
            'k'):
        support.finish_here(log)
    latest = os.path.join(cfg.output_dir, "K%d" % K)

    if cfg.correct_scaffolds and not options_storage.run_completed:
        if options_storage.continue_mode and os.path.isfile(
                os.path.join(cfg.output_dir, "SCC", "corrected_scaffolds.fasta"
                             )) and not options_storage.restart_from == "scc":
            log.info("\n===== Skipping %s (already processed). \n" %
                     "scaffold correction")
        else:
            if options_storage.continue_mode:
                support.continue_from_here(log)
            run_scaffold_correction(configs_dir, execution_home, cfg, log,
                                    latest, 21)
        latest = os.path.join(cfg.output_dir, "SCC", "K21")
        if options_storage.stop_after == 'scc':
            support.finish_here(log)

    if cfg.correct_scaffolds:
        correct_scaffolds_fpath = os.path.join(latest,
                                               "corrected_scaffolds.fasta")
        if os.path.isfile(correct_scaffolds_fpath):
            shutil.copyfile(correct_scaffolds_fpath, cfg.result_scaffolds)
    elif not finished_on_stop_after:  # if interrupted by --stop-after, the final K was not processed, so there is nothing to copy
        if os.path.isfile(os.path.join(latest, "before_rr.fasta")):
            result_before_rr_contigs = os.path.join(
                os.path.dirname(cfg.result_contigs), "before_rr.fasta")
            if not os.path.isfile(result_before_rr_contigs
                                  ) or not options_storage.continue_mode:
                shutil.copyfile(os.path.join(latest, "before_rr.fasta"),
                                result_before_rr_contigs)
        if options_storage.rna:
            if os.path.isfile(os.path.join(latest, "transcripts.fasta")):
                if not os.path.isfile(cfg.result_transcripts
                                      ) or not options_storage.continue_mode:
                    shutil.copyfile(os.path.join(latest, "transcripts.fasta"),
                                    cfg.result_transcripts)
            if os.path.isfile(os.path.join(latest, "transcripts.paths")):
                if not os.path.isfile(cfg.result_transcripts_paths
                                      ) or not options_storage.continue_mode:
                    shutil.copyfile(os.path.join(latest, "transcripts.paths"),
                                    cfg.result_transcripts_paths)
            for filtering_type in options_storage.filtering_types:
                prefix = filtering_type + "_filtered_"
                result_filtered_transcripts = os.path.join(
                    cfg.output_dir, prefix + options_storage.transcripts_name)
                latest_filtered_transcripts = os.path.join(
                    latest, prefix + "final_paths.fasta")
                if os.path.isfile(latest_filtered_transcripts):
                    if not os.path.isfile(
                            result_filtered_transcripts
                    ) or not options_storage.continue_mode:
                        shutil.copyfile(latest_filtered_transcripts,
                                        result_filtered_transcripts)
        else:
            if os.path.isfile(os.path.join(latest, "final_contigs.fasta")):
                if not os.path.isfile(cfg.result_contigs
                                      ) or not options_storage.continue_mode:
                    shutil.copyfile(
                        os.path.join(latest, "final_contigs.fasta"),
                        cfg.result_contigs)
            if os.path.isfile(os.path.join(latest, "first_pe_contigs.fasta")):
                result_first_pe_contigs = os.path.join(
                    os.path.dirname(cfg.result_contigs),
                    "first_pe_contigs.fasta")
                if not os.path.isfile(result_first_pe_contigs
                                      ) or not options_storage.continue_mode:
                    shutil.copyfile(
                        os.path.join(latest, "first_pe_contigs.fasta"),
                        result_first_pe_contigs)
            if cfg.rr_enable:
                if os.path.isfile(os.path.join(latest, "scaffolds.fasta")):
                    if not os.path.isfile(
                            cfg.result_scaffolds
                    ) or not options_storage.continue_mode:
                        shutil.copyfile(
                            os.path.join(latest, "scaffolds.fasta"),
                            cfg.result_scaffolds)
                if os.path.isfile(os.path.join(latest, "scaffolds.paths")):
                    if not os.path.isfile(
                            cfg.result_scaffolds_paths
                    ) or not options_storage.continue_mode:
                        shutil.copyfile(
                            os.path.join(latest, "scaffolds.paths"),
                            cfg.result_scaffolds_paths)
            if os.path.isfile(
                    os.path.join(latest, "assembly_graph_with_scaffolds.gfa")):
                if not os.path.isfile(cfg.result_graph_gfa
                                      ) or not options_storage.continue_mode:
                    shutil.copyfile(
                        os.path.join(latest,
                                     "assembly_graph_with_scaffolds.gfa"),
                        cfg.result_graph_gfa)
            if os.path.isfile(os.path.join(latest, "assembly_graph.fastg")):
                if not os.path.isfile(
                        cfg.result_graph) or not options_storage.continue_mode:
                    shutil.copyfile(
                        os.path.join(latest, "assembly_graph.fastg"),
                        cfg.result_graph)
            if os.path.isfile(os.path.join(latest, "final_contigs.paths")):
                if not os.path.isfile(cfg.result_contigs_paths
                                      ) or not options_storage.continue_mode:
                    shutil.copyfile(
                        os.path.join(latest, "final_contigs.paths"),
                        cfg.result_contigs_paths)

    if cfg.developer_mode:
        # saves
        saves_link = os.path.join(os.path.dirname(cfg.result_contigs), "saves")
        # os.path.exists returns False for broken symlinks; lexists returns True
        if os.path.lexists(saves_link):
            os.remove(saves_link)
        os.symlink(os.path.join(latest, "saves"), saves_link)

    if os.path.isdir(bin_reads_dir):
        shutil.rmtree(bin_reads_dir)
    if os.path.isdir(cfg.tmp_dir):
        shutil.rmtree(cfg.tmp_dir)

    return used_K
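
The restart-conflict pruning in run_spades above is its subtlest part: it keeps the longest prefix of already-processed K directories that still matches the new K list and schedules the rest for deletion. Below is a minimal standalone sketch of that comparison, with hardcoded sample lists standing in for the directory scan and read-length estimation:

# Standalone sketch of the k_to_delete computation from run_spades above.
# processed_K: Ks whose output directories survived the previous run;
# needed_K: Ks the restarted run wants; original_K: Ks of the original run.
# All values here are sample data.
processed_K = [21, 33, 55]
needed_K = [21, 33, 77]
original_K = [21, 33, 55]

k_to_delete = []
for idx, k in enumerate(needed_K):
    if len(processed_K) == idx:
        # every processed K matched, but the last one may have been run in
        # "last_one" mode and must be redone if more Ks now follow it
        if processed_K[-1] == original_K[-1]:
            k_to_delete = [original_K[-1]]
        break
    if processed_K[idx] != k:
        k_to_delete = processed_K[idx:]  # everything from the first mismatch on
        break
if not k_to_delete and (len(processed_K) > len(needed_K)):
    k_to_delete = processed_K[len(needed_K) - 1:]

print(k_to_delete)  # -> [55]: the old K=55 directory conflicts with the new K=77
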
Example #2
def run_mulksg(configs_dir, execution_home, cfg, dataset_data,
               ext_python_modules_home, log, comm, rank, size):
    # Running on multiple nodes: rank 0 coordinates the work, the other ranks execute the commands it hands out
    if not isinstance(cfg.iterative_K, list):
        cfg.iterative_K = [cfg.iterative_K]
    cfg.iterative_K = sorted(cfg.iterative_K)

    # Pad cfg.iterative_K until the number of first-stage K values divides
    # evenly among the MPI ranks: repeatedly insert an odd K in the middle of
    # the biggest gap in the current list.
    while (len(cfg.iterative_K) - 1) % size != 0:
        bgap_idx = 0
        bgap = 0
        for i in range(0, len(cfg.iterative_K) - 1):
            gap = cfg.iterative_K[i + 1] - cfg.iterative_K[i]
            if gap > bgap:
                bgap = gap
                bgap_idx = i
        if bgap == 0:
            break
        ul = cfg.iterative_K[bgap_idx + 1]
        ll = cfg.iterative_K[bgap_idx]
        new_k = ll + (ul - ll) // 2  # midpoint of the biggest gap
        new_k = new_k // 2 * 2 + 1  # round to an odd K value
        cfg.iterative_K.append(new_k)
        cfg.iterative_K = sorted(cfg.iterative_K)
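    # e.g. with size=4 and iterative_K=[21, 33, 55] the loop inserts 45 (odd
    # midpoint of 33..55) and then 27 (odd midpoint of 21..33), giving
    # [21, 27, 33, 45, 55], whose len - 1 == 4 is divisible by size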

    if rank == 0:  # set up all the configs and put them in a work_list
        # checking and removing conflicting K-mer directories
        if options_storage.restart_from and (options_storage.restart_k_mers !=
                                             options_storage.original_k_mers):
            processed_K = []
            for k in range(options_storage.MIN_K, options_storage.MAX_K, 2):
                cur_K_dir = os.path.join(cfg.output_dir, "K%d" % k)
                if os.path.isdir(cur_K_dir) and os.path.isfile(
                        os.path.join(cur_K_dir, "final_contigs.fasta")):
                    processed_K.append(k)
            if processed_K:
                RL = get_read_length(cfg.output_dir, processed_K[0],
                                     ext_python_modules_home, log)
                needed_K = update_k_mers_in_special_cases(cfg.iterative_K,
                                                          RL,
                                                          log,
                                                          silent=True)
                needed_K = [k for k in needed_K if k < RL]
                original_K = reveal_original_k_mers(RL)

                k_to_delete = []
                for idx, k in enumerate(needed_K):
                    if len(processed_K) == idx:
                        # the last K in the original run was processed in "last_one" mode
                        if processed_K[-1] == original_K[-1]:
                            k_to_delete = [original_K[-1]]
                        break
                    if processed_K[idx] != k:
                        k_to_delete = processed_K[idx:]
                        break
                if not k_to_delete and (len(processed_K) > len(needed_K)):
                    k_to_delete = processed_K[len(needed_K) - 1:]
                if k_to_delete:
                    log.info(
                        "Restart mode: removing previously processed directories for K=%s "
                        "to avoid conflicts with K specified with --restart-from"
                        % (str(k_to_delete)))
                    for k in k_to_delete:
                        shutil.rmtree(os.path.join(cfg.output_dir, "K%d" % k))
        bin_reads_dir = os.path.join(cfg.output_dir, ".bin_reads")
        if os.path.isdir(bin_reads_dir):
            shutil.rmtree(bin_reads_dir)
        cfg.tmp_dir = support.get_tmp_dir(prefix="mulksg_")

        # split off the largest K value: all smaller Ks are assembled first
        # (each in "last_one" mode), then the largest K is assembled using
        # their contigs as extra input
        first_kValues = cfg.iterative_K
        last_kValue = [first_kValues[-1]]
        del first_kValues[-1]
        first_commands, contig_files = create_mulksg_configs(
            configs_dir, execution_home, cfg, log, first_kValues, None, True,
            ext_python_modules_home)
        work_list = first_commands
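        # dispatch: each idle worker announces its rank and is handed the next
        # command; once size - 1 commands are outstanding, the master runs one
        # itself instead of waiting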
        if size > 1 and len(cfg.iterative_K) > 1:
            num_running = 0
            for command in work_list:
                if num_running == (size - 1):
                    support.sys_call(command, log)
                    num_running = 0
                else:
                    node_rank = comm.recv()  #source=ANY_SOURCE
                    comm.send(command, node_rank)
                    num_running += 1

            for i in range(1, size):
                node_rank = comm.recv()  #source=ANY_SOURCE
                comm.send("Done", node_rank)
                print "Rank", node_rank, "Finished"
        else:
            if len(cfg.iterative_K) > 1:
                pool = Pool(len(first_commands))
                pool.map(support.sys_call, first_commands)
                pool.close()
                pool.join()
        # Now that all the contig files are generated, concatenate them into one file
        extra_contig_filename = None
        try:
            extra_contig_filename = os.path.join(cfg.output_dir, "extracontigs.fa")
            with open(extra_contig_filename, 'w') as contig_file:
                for fname in contig_files:
                    try:
                        with open(fname) as infile:
                            contig_file.write(infile.read())
                    except IOError:
                        continue
        except IOError:
            log.info("Could not create extra contig file!")
            extra_contig_filename = None

        last_command, contig_files = create_mulksg_configs(
            configs_dir, execution_home, cfg, log, last_kValue,
            extra_contig_filename, True, ext_python_modules_home)
        last_command = last_command[0]
        support.sys_call(last_command, log)
        K = last_kValue[0]
        latest = os.path.join(cfg.output_dir, "K%d" % K)

        finished_on_stop_after = False

        if cfg.correct_scaffolds and not options_storage.run_completed:
            if options_storage.continue_mode and os.path.isfile(
                    os.path.join(cfg.output_dir, "SCC",
                                 "corrected_scaffolds.fasta")
            ) and not options_storage.restart_from == "scc":
                log.info("\n===== Skipping %s (already processed). \n" %
                         "scaffold correction")
            else:
                if options_storage.continue_mode:
                    support.continue_from_here(log)
                run_scaffold_correction(configs_dir, execution_home, cfg, log,
                                        latest, 21)
            latest = os.path.join(cfg.output_dir, "SCC", "K21")
            if options_storage.stop_after == 'scc':
                support.finish_here(log)

        if cfg.correct_scaffolds:
            correct_scaffolds_fpath = os.path.join(
                latest, "corrected_scaffolds.fasta")
            if os.path.isfile(correct_scaffolds_fpath):
                shutil.copyfile(correct_scaffolds_fpath, cfg.result_scaffolds)
        elif not finished_on_stop_after:  # if interrupted by --stop-after, the final K was not processed, so there is nothing to copy
            if os.path.isfile(os.path.join(latest, "before_rr.fasta")):
                result_before_rr_contigs = os.path.join(
                    os.path.dirname(cfg.result_contigs), "before_rr.fasta")
                if not os.path.isfile(result_before_rr_contigs
                                      ) or not options_storage.continue_mode:
                    shutil.copyfile(os.path.join(latest, "before_rr.fasta"),
                                    result_before_rr_contigs)
            if options_storage.rna:
                if os.path.isfile(os.path.join(latest, "transcripts.fasta")):
                    if not os.path.isfile(
                            cfg.result_transcripts
                    ) or not options_storage.continue_mode:
                        shutil.copyfile(
                            os.path.join(latest, "transcripts.fasta"),
                            cfg.result_transcripts)
                if os.path.isfile(os.path.join(latest, "transcripts.paths")):
                    if not os.path.isfile(
                            cfg.result_transcripts_paths
                    ) or not options_storage.continue_mode:
                        shutil.copyfile(
                            os.path.join(latest, "transcripts.paths"),
                            cfg.result_transcripts_paths)
                for filtering_type in options_storage.filtering_types:
                    prefix = filtering_type + "_filtered_"
                    result_filtered_transcripts = os.path.join(
                        cfg.output_dir,
                        prefix + options_storage.transcripts_name)
                    latest_filtered_transcripts = os.path.join(
                        latest, prefix + "final_paths.fasta")
                    if os.path.isfile(latest_filtered_transcripts):
                        if not os.path.isfile(
                                result_filtered_transcripts
                        ) or not options_storage.continue_mode:
                            shutil.copyfile(latest_filtered_transcripts,
                                            result_filtered_transcripts)
            else:
                if os.path.isfile(os.path.join(latest, "final_contigs.fasta")):
                    if not os.path.isfile(
                            cfg.result_contigs
                    ) or not options_storage.continue_mode:
                        shutil.copyfile(
                            os.path.join(latest, "final_contigs.fasta"),
                            cfg.result_contigs)
                if os.path.isfile(
                        os.path.join(latest, "first_pe_contigs.fasta")):
                    result_first_pe_contigs = os.path.join(
                        os.path.dirname(cfg.result_contigs),
                        "first_pe_contigs.fasta")
                    if not os.path.isfile(
                            result_first_pe_contigs
                    ) or not options_storage.continue_mode:
                        shutil.copyfile(
                            os.path.join(latest, "first_pe_contigs.fasta"),
                            result_first_pe_contigs)
                if cfg.rr_enable:
                    if os.path.isfile(os.path.join(latest, "scaffolds.fasta")):
                        if not os.path.isfile(
                                cfg.result_scaffolds
                        ) or not options_storage.continue_mode:
                            shutil.copyfile(
                                os.path.join(latest, "scaffolds.fasta"),
                                cfg.result_scaffolds)
                    if os.path.isfile(os.path.join(latest, "scaffolds.paths")):
                        if not os.path.isfile(
                                cfg.result_scaffolds_paths
                        ) or not options_storage.continue_mode:
                            shutil.copyfile(
                                os.path.join(latest, "scaffolds.paths"),
                                cfg.result_scaffolds_paths)
                if os.path.isfile(
                        os.path.join(latest,
                                     "assembly_graph_with_scaffolds.gfa")):
                    if not os.path.isfile(
                            cfg.result_graph_gfa
                    ) or not options_storage.continue_mode:
                        shutil.copyfile(
                            os.path.join(latest,
                                         "assembly_graph_with_scaffolds.gfa"),
                            cfg.result_graph_gfa)
                if os.path.isfile(os.path.join(latest,
                                               "assembly_graph.fastg")):
                    if not os.path.isfile(
                            cfg.result_graph
                    ) or not options_storage.continue_mode:
                        shutil.copyfile(
                            os.path.join(latest, "assembly_graph.fastg"),
                            cfg.result_graph)
                if os.path.isfile(os.path.join(latest, "final_contigs.paths")):
                    if not os.path.isfile(
                            cfg.result_contigs_paths
                    ) or not options_storage.continue_mode:
                        shutil.copyfile(
                            os.path.join(latest, "final_contigs.paths"),
                            cfg.result_contigs_paths)

        if os.path.isdir(bin_reads_dir):
            shutil.rmtree(bin_reads_dir)
        if os.path.isdir(cfg.tmp_dir):
            shutil.rmtree(cfg.tmp_dir)

        return first_kValues + last_kValue

    elif rank != 0 and len(cfg.iterative_K) > 1:
        comm.send(rank, 0)
        command = comm.recv(source=0)
        while command != "Done":
            # print rank, command
            support.sys_call(command, log)
            comm.send(rank, 0)
            command = comm.recv(source=0)
        # comm.send(rank, 0) #tell the master I am done and exiting
        return None
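
The rank-0 / worker handshake above is a simple pull model: each idle worker sends its rank to the master, receives either a shell command or the sentinel string "Done", and loops. Below is a minimal self-contained sketch of that protocol; it assumes comm is an mpi4py communicator (run with e.g. mpiexec -n 4), and do_work is a hypothetical stand-in for support.sys_call:

# Sketch of the pull-style work distribution used by run_mulksg (assumes mpi4py).
from mpi4py import MPI

comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()

def do_work(task):
    # hypothetical stand-in for support.sys_call(command, log)
    print("rank %d running: %s" % (rank, task))

if rank == 0:
    tasks = ["task-%d" % i for i in range(10)]
    for task in tasks:
        worker = comm.recv()        # an idle worker announces itself
        comm.send(task, worker)     # hand it the next task
    for _ in range(1, size):        # then shut every worker down
        worker = comm.recv()
        comm.send("Done", worker)
else:
    comm.send(rank, 0)              # announce readiness
    task = comm.recv(source=0)
    while task != "Done":
        do_work(task)
        comm.send(rank, 0)
        task = comm.recv(source=0)
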
Example #3
def main(args):
    os.environ["LC_ALL"] = "C"

    if len(args) == 1:
        show_usage(0)

    log = logging.getLogger('spades')
    log.setLevel(logging.DEBUG)

    console = logging.StreamHandler(sys.stdout)
    console.setFormatter(logging.Formatter('%(message)s'))
    console.setLevel(logging.DEBUG)
    log.addHandler(console)

    support.check_binaries(bin_home, log)

    # parse options and save all parameters to cfg
    options = args
    cfg, dataset_data = fill_cfg(options, log)

    if options_storage.continue_mode:
        cmd_line, options = get_options_from_params(os.path.join(options_storage.output_dir, "params.txt"), args[0])
        if not options:
            support.error("failed to parse command line of the previous run! Please restart from the beginning or specify another output directory.")
        cfg, dataset_data = fill_cfg(options, log, secondary_filling=True)
        if options_storage.restart_from:
            check_cfg_for_partial_run(cfg, type='restart-from')
        options_storage.continue_mode = True
    if options_storage.stop_after:
        check_cfg_for_partial_run(cfg, type='stop-after')

    log_filename = os.path.join(cfg["common"].output_dir, "spades.log")
    if options_storage.continue_mode:
        log_handler = logging.FileHandler(log_filename, mode='a')
    else:
        log_handler = logging.FileHandler(log_filename, mode='w')
    log.addHandler(log_handler)

    if options_storage.continue_mode:
        log.info("\n======= SPAdes pipeline continued. Log can be found here: " + log_filename + "\n")
        log.info("Restored from " + cmd_line)
        if options_storage.restart_from:
            updated_params = ""
            skip_next = False
            for v in args[1:]:
                if v == '-o' or v == '--restart-from':
                    skip_next = True
                    continue
                if skip_next or v.startswith('--restart-from='):  # you can specify '--restart-from=k33' but not '-o=out_dir'
                    skip_next = False
                    continue
                updated_params += " " + v
            updated_params = updated_params.strip()
            log.info("with updated parameters: " + updated_params)
            cmd_line += " " + updated_params
        log.info("")

    params_filename = os.path.join(cfg["common"].output_dir, "params.txt")
    params_handler = logging.FileHandler(params_filename, mode='w')
    log.addHandler(params_handler)

    if options_storage.continue_mode:
        log.info(cmd_line)
    else:
        command = "Command line: "
        for v in args:
            # substituting relative paths with absolute ones (read paths, output dir path, etc)
            v, prefix = support.get_option_prefix(v)
            if v in options_storage.dict_of_rel2abs.keys():
                v = options_storage.dict_of_rel2abs[v]
            if prefix:
                command += prefix + ":"
            command += v + " "
        log.info(command)

    # special case
#    if "mismatch_corrector" in cfg and not support.get_lib_ids_by_type(dataset_data, 'paired-end'):
#        support.warning('cannot perform mismatch correction without at least one paired-end library! Skipping this step.', log)
#        del cfg["mismatch_corrector"]

    print_used_values(cfg, log)
    log.removeHandler(params_handler)

    support.check_single_reads_in_options(options, log)

    if not options_storage.continue_mode:
        log.info("\n======= SPAdes pipeline started. Log can be found here: " + log_filename + "\n")

    # splitting interlaced reads and processing Ns in additional contigs if needed
    if support.dataset_has_interlaced_reads(dataset_data) or support.dataset_has_additional_contigs(dataset_data)\
            or support.dataset_has_nxmate_reads(dataset_data):
        dir_for_split_reads = os.path.join(options_storage.output_dir, 'split_input')
        if support.dataset_has_interlaced_reads(dataset_data) or support.dataset_has_nxmate_reads(dataset_data):
            if not os.path.isdir(dir_for_split_reads):
                os.makedirs(dir_for_split_reads)
            if support.dataset_has_interlaced_reads(dataset_data):
                dataset_data = support.split_interlaced_reads(dataset_data, dir_for_split_reads, log)
            if support.dataset_has_nxmate_reads(dataset_data):
                dataset_data = support.process_nxmate_reads(dataset_data, dir_for_split_reads, log)
        if support.dataset_has_additional_contigs(dataset_data):
            dataset_data = support.process_Ns_in_additional_contigs(dataset_data, dir_for_split_reads, log)
        options_storage.dataset_yaml_filename = os.path.join(options_storage.output_dir, "input_dataset.yaml")
        pyyaml.dump(dataset_data, open(options_storage.dataset_yaml_filename, 'w'))
        cfg["dataset"].yaml_filename = options_storage.dataset_yaml_filename

    try:
        # copying configs before all computations (to prevent them from changing at run time)
        tmp_configs_dir = os.path.join(cfg["common"].output_dir, "configs")
        if os.path.isdir(tmp_configs_dir) and not options_storage.continue_mode:
            shutil.rmtree(tmp_configs_dir)
        if not os.path.isdir(tmp_configs_dir):
            if options_storage.configs_dir:
                dir_util.copy_tree(options_storage.configs_dir, tmp_configs_dir, preserve_times=False)
            else:
                dir_util.copy_tree(os.path.join(spades_home, "configs"), tmp_configs_dir, preserve_times=False)

        corrected_dataset_yaml_filename = ''
        if "error_correction" in cfg:
            STAGE_NAME = "Read error correction"
            bh_cfg = merge_configs(cfg["error_correction"], cfg["common"])
            corrected_dataset_yaml_filename = os.path.join(bh_cfg.output_dir, "corrected.yaml")
            if os.path.isfile(corrected_dataset_yaml_filename) and options_storage.continue_mode \
                and not options_storage.restart_from == "ec":
                log.info("\n===== Skipping %s (already processed). \n" % STAGE_NAME)
            else:
                support.continue_from_here(log)

                if "HEAPCHECK" in os.environ:
                    del os.environ["HEAPCHECK"]
                if "heap_check" in bh_cfg.__dict__:
                    os.environ["HEAPCHECK"] = bh_cfg.heap_check

                if os.path.exists(bh_cfg.output_dir):
                    shutil.rmtree(bh_cfg.output_dir)
                os.makedirs(bh_cfg.output_dir)

                if support.get_lib_ids_by_type(dataset_data, options_storage.LONG_READS_TYPES):
                    not_used_dataset_data = support.get_libs_by_type(dataset_data, options_storage.LONG_READS_TYPES)
                    to_correct_dataset_data = support.rm_libs_by_type(dataset_data, options_storage.LONG_READS_TYPES)
                    to_correct_dataset_yaml_filename = os.path.join(bh_cfg.output_dir, "to_correct.yaml")
                    pyyaml.dump(to_correct_dataset_data, open(to_correct_dataset_yaml_filename, 'w'))
                    bh_cfg.__dict__["dataset_yaml_filename"] = to_correct_dataset_yaml_filename
                else:
                    not_used_dataset_data = None
                    bh_cfg.__dict__["dataset_yaml_filename"] = cfg["dataset"].yaml_filename

                log.info("\n===== %s started. \n" % STAGE_NAME)

                hammer_logic.run_hammer(corrected_dataset_yaml_filename, tmp_configs_dir, bin_home, bh_cfg, not_used_dataset_data,
                    ext_python_modules_home, log)
                log.info("\n===== %s finished. \n" % STAGE_NAME)
            if options_storage.stop_after == 'ec':
                support.finish_here(log)

        result_contigs_filename = os.path.join(cfg["common"].output_dir, "contigs.fasta")
        result_scaffolds_filename = os.path.join(cfg["common"].output_dir, "scaffolds.fasta")
        result_assembly_graph_filename = os.path.join(cfg["common"].output_dir, "assembly_graph.fastg")
        truseq_long_reads_file_base = os.path.join(cfg["common"].output_dir, "truseq_long_reads")
        truseq_long_reads_file = truseq_long_reads_file_base + ".fasta"
        misc_dir = os.path.join(cfg["common"].output_dir, "misc")
        ### if mismatch correction is enabled, the result contigs are copied to the misc directory
        assembled_contigs_filename = os.path.join(misc_dir, "assembled_contigs.fasta")
        assembled_scaffolds_filename = os.path.join(misc_dir, "assembled_scaffolds.fasta")
        if "assembly" in cfg and not options_storage.run_completed:
            STAGE_NAME = "Assembling"
            spades_cfg = merge_configs(cfg["assembly"], cfg["common"])
            spades_cfg.__dict__["result_contigs"] = result_contigs_filename
            spades_cfg.__dict__["result_scaffolds"] = result_scaffolds_filename
            spades_cfg.__dict__["result_graph"] = result_assembly_graph_filename

            if options_storage.continue_mode and (os.path.isfile(spades_cfg.result_contigs)
                                                  or ("mismatch_corrector" in cfg and
                                                      os.path.isfile(assembled_contigs_filename)))\
                and not options_storage.restart_from == 'as' \
                and not options_storage.restart_from == 'scc' \
                and not (options_storage.restart_from and options_storage.restart_from.startswith('k')):

                log.info("\n===== Skipping %s (already processed). \n" % STAGE_NAME)
                # calculating latest_dir for the next stages
                latest_dir = support.get_latest_dir(os.path.join(spades_cfg.output_dir, "K*"))
                if not latest_dir:
                    support.error("failed to continue the previous run! Please restart from previous stages or from the beginning.", log)
            else:
                old_result_files = [result_contigs_filename, result_scaffolds_filename,
                                    assembled_contigs_filename, assembled_scaffolds_filename]
                for old_result_file in old_result_files:
                    if os.path.isfile(old_result_file):
                        os.remove(old_result_file)

                if options_storage.restart_from == 'as':
                    support.continue_from_here(log)

                if os.path.isfile(corrected_dataset_yaml_filename):
                    dataset_data = pyyaml.load(open(corrected_dataset_yaml_filename, 'r'))
                    dataset_data = support.relative2abs_paths(dataset_data, os.path.dirname(corrected_dataset_yaml_filename))
                if spades_cfg.disable_rr:
                    spades_cfg.__dict__["rr_enable"] = False
                else:
                    spades_cfg.__dict__["rr_enable"] = True

                if "HEAPCHECK" in os.environ:
                    del os.environ["HEAPCHECK"]
                if "heap_check" in spades_cfg.__dict__:
                    os.environ["HEAPCHECK"] = spades_cfg.heap_check

                log.info("\n===== %s started.\n" % STAGE_NAME)

                # creating dataset
                dataset_filename = os.path.join(spades_cfg.output_dir, "dataset.info")
                if not os.path.isfile(dataset_filename) or not options_storage.continue_mode:
                    dataset_file = open(dataset_filename, 'w')
                    import process_cfg
                    dataset_file.write("single_cell" + '\t' + process_cfg.bool_to_str(cfg["dataset"].single_cell) + '\n')
                    dataset_file.write("meta" + '\t' + process_cfg.bool_to_str(cfg["dataset"].meta) + '\n')
                    dataset_file.write("moleculo" + '\t' + process_cfg.bool_to_str(cfg["dataset"].truseq) + '\n')
                    if os.path.isfile(corrected_dataset_yaml_filename):
                        dataset_file.write("reads" + '\t' + process_cfg.process_spaces(corrected_dataset_yaml_filename) + '\n')
                    else:
                        dataset_file.write("reads" + '\t' + process_cfg.process_spaces(cfg["dataset"].yaml_filename) + '\n')
                    if spades_cfg.developer_mode and "reference" in cfg["dataset"].__dict__:
                        dataset_file.write("reference_genome" + '\t')
                        dataset_file.write(process_cfg.process_spaces(cfg["dataset"].reference) + '\n')
                    dataset_file.close()
                spades_cfg.__dict__["dataset"] = dataset_filename

                latest_dir = spades_logic.run_spades(tmp_configs_dir, bin_home, spades_cfg, dataset_data, ext_python_modules_home, log)

                if os.path.isdir(misc_dir) and not options_storage.continue_mode:
                    shutil.rmtree(misc_dir)
                if not os.path.isdir(misc_dir):
                    os.makedirs(misc_dir)

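                # continue_mode is still True here only if no stage called
                # support.continue_from_here, i.e. the requested restart K was
                # never produced by the original run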
                if options_storage.continue_mode and options_storage.restart_from and options_storage.restart_from.startswith('k'):
                    k_str = options_storage.restart_from[1:]
                    if k_str.find(":") != -1:
                        k_str = k_str[:k_str.find(":")]
                    support.error("failed to continue from K=%s because this K was not processed in the original run!" % k_str, log)
                log.info("\n===== %s finished. \n" % STAGE_NAME)
            if not options_storage.run_completed:
                if options_storage.stop_after == 'as' or options_storage.stop_after == 'scc' or (options_storage.stop_after and options_storage.stop_after.startswith('k')):
                    support.finish_here(log)

            #postprocessing
            if cfg["run_truseq_postprocessing"] and not options_storage.run_completed:
                if options_storage.continue_mode and os.path.isfile(truseq_long_reads_file_base + ".fastq") and not options_storage.restart_from == 'tpp':
                    log.info("\n===== Skipping %s (already processed). \n" % "TruSeq postprocessing")
                else:
                    support.continue_from_here(log)
                    if os.path.isfile(result_scaffolds_filename):
                        shutil.move(result_scaffolds_filename, assembled_scaffolds_filename)
                    reads_library = dataset_data[0]
                    alignment_bin = os.path.join(bin_home, "bwa-spades")
                    alignment_dir = os.path.join(cfg["common"].output_dir, "alignment")
                    sam_files = alignment.align_bwa(alignment_bin, assembled_scaffolds_filename, dataset_data, alignment_dir, log, options_storage.threads)
                    moleculo_postprocessing.moleculo_postprocessing(assembled_scaffolds_filename, truseq_long_reads_file_base, sam_files, log)
                if options_storage.stop_after == 'tpp':
                    support.finish_here(log)

            #corrector
            if "mismatch_corrector" in cfg and not options_storage.run_completed and \
                    (os.path.isfile(result_contigs_filename) or
                    (options_storage.continue_mode and os.path.isfile(assembled_contigs_filename))):
                STAGE_NAME = "Mismatch correction"
                to_correct = dict()
                to_correct["contigs"] = (result_contigs_filename, assembled_contigs_filename)
                if os.path.isfile(result_scaffolds_filename) or (options_storage.continue_mode and
                                                                 os.path.isfile(assembled_scaffolds_filename)):
                    to_correct["scaffolds"] = (result_scaffolds_filename, assembled_scaffolds_filename)

                # moving assembled contigs (scaffolds) to misc dir
                for assembly_type, (old, new) in to_correct.items():
                    if options_storage.continue_mode and os.path.isfile(new):
                        continue
                    if os.path.isfile(old):
                        shutil.move(old, new)

                if options_storage.continue_mode and os.path.isfile(result_contigs_filename) and \
                    (os.path.isfile(result_scaffolds_filename) or not os.path.isfile(assembled_scaffolds_filename)) \
                    and not options_storage.restart_from == 'mc':
                    log.info("\n===== Skipping %s (already processed). \n" % STAGE_NAME)
                else:
                    if options_storage.restart_from == 'mc':
                        support.continue_from_here(log)

                    log.info("\n===== %s started." % STAGE_NAME)
                    # detecting paired-end library with the largest insert size
                    cfg["mismatch_corrector"].__dict__["dataset"] = cfg["dataset"].yaml_filename
                    #TODO: add reads orientation

                    import corrector_logic
                    corrector_cfg = cfg["mismatch_corrector"]
                    # processing contigs and scaffolds (or only contigs)
                    for assembly_type, (corrected, assembled) in to_correct.items():
                        if options_storage.continue_mode and os.path.isfile(corrected):
                            log.info("\n== Skipping processing of " + assembly_type + " (already processed)\n")
                            continue

                        support.continue_from_here(log)
                        log.info("\n== Processing of " + assembly_type + "\n")

                        tmp_dir_for_corrector = os.path.join(cfg["common"].output_dir, "mismatch_corrector", assembly_type)

                        cfg["mismatch_corrector"].__dict__["output_dir"] = tmp_dir_for_corrector
                        # correcting
                        corr_cfg = merge_configs(cfg["mismatch_corrector"], cfg["common"])
                        
                        result_corrected_filename = os.path.join(tmp_dir_for_corrector, "corrected_contigs.fasta")

                        corrector_logic.run_corrector(tmp_configs_dir, bin_home, corr_cfg,
                                                      ext_python_modules_home, log, assembled,
                                                      result_corrected_filename)

                        if os.path.isfile(result_corrected_filename):
                            shutil.copyfile(result_corrected_filename, corrected)
                        tmp_d = os.path.join(tmp_dir_for_corrector, "tmp")
                        if os.path.isdir(tmp_d) and not cfg["common"].developer_mode:
                            shutil.rmtree(tmp_d)
                    log.info("\n===== %s finished.\n" % STAGE_NAME)
                if options_storage.stop_after == 'mc':
                    support.finish_here(log)

        if not cfg["common"].developer_mode and os.path.isdir(tmp_configs_dir):
            shutil.rmtree(tmp_configs_dir)

        if not options_storage.run_completed:
            #log.info("")
            if "error_correction" in cfg and os.path.isdir(os.path.dirname(corrected_dataset_yaml_filename)):
                log.info(" * Corrected reads are in " + support.process_spaces(os.path.dirname(corrected_dataset_yaml_filename) + "/"))
            if "assembly" in cfg and os.path.isfile(result_contigs_filename):
                message = " * Assembled contigs are in " + support.process_spaces(result_contigs_filename)
                log.info(message)
            if "assembly" in cfg and os.path.isfile(result_scaffolds_filename):
                message = " * Assembled scaffolds are in " + support.process_spaces(result_scaffolds_filename)
                log.info(message)
            if "assembly" in cfg and os.path.isfile(result_assembly_graph_filename):
                message = " * Assembly graph is in " + support.process_spaces(result_assembly_graph_filename)
                log.info(message)
            #log.info("")

        #breaking scaffolds
        if os.path.isfile(result_scaffolds_filename):
            if not os.path.isdir(misc_dir):
                os.makedirs(misc_dir)
            result_broken_scaffolds = os.path.join(misc_dir, "broken_scaffolds.fasta")
            if not os.path.isfile(result_broken_scaffolds) or not options_storage.continue_mode:
                modified, broken_scaffolds = support.break_scaffolds(result_scaffolds_filename,
                    options_storage.THRESHOLD_FOR_BREAKING_SCAFFOLDS)
                if modified:
                    support.write_fasta(result_broken_scaffolds, broken_scaffolds)
                    #log.info(" * Scaffolds broken by " + str(options_storage.THRESHOLD_FOR_BREAKING_SCAFFOLDS) +
                    # " Ns are in " + result_broken_scaffolds)

        ### printing WARNINGS SUMMARY
        if not support.log_warnings(log):
            log.info("\n======= SPAdes pipeline finished.")  # otherwise it finished WITH WARNINGS

        if options_storage.test_mode:
            if options_storage.truseq_mode:
                if not os.path.isfile(truseq_long_reads_file):
                    support.error("TEST FAILED: %s does not exist!" % truseq_long_reads_file)
            else:
                for result_filename in [result_contigs_filename, result_scaffolds_filename]:
                    if os.path.isfile(result_filename):
                        result_fasta = list(support.read_fasta(result_filename))
                        # correctness check: should be one contig of length 1000 bp
                        correct_number = 1
                        correct_length = 1000
                        if not len(result_fasta):
                            support.error("TEST FAILED: %s does not contain contigs!" % result_filename)
                        elif len(result_fasta) > correct_number:
                            support.error("TEST FAILED: %s contains more than %d contig (%d)!" %
                                          (result_filename, correct_number, len(result_fasta)))
                        elif len(result_fasta[0][1]) != correct_length:
                            if len(result_fasta[0][1]) > correct_length:
                                relation = "more"
                            else:
                                relation = "less"
                            support.error("TEST FAILED: %s contains %s than %d bp (%d bp)!" %
                                          (result_filename, relation, correct_length, len(result_fasta[0][1])))
                    else:
                        support.error("TEST FAILED: " + result_filename + " does not exist!")
            log.info("\n========= TEST PASSED CORRECTLY.")


        log.info("\nSPAdes log can be found here: " + log_filename)
        log.info("")
        log.info("Thank you for using SPAdes!")
        log.removeHandler(log_handler)

    except Exception:
        exc_type, exc_value, _ = sys.exc_info()
        if exc_type == SystemExit:
            sys.exit(exc_value)
        else:
            if exc_type == OSError and exc_value.errno == errno.ENOEXEC: # Exec format error
                support.error("It looks like you are using SPAdes binaries for another platform.\n" +
                              support.get_spades_binaries_info_message())
            else:
                log.exception(exc_value)
                support.error("exception caught: %s" % exc_type, log)
    except BaseException:  # since python 2.5 system-exiting exceptions (e.g. KeyboardInterrupt) are derived from BaseException
        exc_type, exc_value, _ = sys.exc_info()
        if exc_type == SystemExit:
            sys.exit(exc_value)
        else:
            log.exception(exc_value)
            support.error("exception caught: %s" % exc_type, log)
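
One detail of main() worth noting: the command line is copied into params.txt by attaching a second FileHandler to the same logger just for that block and removing it right afterwards, so the text reaches the console, spades.log and params.txt at once. A minimal sketch of the pattern:

# Sketch of the temporary-handler trick main() uses around params.txt.
import logging
import sys

log = logging.getLogger('example')
log.setLevel(logging.DEBUG)
log.addHandler(logging.StreamHandler(sys.stdout))

params_handler = logging.FileHandler("params.txt", mode='w')
log.addHandler(params_handler)
log.info("Command line: spades.py -o out_dir ...")  # console AND params.txt
log.removeHandler(params_handler)
log.info("from here on, console only")
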
Пример #4
0
def run_spades(configs_dir, execution_home, cfg, dataset_data, ext_python_modules_home, log):
    if not isinstance(cfg.iterative_K, list):
        cfg.iterative_K = [cfg.iterative_K]
    cfg.iterative_K = sorted(cfg.iterative_K)
    used_K = []

    # checking and removing conflicting K-mer directories
    if options_storage.restart_from and (options_storage.restart_k_mers != options_storage.original_k_mers):
        processed_K = []
        for k in range(options_storage.MIN_K, options_storage.MAX_K, 2):
            cur_K_dir = os.path.join(cfg.output_dir, "K%d" % k)
            if os.path.isdir(cur_K_dir) and os.path.isfile(os.path.join(cur_K_dir, "final_contigs.fasta")):
                processed_K.append(k)
        if processed_K:
            RL = get_read_length(cfg.output_dir, processed_K[0], ext_python_modules_home, log)
            needed_K = update_k_mers_in_special_cases(cfg.iterative_K, RL, log, silent=True)
            needed_K = [k for k in needed_K if k < RL]
            original_K = reveal_original_k_mers(RL)

            k_to_delete = []
            for id, k in enumerate(needed_K):
                if len(processed_K) == id:
                    if processed_K[-1] == original_K[-1]:  # the last K in the original run was processed in "last_one" mode
                        k_to_delete = [original_K[-1]]
                    break
                if processed_K[id] != k:
                    k_to_delete = processed_K[id:]
                    break
            if not k_to_delete and (len(processed_K) > len(needed_K)):
                k_to_delete = processed_K[len(needed_K) - 1:]
            if k_to_delete:
                log.info("Restart mode: removing previously processed directories for K=%s "
                         "to avoid conflicts with K specified with --restart-from" % (str(k_to_delete)))
                for k in k_to_delete:
                    shutil.rmtree(os.path.join(cfg.output_dir, "K%d" % k))

    bin_reads_dir = os.path.join(cfg.output_dir, ".bin_reads")
    if os.path.isdir(bin_reads_dir) and not options_storage.continue_mode:
        shutil.rmtree(bin_reads_dir)
    cfg.tmp_dir = support.get_tmp_dir(prefix="spades_")

    finished_on_stop_after = False
    K = cfg.iterative_K[0]
    if len(cfg.iterative_K) == 1:
        run_iteration(configs_dir, execution_home, cfg, log, K, None, True)
        used_K.append(K)
    else:
        run_iteration(configs_dir, execution_home, cfg, log, K, None, False)
        used_K.append(K)
        if options_storage.stop_after == "k%d" % K:
            finished_on_stop_after = True
        else:
            prev_K = K
            RL = get_read_length(cfg.output_dir, K, ext_python_modules_home, log)
            cfg.iterative_K = update_k_mers_in_special_cases(cfg.iterative_K, RL, log)
            if len(cfg.iterative_K) < 2 or cfg.iterative_K[1] + 1 > RL:
                if cfg.rr_enable:
                    if len(cfg.iterative_K) < 2:
                        log.info("== Rerunning for the first value of K (%d) with Repeat Resolving" %
                                 cfg.iterative_K[0])
                    else:
                        support.warning("Second value of iterative K (%d) exceeded estimated read length (%d). "
                                        "Rerunning for the first value of K (%d) with Repeat Resolving" %
                                        (cfg.iterative_K[1], RL, cfg.iterative_K[0]), log)
                    run_iteration(configs_dir, execution_home, cfg, log, cfg.iterative_K[0], None, True)
                    used_K.append(cfg.iterative_K[0])
                    K = cfg.iterative_K[0]
            else:
                rest_of_iterative_K = cfg.iterative_K
                rest_of_iterative_K.pop(0)
                count = 0
                for K in rest_of_iterative_K:
                    count += 1
                    last_one = count == len(cfg.iterative_K) or (rest_of_iterative_K[count] + 1 > RL)
                    run_iteration(configs_dir, execution_home, cfg, log, K, prev_K, last_one)
                    used_K.append(K)
                    prev_K = K
                    if last_one:
                        break
                    if options_storage.stop_after == "k%d" % K:
                        finished_on_stop_after = True
                        break
                if count < len(cfg.iterative_K) and not finished_on_stop_after:
                    support.warning("Iterations stopped. Value of K (%d) exceeded estimated read length (%d)" %
                                    (cfg.iterative_K[count], RL), log)

    if options_storage.stop_after and options_storage.stop_after.startswith('k'):
        support.finish_here(log)
    latest = os.path.join(cfg.output_dir, "K%d" % K)

    if cfg.correct_scaffolds and not options_storage.run_completed:
        if options_storage.continue_mode and os.path.isfile(os.path.join(cfg.output_dir, "SCC", "corrected_scaffolds.fasta")) and not options_storage.restart_from == "scc":
            log.info("\n===== Skipping %s (already processed). \n" % "scaffold correction")
        else:
            if options_storage.continue_mode:
                support.continue_from_here(log)
            run_scaffold_correction(configs_dir, execution_home, cfg, log, latest, 21)
        latest = os.path.join(os.path.join(cfg.output_dir, "SCC"), "K21")
        if options_storage.stop_after == 'scc':
            support.finish_here(log)

    if cfg.correct_scaffolds:
        correct_scaffolds_fpath = os.path.join(latest, "corrected_scaffolds.fasta")
        if os.path.isfile(correct_scaffolds_fpath):
            shutil.copyfile(correct_scaffolds_fpath, cfg.result_scaffolds)
    elif not finished_on_stop_after:  # interupted by --stop-after, so final K is not processed!
        if os.path.isfile(os.path.join(latest, "before_rr.fasta")):
            result_before_rr_contigs = os.path.join(os.path.dirname(cfg.result_contigs), "before_rr.fasta")
            if not os.path.isfile(result_before_rr_contigs) or not options_storage.continue_mode:
                shutil.copyfile(os.path.join(latest, "before_rr.fasta"), result_before_rr_contigs)
        if options_storage.rna:
            if os.path.isfile(os.path.join(latest, "transcripts.fasta")):
                if not os.path.isfile(cfg.result_transcripts) or not options_storage.continue_mode:
                    shutil.copyfile(os.path.join(latest, "transcripts.fasta"), cfg.result_transcripts)
            if os.path.isfile(os.path.join(latest, "transcripts.paths")):
                if not os.path.isfile(cfg.result_transcripts_paths) or not options_storage.continue_mode:
                    shutil.copyfile(os.path.join(latest, "transcripts.paths"), cfg.result_transcripts_paths)
            for filtering_type in options_storage.filtering_types:
                prefix = filtering_type + "_filtered_"
                result_filtered_transcripts = os.path.join(cfg.output_dir, prefix + options_storage.transcripts_name)
                latest_filtered_transcripts = os.path.join(latest, prefix + "final_paths.fasta")
                if os.path.isfile(latest_filtered_transcripts):
                    if not os.path.isfile(result_filtered_transcripts) or not options_storage.continue_mode:
                        shutil.copyfile(latest_filtered_transcripts, result_filtered_transcripts)
        else:
            if os.path.isfile(os.path.join(latest, "final_contigs.fasta")):
                if not os.path.isfile(cfg.result_contigs) or not options_storage.continue_mode:
                    shutil.copyfile(os.path.join(latest, "final_contigs.fasta"), cfg.result_contigs)
            if os.path.isfile(os.path.join(latest, "first_pe_contigs.fasta")):
                result_first_pe_contigs = os.path.join(os.path.dirname(cfg.result_contigs), "first_pe_contigs.fasta")
                if not os.path.isfile(result_first_pe_contigs) or not options_storage.continue_mode:
                    shutil.copyfile(os.path.join(latest, "first_pe_contigs.fasta"), result_first_pe_contigs)
            if cfg.rr_enable:
                if os.path.isfile(os.path.join(latest, "scaffolds.fasta")):
                    if not os.path.isfile(cfg.result_scaffolds) or not options_storage.continue_mode:
                        shutil.copyfile(os.path.join(latest, "scaffolds.fasta"), cfg.result_scaffolds)
                if os.path.isfile(os.path.join(latest, "scaffolds.paths")):
                    if not os.path.isfile(cfg.result_scaffolds_paths) or not options_storage.continue_mode:
                        shutil.copyfile(os.path.join(latest, "scaffolds.paths"), cfg.result_scaffolds_paths)
            if os.path.isfile(os.path.join(latest, "assembly_graph_with_scaffolds.gfa")):
                if not os.path.isfile(cfg.result_graph_gfa) or not options_storage.continue_mode:
                    shutil.copyfile(os.path.join(latest, "assembly_graph_with_scaffolds.gfa"), cfg.result_graph_gfa)
            if os.path.isfile(os.path.join(latest, "assembly_graph.fastg")):
                if not os.path.isfile(cfg.result_graph) or not options_storage.continue_mode:
                    shutil.copyfile(os.path.join(latest, "assembly_graph.fastg"), cfg.result_graph)
            if os.path.isfile(os.path.join(latest, "final_contigs.paths")):
                if not os.path.isfile(cfg.result_contigs_paths) or not options_storage.continue_mode:
                    shutil.copyfile(os.path.join(latest, "final_contigs.paths"), cfg.result_contigs_paths)


    if cfg.developer_mode:
        # create a "saves" symlink pointing to the saves of the last K iteration
        saves_link = os.path.join(os.path.dirname(cfg.result_contigs), "saves")
        if os.path.lexists(saves_link):  # os.path.exists returns False for broken links, while lexists returns True
            os.remove(saves_link)
        os.symlink(os.path.join(latest, "saves"), saves_link)

    if os.path.isdir(bin_reads_dir):
        shutil.rmtree(bin_reads_dir)
    if os.path.isdir(cfg.tmp_dir):
        shutil.rmtree(cfg.tmp_dir)

    return used_K
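
The long copy section above repeats a single guard many times: take a file from the last K directory only if it exists, and, under --continue, leave an already produced destination untouched. A minimal sketch of that idiom factored into a helper (the copy_result name is hypothetical, not part of SPAdes):

import os
import shutil

def copy_result(src, dst, continue_mode=False):
    # mirror the guard used in run_spades: silently skip missing sources,
    # and in continue mode keep an existing destination as-is
    if not os.path.isfile(src):
        return False
    if continue_mode and os.path.isfile(dst):
        return False
    shutil.copyfile(src, dst)
    return True

# hypothetical usage, mirroring the scaffolds branch above:
# copy_result(os.path.join(latest, "scaffolds.fasta"), cfg.result_scaffolds,
#             options_storage.continue_mode)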
Example #5
def main(args):
    os.environ["LC_ALL"] = "C"

    if len(args) == 1:
        show_usage(0)

    log = logging.getLogger('spades')
    log.setLevel(logging.DEBUG)

    console = logging.StreamHandler(sys.stdout)
    console.setFormatter(logging.Formatter('%(message)s'))
    console.setLevel(logging.DEBUG)
    log.addHandler(console)

    support.check_binaries(bin_home, log)

    # auto-detecting SPAdes mode (rna, meta, etc.)
    mode = options_storage.get_mode()
    if mode is not None:
        args.append('--' + mode)

    # parse options and save all parameters to cfg
    options = args
    cfg, dataset_data = fill_cfg(options, log)

    if options_storage.continue_mode:
        cmd_line, options = get_options_from_params(os.path.join(options_storage.output_dir, "params.txt"), args[0])
        if not options:
            support.error("failed to parse command line of the previous run! Please restart from the beginning or specify another output directory.")
        cfg, dataset_data = fill_cfg(options, log, secondary_filling=True)
        if options_storage.restart_from:
            check_cfg_for_partial_run(cfg, type='restart-from')
        options_storage.continue_mode = True
    if options_storage.stop_after:
        check_cfg_for_partial_run(cfg, type='stop-after')

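    # main log file: append when continuing a previous run, overwrite on a fresh start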
    log_filename = os.path.join(cfg["common"].output_dir, "spades.log")
    if options_storage.continue_mode:
        log_handler = logging.FileHandler(log_filename, mode='a')
    else:
        log_handler = logging.FileHandler(log_filename, mode='w')
    log.addHandler(log_handler)

    if options_storage.continue_mode:
        log.info("\n======= SPAdes pipeline continued. Log can be found here: " + log_filename + "\n")
        log.info("Restored from " + cmd_line)
        if options_storage.restart_from:
            updated_params = ""
            skip_next = False
            for v in args[1:]:
                if v == '-o' or v == '--restart-from':
                    skip_next = True
                    continue
                if skip_next or v.startswith('--restart-from='):  # you can specify '--restart-from=k33' but not '-o=out_dir'
                    skip_next = False
                    continue
                updated_params += "\t" + v
            updated_params = updated_params.strip()
            log.info("with updated parameters: " + updated_params)
            cmd_line += "\t" + updated_params
        log.info("")

    params_filename = os.path.join(cfg["common"].output_dir, "params.txt")
    params_handler = logging.FileHandler(params_filename, mode='w')
    log.addHandler(params_handler)

    if options_storage.continue_mode:
        log.info(cmd_line)
    else:
        command = "Command line: "
        for v in args:
            # substituting relative paths with absolute ones (read paths, output dir path, etc)
            v, prefix = support.get_option_prefix(v)
            if v in options_storage.dict_of_rel2abs.keys():
                v = options_storage.dict_of_rel2abs[v]
            if prefix:
                command += prefix + ":"
            command += v + "\t"
        log.info(command)

    # special case
#    if "mismatch_corrector" in cfg and not support.get_lib_ids_by_type(dataset_data, 'paired-end'):
#        support.warning('cannot perform mismatch correction without at least one paired-end library! Skipping this step.', log)
#        del cfg["mismatch_corrector"]

    print_used_values(cfg, log)
    log.removeHandler(params_handler)

    support.check_single_reads_in_options(options, log)

    if not options_storage.continue_mode:
        log.info("\n======= SPAdes pipeline started. Log can be found here: " + log_filename + "\n")

    # splitting interlaced reads and processing Ns in additional contigs if needed
    if support.dataset_has_interlaced_reads(dataset_data) or support.dataset_has_additional_contigs(dataset_data)\
            or support.dataset_has_nxmate_reads(dataset_data):
        dir_for_split_reads = os.path.join(options_storage.output_dir, 'split_input')
        if support.dataset_has_interlaced_reads(dataset_data) or support.dataset_has_nxmate_reads(dataset_data):
            if not os.path.isdir(dir_for_split_reads):
                os.makedirs(dir_for_split_reads)
            if support.dataset_has_interlaced_reads(dataset_data):
                dataset_data = support.split_interlaced_reads(dataset_data, dir_for_split_reads, log)
            if support.dataset_has_nxmate_reads(dataset_data):
                dataset_data = support.process_nxmate_reads(dataset_data, dir_for_split_reads, log)
        if support.dataset_has_additional_contigs(dataset_data):
            dataset_data = support.process_Ns_in_additional_contigs(dataset_data, dir_for_split_reads, log)
        options_storage.dataset_yaml_filename = os.path.join(options_storage.output_dir, "input_dataset.yaml")
        pyyaml.dump(dataset_data, open(options_storage.dataset_yaml_filename, 'w'))
        cfg["dataset"].yaml_filename = options_storage.dataset_yaml_filename

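    # run the pipeline stages; any unexpected exception is reported via support.error below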
    try:
        # copying configs before all computations (to prevent their modification at run time)
        tmp_configs_dir = os.path.join(cfg["common"].output_dir, "configs")
        if os.path.isdir(tmp_configs_dir) and not options_storage.continue_mode:
            shutil.rmtree(tmp_configs_dir)
        if not os.path.isdir(tmp_configs_dir):
            if options_storage.configs_dir:
                dir_util.copy_tree(options_storage.configs_dir, tmp_configs_dir, preserve_times=False, preserve_mode=False)
            else:
                dir_util.copy_tree(os.path.join(spades_home, "configs"), tmp_configs_dir, preserve_times=False, preserve_mode=False)

        corrected_dataset_yaml_filename = ''
        if "error_correction" in cfg:
            STAGE_NAME = "Read error correction"
            bh_cfg = merge_configs(cfg["error_correction"], cfg["common"])
            corrected_dataset_yaml_filename = os.path.join(bh_cfg.output_dir, "corrected.yaml")
            ec_is_needed = True
            only_compressing_is_needed = False
            if os.path.isfile(corrected_dataset_yaml_filename) and options_storage.continue_mode \
                    and not options_storage.restart_from == "ec":
                if not bh_cfg.gzip_output or \
                        support.dataset_has_gzipped_reads(pyyaml.load(open(corrected_dataset_yaml_filename, 'r'))):
                    log.info("\n===== Skipping %s (already processed). \n" % STAGE_NAME)
                    ec_is_needed = False
                else:
                    only_compressing_is_needed = True
            if ec_is_needed:
                if not only_compressing_is_needed:
                    support.continue_from_here(log)

                    if "HEAPCHECK" in os.environ:
                        del os.environ["HEAPCHECK"]
                    if "heap_check" in bh_cfg.__dict__:
                        os.environ["HEAPCHECK"] = bh_cfg.heap_check

                    if os.path.exists(bh_cfg.output_dir):
                        shutil.rmtree(bh_cfg.output_dir)
                    os.makedirs(bh_cfg.output_dir)

                bh_cfg.__dict__["dataset_yaml_filename"] = cfg["dataset"].yaml_filename
                log.info("\n===== %s started. \n" % STAGE_NAME)

                hammer_logic.run_hammer(corrected_dataset_yaml_filename, tmp_configs_dir, bin_home, bh_cfg, dataset_data,
                    ext_python_modules_home, only_compressing_is_needed, log)
                log.info("\n===== %s finished. \n" % STAGE_NAME)
            if options_storage.stop_after == 'ec':
                support.finish_here(log)

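        # absolute paths of the final result files inside the output directory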
        result_contigs_filename = os.path.join(cfg["common"].output_dir, options_storage.contigs_name)
        result_scaffolds_filename = os.path.join(cfg["common"].output_dir, options_storage.scaffolds_name)
        result_assembly_graph_filename = os.path.join(cfg["common"].output_dir, options_storage.assembly_graph_name)
        result_contigs_paths_filename = os.path.join(cfg["common"].output_dir, options_storage.contigs_paths)
        result_scaffolds_paths_filename = os.path.join(cfg["common"].output_dir, options_storage.scaffolds_paths)
        result_transcripts_filename = os.path.join(cfg["common"].output_dir, options_storage.transcripts_name)
        result_transcripts_paths_filename = os.path.join(cfg["common"].output_dir, options_storage.transcripts_paths)
        truseq_long_reads_file_base = os.path.join(cfg["common"].output_dir, "truseq_long_reads")
        truseq_long_reads_file = truseq_long_reads_file_base + ".fasta"
        misc_dir = os.path.join(cfg["common"].output_dir, "misc")
        ### if mismatch correction is enabled, the resulting contigs are copied to the misc directory
        assembled_contigs_filename = os.path.join(misc_dir, "assembled_contigs.fasta")
        assembled_scaffolds_filename = os.path.join(misc_dir, "assembled_scaffolds.fasta")
        if "assembly" in cfg and not options_storage.run_completed:
            STAGE_NAME = "Assembling"
            spades_cfg = merge_configs(cfg["assembly"], cfg["common"])
            spades_cfg.__dict__["result_contigs"] = result_contigs_filename
            spades_cfg.__dict__["result_scaffolds"] = result_scaffolds_filename
            spades_cfg.__dict__["result_graph"] = result_assembly_graph_filename
            spades_cfg.__dict__["result_contigs_paths"] = result_contigs_paths_filename
            spades_cfg.__dict__["result_scaffolds_paths"] = result_scaffolds_paths_filename
            spades_cfg.__dict__["result_transcripts"] = result_transcripts_filename
            spades_cfg.__dict__["result_transcripts_paths"] = result_transcripts_paths_filename

            if options_storage.continue_mode and (os.path.isfile(spades_cfg.result_contigs)
                                                  or ("mismatch_corrector" in cfg and
                                                      os.path.isfile(assembled_contigs_filename))
                                                  or (options_storage.truseq_mode and os.path.isfile(assembled_scaffolds_filename)))\
                and not options_storage.restart_from == 'as' \
                and not options_storage.restart_from == 'scc' \
                and not (options_storage.restart_from and options_storage.restart_from.startswith('k')):

                log.info("\n===== Skipping %s (already processed). \n" % STAGE_NAME)
                # calculating latest_dir for the next stages
                latest_dir = support.get_latest_dir(os.path.join(spades_cfg.output_dir, "K*"))
                if not latest_dir:
                    support.error("failed to continue the previous run! Please restart from previous stages or from the beginning.", log)
            else:
                old_result_files = [result_contigs_filename, result_scaffolds_filename,
                                    assembled_contigs_filename, assembled_scaffolds_filename]
                for old_result_file in old_result_files:
                    if os.path.isfile(old_result_file):
                        os.remove(old_result_file)

                if options_storage.restart_from == 'as':
                    support.continue_from_here(log)

                if os.path.isfile(corrected_dataset_yaml_filename):
                    dataset_data = pyyaml.load(open(corrected_dataset_yaml_filename, 'r'))
                    dataset_data = support.relative2abs_paths(dataset_data, os.path.dirname(corrected_dataset_yaml_filename))
                if spades_cfg.disable_rr:
                    spades_cfg.__dict__["rr_enable"] = False
                else:
                    spades_cfg.__dict__["rr_enable"] = True

                if "HEAPCHECK" in os.environ:
                    del os.environ["HEAPCHECK"]
                if "heap_check" in spades_cfg.__dict__:
                    os.environ["HEAPCHECK"] = spades_cfg.heap_check

                log.info("\n===== %s started.\n" % STAGE_NAME)

                # creating dataset
                dataset_filename = os.path.join(spades_cfg.output_dir, "dataset.info")
                if not os.path.isfile(dataset_filename) or not options_storage.continue_mode:
                    dataset_file = open(dataset_filename, 'w')
                    import process_cfg
                    if os.path.isfile(corrected_dataset_yaml_filename):
                        dataset_file.write("reads" + '\t' + process_cfg.process_spaces(corrected_dataset_yaml_filename) + '\n')
                    else:
                        dataset_file.write("reads" + '\t' + process_cfg.process_spaces(cfg["dataset"].yaml_filename) + '\n')
                    if spades_cfg.developer_mode and "reference" in cfg["dataset"].__dict__:
                        dataset_file.write("reference_genome" + '\t')
                        dataset_file.write(process_cfg.process_spaces(cfg["dataset"].reference) + '\n')
                    dataset_file.close()
                spades_cfg.__dict__["dataset"] = dataset_filename

                used_K = spades_logic.run_spades(tmp_configs_dir, bin_home, spades_cfg, dataset_data, ext_python_modules_home, log)

                if os.path.isdir(misc_dir) and not options_storage.continue_mode:
                    shutil.rmtree(misc_dir)
                if not os.path.isdir(misc_dir):
                    os.makedirs(misc_dir)

                if options_storage.continue_mode and options_storage.restart_from and options_storage.restart_from.startswith('k'):
                    k_str = options_storage.restart_from[1:]
                    if k_str.find(":") != -1:
                        k_str = k_str[:k_str.find(":")]
                    support.error("failed to continue from K=%s because this K was not processed in the original run!" % k_str, log)
                log.info("\n===== %s finished. Used k-mer sizes: %s \n" % (STAGE_NAME, ', '.join(map(str, used_K))))
            if not options_storage.run_completed:
                if options_storage.stop_after in ('as', 'scc') or \
                        (options_storage.stop_after and options_storage.stop_after.startswith('k')):
                    support.finish_here(log)

            # TruSeq postprocessing
            if cfg["run_truseq_postprocessing"] and not options_storage.run_completed:
                if options_storage.continue_mode and os.path.isfile(truseq_long_reads_file_base + ".fastq") and not options_storage.restart_from == 'tpp':
                    log.info("\n===== Skipping %s (already processed). \n" % "TruSeq postprocessing")
                else:
                    support.continue_from_here(log)
                    if os.path.isfile(result_scaffolds_filename):
                        shutil.move(result_scaffolds_filename, assembled_scaffolds_filename)
                    reads_library = dataset_data[0]
                    alignment_bin = os.path.join(bin_home, "bwa-spades")
                    alignment_dir = os.path.join(cfg["common"].output_dir, "alignment")
                    sam_files = alignment.align_bwa(alignment_bin, assembled_scaffolds_filename, dataset_data, alignment_dir, log, options_storage.threads)
                    moleculo_postprocessing.moleculo_postprocessing(assembled_scaffolds_filename, truseq_long_reads_file_base, sam_files, log)
                if options_storage.stop_after == 'tpp':
                    support.finish_here(log)

            # mismatch corrector
            if "mismatch_corrector" in cfg and not options_storage.run_completed and \
                    (os.path.isfile(result_contigs_filename) or
                    (options_storage.continue_mode and os.path.isfile(assembled_contigs_filename))):
                STAGE_NAME = "Mismatch correction"
                to_correct = dict()
                to_correct["contigs"] = (result_contigs_filename, assembled_contigs_filename)
                if os.path.isfile(result_scaffolds_filename) or (options_storage.continue_mode and
                                                                 os.path.isfile(assembled_scaffolds_filename)):
                    to_correct["scaffolds"] = (result_scaffolds_filename, assembled_scaffolds_filename)

                # moving assembled contigs (scaffolds) to misc dir
                for assembly_type, (old, new) in to_correct.items():
                    if options_storage.continue_mode and os.path.isfile(new):
                        continue
                    if os.path.isfile(old):
                        shutil.move(old, new)

                if options_storage.continue_mode and os.path.isfile(result_contigs_filename) and \
                    (os.path.isfile(result_scaffolds_filename) or not os.path.isfile(assembled_scaffolds_filename)) \
                    and not options_storage.restart_from == 'mc':
                    log.info("\n===== Skipping %s (already processed). \n" % STAGE_NAME)
                else:
                    if options_storage.restart_from == 'mc':
                        support.continue_from_here(log)

                    log.info("\n===== %s started." % STAGE_NAME)
                    # detecting paired-end library with the largest insert size
                    cfg["mismatch_corrector"].__dict__["dataset"] = cfg["dataset"].yaml_filename
                    #TODO: add reads orientation

                    import corrector_logic
                    corrector_cfg = cfg["mismatch_corrector"]
                    # processing contigs and scaffolds (or only contigs)
                    for assembly_type, (corrected, assembled) in to_correct.items():
                        if options_storage.continue_mode and os.path.isfile(corrected):
                            log.info("\n== Skipping processing of " + assembly_type + " (already processed)\n")
                            continue

                        support.continue_from_here(log)
                        log.info("\n== Processing of " + assembly_type + "\n")

                        tmp_dir_for_corrector = os.path.join(cfg["common"].output_dir, "mismatch_corrector", assembly_type)

                        cfg["mismatch_corrector"].__dict__["output_dir"] = tmp_dir_for_corrector
                        # correcting
                        corr_cfg = merge_configs(cfg["mismatch_corrector"], cfg["common"])

                        result_corrected_filename = os.path.join(tmp_dir_for_corrector, "corrected_contigs.fasta")

                        corrector_logic.run_corrector(tmp_configs_dir, bin_home, corr_cfg,
                                                      ext_python_modules_home, log,
                                                      assembled, result_corrected_filename)

                        if os.path.isfile(result_corrected_filename):
                            shutil.copyfile(result_corrected_filename, corrected)
                        tmp_d = os.path.join(tmp_dir_for_corrector, "tmp")
                        if os.path.isdir(tmp_d) and not cfg["common"].developer_mode:
                            shutil.rmtree(tmp_d)
                    log.info("\n===== %s finished.\n" % STAGE_NAME)
                if options_storage.stop_after == 'mc':
                    support.finish_here(log)

        if not cfg["common"].developer_mode and os.path.isdir(tmp_configs_dir):
            shutil.rmtree(tmp_configs_dir)

        if not options_storage.run_completed:
            #log.info("")
            if "error_correction" in cfg and os.path.isdir(os.path.dirname(corrected_dataset_yaml_filename)):
                log.info(" * Corrected reads are in " + support.process_spaces(os.path.dirname(corrected_dataset_yaml_filename) + "/"))
            if "assembly" in cfg and os.path.isfile(result_contigs_filename):
                message = " * Assembled contigs are in " + support.process_spaces(result_contigs_filename)
                log.info(message)
            if options_storage.rna:
                if "assembly" in cfg and os.path.isfile(result_transcripts_filename):
                    message = " * Assembled transcripts are in " + support.process_spaces(result_transcripts_filename)
                    log.info(message)
                if "assembly" in cfg and os.path.isfile(result_transcripts_paths_filename):
                    message = " * Paths in the assembly graph corresponding to the transcripts are in " + \
                              support.process_spaces(result_transcripts_paths_filename)
                    log.info(message)
            else:
                if "assembly" in cfg and os.path.isfile(result_scaffolds_filename):
                    message = " * Assembled scaffolds are in " + support.process_spaces(result_scaffolds_filename)
                    log.info(message)
                if "assembly" in cfg and os.path.isfile(result_assembly_graph_filename):
                    message = " * Assembly graph is in " + support.process_spaces(result_assembly_graph_filename)
                    log.info(message)
                if "assembly" in cfg and os.path.isfile(result_contigs_paths_filename):
                    message = " * Paths in the assembly graph corresponding to the contigs are in " + \
                              support.process_spaces(result_contigs_paths_filename)
                    log.info(message)
                if "assembly" in cfg and os.path.isfile(result_scaffolds_paths_filename):
                    message = " * Paths in the assembly graph corresponding to the scaffolds are in " + \
                              support.process_spaces(result_scaffolds_paths_filename)
                    log.info(message)
            #log.info("")

        # breaking scaffolds
        if os.path.isfile(result_scaffolds_filename):
            if not os.path.isdir(misc_dir):
                os.makedirs(misc_dir)
            result_broken_scaffolds = os.path.join(misc_dir, "broken_scaffolds.fasta")
            if not os.path.isfile(result_broken_scaffolds) or not options_storage.continue_mode:
                modified, broken_scaffolds = support.break_scaffolds(result_scaffolds_filename,
                    options_storage.THRESHOLD_FOR_BREAKING_SCAFFOLDS)
                if modified:
                    support.write_fasta(result_broken_scaffolds, broken_scaffolds)
                    #log.info(" * Scaffolds broken by " + str(options_storage.THRESHOLD_FOR_BREAKING_SCAFFOLDS) +
                    # " Ns are in " + result_broken_scaffolds)

        ### printing WARNINGS SUMMARY
        if not support.log_warnings(log):
            log.info("\n======= SPAdes pipeline finished.")  # otherwise it finished WITH WARNINGS

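        # sanity checks for --test runs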
        if options_storage.test_mode:
            if options_storage.truseq_mode:
                if not os.path.isfile(truseq_long_reads_file):
                    support.error("TEST FAILED: %s does not exist!" % truseq_long_reads_file)
            elif options_storage.rna:
                if not os.path.isfile(result_transcripts_filename):
                    support.error("TEST FAILED: %s does not exist!" % result_transcripts_filename)
            else:
                for result_filename in [result_contigs_filename, result_scaffolds_filename]:
                    if os.path.isfile(result_filename):
                        result_fasta = list(support.read_fasta(result_filename))
                        # correctness check: should be one contig of length 1000 bp
                        correct_number = 1
                        correct_length = 1000
                        if not len(result_fasta):
                            support.error("TEST FAILED: %s does not contain contigs!" % result_filename)
                        elif len(result_fasta) > correct_number:
                            support.error("TEST FAILED: %s contains more than %d contig (%d)!" %
                                          (result_filename, correct_number, len(result_fasta)))
                        elif len(result_fasta[0][1]) != correct_length:
                            if len(result_fasta[0][1]) > correct_length:
                                relation = "more"
                            else:
                                relation = "less"
                            support.error("TEST FAILED: %s contains %s than %d bp (%d bp)!" %
                                          (result_filename, relation, correct_length, len(result_fasta[0][1])))
                    else:
                        support.error("TEST FAILED: " + result_filename + " does not exist!")
            log.info("\n========= TEST PASSED CORRECTLY.")


        log.info("\nSPAdes log can be found here: " + log_filename)
        log.info("")
        log.info("Thank you for using SPAdes!")
        log.removeHandler(log_handler)

    except Exception:
        exc_type, exc_value, _ = sys.exc_info()
        if exc_type == SystemExit:
            sys.exit(exc_value)
        else:
            if exc_type == OSError and exc_value.errno == errno.ENOEXEC: # Exec format error
                support.error("It looks like you are using SPAdes binaries for another platform.\n" +
                              support.get_spades_binaries_info_message())
            else:
                log.exception(exc_value)
                support.error("exception caught: %s" % exc_type, log)
    except BaseException:  # since python 2.5 system-exiting exceptions (e.g. KeyboardInterrupt) are derived from BaseException
        exc_type, exc_value, _ = sys.exc_info()
        if exc_type == SystemExit:
            sys.exit(exc_value)
        else:
            log.exception(exc_value)
            support.error("exception caught: %s" % exc_type, log)