Example #1
    def run(self):
        super(JobConsensus, self).run()
        if not os.path.isdir(self.consensus_dir):
            os.mkdir(self.consensus_dir)

        #split into 1Mb chunks to reduce RAM usage
        CHUNK_SIZE = 1000000
        chunks_file = os.path.join(self.consensus_dir, "chunks.fasta")
        chunks = aln.split_into_chunks(fp.read_sequence_dict(self.in_contigs),
                                       CHUNK_SIZE)
        fp.write_fasta_dict(chunks, chunks_file)

        logger.info("Running Minimap2")
        out_alignment = os.path.join(self.consensus_dir, "minimap.bam")
        aln.make_alignment(chunks_file,
                           self.args.reads,
                           self.args.threads,
                           self.consensus_dir,
                           self.args.platform,
                           out_alignment,
                           reference_mode=True,
                           sam_output=True)

        contigs_info = aln.get_contigs_info(chunks_file)
        logger.info("Computing consensus")
        consensus_fasta = cons.get_consensus(out_alignment, chunks_file,
                                             contigs_info, self.args.threads,
                                             self.args.platform)

        #merge chunks back into single sequences
        merged_fasta = aln.merge_chunks(consensus_fasta)
        fp.write_fasta_dict(merged_fasta, self.out_consensus)
        os.remove(chunks_file)
        os.remove(out_alignment)
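For reference, here is a minimal sketch of what the chunking helpers used above could look like. The real implementations live in Flye's alignment module; the "<header>$chunk_<n>" id scheme below is an assumption made for illustration, not necessarily the project's actual convention.

def split_into_chunks(fasta_dict, chunk_size):
    #assumed id scheme: "<original header>$chunk_<index>"
    chunks = {}
    for header, seq in fasta_dict.items():
        for i in range(0, len(seq), chunk_size):
            chunk_id = "{0}$chunk_{1}".format(header, i // chunk_size)
            chunks[chunk_id] = seq[i:i + chunk_size]
    return chunks

def merge_chunks(fasta_dict):
    #reassemble each original sequence by sorting its chunks by index
    grouped = {}
    for chunk_id, seq in fasta_dict.items():
        header, index = chunk_id.rsplit("$chunk_", 1)
        grouped.setdefault(header, []).append((int(index), seq))
    return {header: "".join(seq for _, seq in sorted(chunks))
            for header, chunks in grouped.items()}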
Example #2
def polish(contig_seqs, read_seqs, work_dir, num_iters, num_threads,
           error_mode, output_progress):
    """
    High-level polisher interface
    """

    logger_func = logger.info if output_progress else logger.debug

    subs_matrix = os.path.join(
        cfg.vals["pkg_root"], cfg.vals["err_modes"][error_mode]["subs_matrix"])
    hopo_matrix = os.path.join(
        cfg.vals["pkg_root"], cfg.vals["err_modes"][error_mode]["hopo_matrix"])

    prev_assembly = contig_seqs
    contig_lengths = None
    for i in xrange(num_iters):
        logger_func("Polishing genome ({0}/{1})".format(i + 1, num_iters))

        alignment_file = os.path.join(work_dir,
                                      "minimap_{0}.sam".format(i + 1))
        logger_func("Running minimap2")
        make_alignment(prev_assembly, read_seqs, num_threads, work_dir,
                       error_mode, alignment_file)

        logger_func("Separating alignment into bubbles")
        contigs_info = get_contigs_info(prev_assembly)
        bubbles_file = os.path.join(work_dir,
                                    "bubbles_{0}.fasta".format(i + 1))
        coverage_stats, mean_aln_error = \
            make_bubbles(alignment_file, contigs_info, prev_assembly,
                         error_mode, num_threads,
                         bubbles_file)
        logger_func("Alignment error rate: {0}".format(mean_aln_error))

        logger_func("Correcting bubbles")
        consensus_out = os.path.join(work_dir,
                                     "consensus_{0}.fasta".format(i + 1))
        polished_file = os.path.join(work_dir,
                                     "polished_{0}.fasta".format(i + 1))
        _run_polish_bin(bubbles_file, subs_matrix, hopo_matrix, consensus_out,
                        num_threads)
        polished_fasta, polished_lengths = _compose_sequence([consensus_out])
        fp.write_fasta_dict(polished_fasta, polished_file)

        contig_lengths = polished_lengths
        prev_assembly = polished_file

    stats_file = os.path.join(work_dir, "contigs_stats.txt")
    with open(stats_file, "w") as f:
        f.write("seq_name\tlength\tcoverage\n")
        for ctg_id in contig_lengths:
            f.write("{0}\t{1}\t{2}\n".format(ctg_id, contig_lengths[ctg_id],
                                             coverage_stats[ctg_id]))
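A hypothetical invocation of this interface; the file names and the error_mode value are illustrative assumptions (error_mode must be a key of cfg.vals["err_modes"]):

polish(contig_seqs="draft_assembly.fasta",
       read_seqs=["reads.fastq.gz"],   #one or more read files
       work_dir="polish_workdir",      #directory must already exist
       num_iters=2,                    #number of polishing rounds
       num_threads=8,
       error_mode="nano",              #illustrative err_modes key
       output_progress=True)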
Example #3
File: main.py Project: pythseq/Flye
    def run(self):
        if not os.path.isdir(self.consensus_dir):
            os.mkdir(self.consensus_dir)

        logger.info("Running Minimap2")
        out_alignment = os.path.join(self.consensus_dir, "minimap.sam")
        aln.make_alignment(self.in_contigs, self.args.reads, self.args.threads,
                           self.consensus_dir, self.args.platform,
                           out_alignment)

        contigs_info = aln.get_contigs_info(self.in_contigs)
        logger.info("Computing consensus")
        consensus_fasta = cons.get_consensus(out_alignment, self.in_contigs,
                                             contigs_info, self.args.threads,
                                             self.args.platform)
        fp.write_fasta_dict(consensus_fasta, self.out_consensus)
Example #4
    def run(self):
        super(JobConsensus, self).run()
        if not os.path.isdir(self.consensus_dir):
            os.mkdir(self.consensus_dir)

        logger.info("Running Minimap2")
        out_alignment = os.path.join(self.consensus_dir, "minimap.bam")
        aln.make_alignment(self.in_contigs,
                           self.args.reads,
                           self.args.threads,
                           self.consensus_dir,
                           self.args.platform,
                           out_alignment,
                           reference_mode=True,
                           sam_output=True)

        contigs_info = aln.get_contigs_info(self.in_contigs)
        logger.info("Computing consensus")
        consensus_fasta = cons.get_consensus(out_alignment, self.in_contigs,
                                             contigs_info, self.args.threads,
                                             self.args.platform)

        fp.write_fasta_dict(consensus_fasta, self.out_consensus)
        os.remove(out_alignment)
Example #5
def polish(contig_seqs, read_seqs, work_dir, num_iters, num_threads,
           error_mode, output_progress):
    """
    High-level polisher interface
    """
    logger_state = logger.disabled
    if not output_progress:
        logger.disabled = True

    subs_matrix = os.path.join(
        cfg.vals["pkg_root"], cfg.vals["err_modes"][error_mode]["subs_matrix"])
    hopo_matrix = os.path.join(
        cfg.vals["pkg_root"], cfg.vals["err_modes"][error_mode]["hopo_matrix"])
    stats_file = os.path.join(work_dir, "contigs_stats.txt")

    prev_assembly = contig_seqs
    contig_lengths = None
    coverage_stats = None
    for i in xrange(num_iters):
        logger.info("Polishing genome ({0}/{1})".format(i + 1, num_iters))

        #split into 1Mb chunks to reduce RAM usage
        #slightly vary chunk size between iterations
        CHUNK_SIZE = 1000000 - (i % 2) * 100000
        chunks_file = os.path.join(work_dir, "chunks_{0}.fasta".format(i + 1))
        chunks = split_into_chunks(fp.read_sequence_dict(prev_assembly),
                                   CHUNK_SIZE)
        fp.write_fasta_dict(chunks, chunks_file)

        ####
        logger.info("Running minimap2")
        alignment_file = os.path.join(work_dir,
                                      "minimap_{0}.sam".format(i + 1))
        make_alignment(chunks_file,
                       read_seqs,
                       num_threads,
                       work_dir,
                       error_mode,
                       alignment_file,
                       reference_mode=True,
                       sam_output=True)

        #####
        logger.info("Separating alignment into bubbles")
        contigs_info = get_contigs_info(chunks_file)
        bubbles_file = os.path.join(work_dir,
                                    "bubbles_{0}.fasta".format(i + 1))
        coverage_stats, mean_aln_error = \
            make_bubbles(alignment_file, contigs_info, chunks_file,
                         error_mode, num_threads,
                         bubbles_file)

        logger.info("Alignment error rate: {0}".format(mean_aln_error))
        consensus_out = os.path.join(work_dir,
                                     "consensus_{0}.fasta".format(i + 1))
        polished_file = os.path.join(work_dir,
                                     "polished_{0}.fasta".format(i + 1))
        if os.path.getsize(bubbles_file) == 0:
            logger.info("No reads were aligned during polishing")
            if not output_progress:
                logger.disabled = logger_state
            open(stats_file, "w").write("#seq_name\tlength\tcoverage\n")
            open(polished_file, "w")
            return polished_file, stats_file

        #####
        logger.info("Correcting bubbles")
        _run_polish_bin(bubbles_file, subs_matrix, hopo_matrix, consensus_out,
                        num_threads, output_progress)
        polished_fasta, polished_lengths = _compose_sequence(consensus_out)
        merged_chunks = merge_chunks(polished_fasta)
        fp.write_fasta_dict(merged_chunks, polished_file)

        #Cleanup
        os.remove(chunks_file)
        os.remove(bubbles_file)
        os.remove(consensus_out)
        os.remove(alignment_file)

        contig_lengths = polished_lengths
        prev_assembly = polished_file

    #merge information from chunks
    contig_lengths = merge_chunks(contig_lengths, fold_function=sum)
    coverage_stats = merge_chunks(coverage_stats,
                                  fold_function=lambda l: sum(l) / len(l))

    with open(stats_file, "w") as f:
        f.write("#seq_name\tlength\tcoverage\n")
        for ctg_id in contig_lengths:
            f.write("{0}\t{1}\t{2}\n".format(ctg_id, contig_lengths[ctg_id],
                                             coverage_stats[ctg_id]))

    if not output_progress:
        logger.disabled = logger_state

    return prev_assembly, stats_file
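The final merge_chunks calls above fold per-chunk statistics (lengths, coverages) back into per-contig values rather than concatenating sequences. A hedged sketch of such a generalized merge, under the same assumed "<header>$chunk_<n>" id scheme as in the sketch after Example #1:

def merge_chunks(values_by_chunk, fold_function="".join):
    #group values by original header, order by chunk index, then fold;
    #the default fold concatenates, which also covers sequence merging
    grouped = {}
    for chunk_id, value in values_by_chunk.items():
        header, index = chunk_id.rsplit("$chunk_", 1)
        grouped.setdefault(header, []).append((int(index), value))
    return {header: fold_function([v for _, v in sorted(values)])
            for header, values in grouped.items()}

With this, merge_chunks(contig_lengths, fold_function=sum) sums chunk lengths into contig lengths, and the lambda in the example averages per-chunk coverage.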
Example #6
def generate_polished_edges(edges_file, gfa_file, polished_contigs, work_dir,
                            error_mode, num_threads):
    """
    Generate polished graph edges sequences by extracting them from
    polished contigs
    """
    logger.debug("Generating polished GFA")

    alignment_file = os.path.join(work_dir, "edges_aln.sam")
    polished_dict = fp.read_sequence_dict(polished_contigs)
    make_alignment(polished_contigs, [edges_file],
                   num_threads,
                   work_dir,
                   error_mode,
                   alignment_file,
                   reference_mode=True,
                   sam_output=True)
    aln_reader = SynchronizedSamReader(alignment_file, polished_dict,
                                       cfg.vals["max_read_coverage"])
    aln_reader.init_reading()
    aln_by_edge = defaultdict(list)

    #collecting alignments for each edge (the first one reported is used as the main alignment)
    while not aln_reader.is_eof():
        _, ctg_aln = aln_reader.get_chunk()
        for aln in ctg_aln:
            aln_by_edge[aln.qry_id].append(aln)
    aln_reader.stop_reading()

    MIN_CONTAINMENT = 0.9
    updated_seqs = 0
    edges_dict = fp.read_sequence_dict(edges_file)
    for edge in edges_dict:
        if edge in aln_by_edge:
            main_aln = aln_by_edge[edge][0]
            map_start = main_aln.trg_start
            map_end = main_aln.trg_end
            for aln in aln_by_edge[edge]:
                if aln.trg_id == main_aln.trg_id and aln.trg_sign == main_aln.trg_sign:
                    map_start = min(map_start, aln.trg_start)
                    map_end = max(map_end, aln.trg_end)

            new_seq = polished_dict[main_aln.trg_id][map_start:map_end]
            if main_aln.qry_sign == "-":
                new_seq = fp.reverse_complement(new_seq)

            #print edge, main_aln.qry_len, len(new_seq), main_aln.qry_start, main_aln.qry_end
            if float(len(new_seq)) / main_aln.qry_len > MIN_CONTAINMENT:
                edges_dict[edge] = new_seq
                updated_seqs += 1

    #writes fasta file with polished edges
    #edges_polished = os.path.join(work_dir, "polished_edges.fasta")
    #fp.write_fasta_dict(edges_dict, edges_polished)

    #writes gfa file with polished edges
    with open(os.path.join(work_dir, "polished_edges.gfa"), "w") as gfa_polished, \
         open(gfa_file, "r") as gfa_in:
        for line in gfa_in:
            if line.startswith("S"):
                seq_id = line.split()[1]
                coverage_tag = line.split()[3]
                gfa_polished.write("S\t{0}\t{1}\t{2}\n".format(
                    seq_id, edges_dict[seq_id], coverage_tag))
            else:
                gfa_polished.write(line)

    logger.debug("{0} sequences remained unpolished".format(
        len(edges_dict) - updated_seqs))
    os.remove(alignment_file)
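A hypothetical call, with all file names as illustrative assumptions; the function writes polished_edges.gfa into work_dir as a side effect:

generate_polished_edges(edges_file="edges.fasta",
                        gfa_file="assembly_graph.gfa",
                        polished_contigs="polished_contigs.fasta",
                        work_dir="work_dir",
                        error_mode="nano",     #illustrative error mode
                        num_threads=8)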
Example #7
def assemble_short_plasmids(args, work_dir, contigs_path):
    logger.debug("Extracting unmapped reads")
    reads2contigs_mapping = os.path.join(work_dir, "reads2contigs.paf")
    make_alignment(contigs_path, args.reads, args.threads,
                   work_dir, args.platform, reads2contigs_mapping,
                   reference_mode=True, sam_output=False)

    unmapped_reads_path = os.path.join(work_dir, "unmapped_reads.fasta")
    unmapped.extract_unmapped_reads(args, reads2contigs_mapping,
                                    unmapped_reads_path,
                                    mapping_rate_threshold=0.5)

    logger.debug("Finding self-mappings for unmapped reads")
    unmapped_reads_mapping = os.path.join(work_dir, "unmapped_ava.paf")
    make_alignment(unmapped_reads_path, [unmapped_reads_path], args.threads,
                   work_dir, args.platform, unmapped_reads_mapping,
                   reference_mode=False, sam_output=False)

    logger.debug("Extracting circular reads")
    circular_reads = circular.extract_circular_reads(unmapped_reads_mapping)
    logger.debug("Extracted %d circular reads", len(circular_reads))

    logger.debug("Extracting circular pairs")
    circular_pairs = circular.extract_circular_pairs(unmapped_reads_mapping)
    logger.debug("Extracted %d circular pairs", len(circular_pairs))

    #extracting only the necessary subset of reads (the entire file could be pretty big)
    interesting_reads = {}
    for read in circular_reads:
        interesting_reads[read] = None
    for pair in circular_pairs:
        interesting_reads[pair[0].query] = None
        interesting_reads[pair[0].target] = None
    for hdr, seq in fp.stream_sequence(unmapped_reads_path):
        if hdr in interesting_reads:
            interesting_reads[hdr] = seq

    trimmed_circular_reads = \
        circular.trim_circular_reads(circular_reads, interesting_reads)
    trimmed_circular_pairs = \
        circular.trim_circular_pairs(circular_pairs, interesting_reads)
    trimmed_sequences_path = os.path.join(work_dir, "trimmed_sequences.fasta")
    fp.write_fasta_dict(dict(list(trimmed_circular_reads.items()) +
                             list(trimmed_circular_pairs.items())),
                        trimmed_sequences_path)

    logger.debug("Clustering circular sequences")
    trimmed_sequences_mapping = os.path.join(work_dir, "trimmed.paf")
    make_alignment(trimmed_sequences_path, [trimmed_sequences_path], args.threads,
                   work_dir, args.platform, trimmed_sequences_mapping,
                   reference_mode=False, sam_output=False)

    plasmids = \
        circular.extract_unique_plasmids(trimmed_sequences_mapping,
                                         trimmed_sequences_path)

    plasmids_raw = os.path.join(work_dir, "plasmids_raw.fasta")
    fp.write_fasta_dict(plasmids, plasmids_raw)
    _, polished_stats = \
        pol.polish(plasmids_raw, [unmapped_reads_path], work_dir, 1,
                   args.threads, args.platform, output_progress=False)

    #extract coverage
    plasmids_with_coverage = {}
    if os.path.isfile(polished_stats):
        with open(polished_stats, "r") as f:
            for line in f:
                if line.startswith("#"): continue
                tokens = line.strip().split()
                seq_id, coverage = tokens[0], int(tokens[2])
                if coverage > 0:
                    plasmids_with_coverage[seq_id] = plasmids[seq_id], coverage

    logger.info("Added %d extra contigs", len(plasmids_with_coverage))

    # remove all unnecessary files
    os.remove(reads2contigs_mapping)
    os.remove(unmapped_reads_path)
    os.remove(unmapped_reads_mapping)
    os.remove(trimmed_sequences_path)
    os.remove(trimmed_sequences_mapping)

    return plasmids_with_coverage
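circular.extract_circular_reads is not shown in these examples. A hedged sketch of one plausible underlying test, flagging reads whose prefix aligns forward to their own suffix in the all-vs-all PAF (column layout per the PAF spec; function name and thresholds are illustrative assumptions):

def find_circular_candidates(paf_path, min_overlap=1000, max_overhang=200):
    #a read is a circularity candidate if a non-identity self-overlap
    #maps its prefix onto its own suffix on the forward strand
    candidates = set()
    with open(paf_path) as paf:
        for line in paf:
            cols = line.strip().split("\t")
            qry, qry_start, qry_end = cols[0], int(cols[2]), int(cols[3])
            strand, trg = cols[4], cols[5]
            trg_len, trg_start, trg_end = (int(cols[6]), int(cols[7]),
                                           int(cols[8]))
            if qry != trg or strand != "+":
                continue
            if qry_start == trg_start:      #skip the trivial identity hit
                continue
            if (qry_start <= max_overhang and
                    trg_len - trg_end <= max_overhang and
                    qry_end < trg_start and
                    qry_end - qry_start >= min_overlap):
                candidates.add(qry)
    return candidates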
Example #8
def polish(contig_seqs, read_seqs, work_dir, num_iters, num_threads,
           read_platform, read_type, output_progress):
    """
    High-level polisher interface
    """
    logger_state = logger.disabled
    if not output_progress:
        logger.disabled = True

    subs_matrix = os.path.join(
        cfg.vals["pkg_root"],
        cfg.vals["err_modes"][read_platform]["subs_matrix"])
    hopo_matrix = os.path.join(
        cfg.vals["pkg_root"],
        cfg.vals["err_modes"][read_platform]["hopo_matrix"])
    use_hopo = cfg.vals["err_modes"][read_platform]["hopo_enabled"]
    use_hopo = use_hopo and (read_type == "raw")
    stats_file = os.path.join(work_dir, "contigs_stats.txt")

    bam_input = read_seqs[0].endswith("bam")

    prev_assembly = contig_seqs
    contig_lengths = None
    coverage_stats = None
    for i in range(num_iters):
        logger.info("Polishing genome (%d/%d)", i + 1, num_iters)

        ####
        if not bam_input:
            logger.info("Running minimap2")
            alignment_file = os.path.join(work_dir,
                                          "minimap_{0}.bam".format(i + 1))
            make_alignment(prev_assembly,
                           read_seqs,
                           num_threads,
                           work_dir,
                           read_platform,
                           alignment_file,
                           reference_mode=True,
                           sam_output=True)
        else:
            logger.info("Polishing with provided bam")
            alignment_file = read_seqs[0]

        #####
        logger.info("Separating alignment into bubbles")
        contigs_info = get_contigs_info(prev_assembly)
        bubbles_file = os.path.join(work_dir,
                                    "bubbles_{0}.fasta".format(i + 1))
        coverage_stats, mean_aln_error = \
            make_bubbles(alignment_file, contigs_info, prev_assembly,
                         read_platform, num_threads,
                         bubbles_file)

        logger.info("Alignment error rate: %f", mean_aln_error)
        consensus_out = os.path.join(work_dir,
                                     "consensus_{0}.fasta".format(i + 1))
        polished_file = os.path.join(work_dir,
                                     "polished_{0}.fasta".format(i + 1))
        if os.path.getsize(bubbles_file) == 0:
            logger.info("No reads were aligned during polishing")
            if not output_progress:
                logger.disabled = logger_state
            open(stats_file, "w").write("#seq_name\tlength\tcoverage\n")
            open(polished_file, "w")
            return polished_file, stats_file

        #####
        logger.info("Correcting bubbles")
        _run_polish_bin(bubbles_file, subs_matrix, hopo_matrix, consensus_out,
                        num_threads, output_progress, use_hopo)
        polished_fasta, polished_lengths = _compose_sequence(consensus_out)
        fp.write_fasta_dict(polished_fasta, polished_file)

        #Cleanup
        os.remove(bubbles_file)
        os.remove(consensus_out)
        if not bam_input:
            os.remove(alignment_file)

        contig_lengths = polished_lengths
        prev_assembly = polished_file

    with open(stats_file, "w") as f:
        f.write("#seq_name\tlength\tcoverage\n")
        for ctg_id in contig_lengths:
            f.write("{0}\t{1}\t{2}\n".format(ctg_id, contig_lengths[ctg_id],
                                             coverage_stats[ctg_id]))

    if not output_progress:
        logger.disabled = logger_state

    return prev_assembly, stats_file
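This version also accepts a pre-computed alignment: if the first entry of read_seqs ends in "bam", the minimap2 step is skipped and the file is handed to make_bubbles directly. A hypothetical invocation (file names and parameter values are illustrative assumptions):

polished_fasta, stats = polish(
    contig_seqs="assembly.fasta",
    read_seqs=["aligned_reads.bam"],  #".bam" suffix triggers the bam branch
    work_dir="polish_dir",
    num_iters=1,                      #a fixed bam only matches the original
                                      #assembly, so one round makes sense
    num_threads=8,
    read_platform="nano",
    read_type="corrected",            #anything other than "raw" disables
                                      #homopolymer correction
    output_progress=True)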
Example #9
def generate_polished_edges(edges_file, gfa_file, polished_contigs, work_dir,
                            error_mode, polished_stats, num_threads):
    """
    Generate polished graph edges sequences by extracting them from
    polished contigs
    """
    logger.debug("Generating polished GFA")

    edges_new_coverage = {}
    with open(polished_stats, "r") as f:
        for line in f:
            if line.startswith("#"):
                continue
            ctg, _len, coverage = line.strip().split()
            ctg_id = ctg.split("_")[1]
            edges_new_coverage[ctg_id] = int(coverage)

    alignment_file = os.path.join(work_dir, "edges_aln.bam")
    polished_dict = fp.read_sequence_dict(polished_contigs)
    make_alignment(polished_contigs, [edges_file],
                   num_threads,
                   work_dir,
                   error_mode,
                   alignment_file,
                   reference_mode=True,
                   sam_output=True)
    aln_reader = SynchronizedSamReader(alignment_file, polished_dict,
                                       multiprocessing.Manager(),
                                       cfg.vals["max_read_coverage"])
    aln_by_edge = defaultdict(list)

    #getting one best alignment for each contig
    #for ctg in polished_dict:
    #    ctg_aln = aln_reader.get_alignments(ctg)
    for aln in aln_reader.get_all_alignments():
        aln_by_edge[aln.qry_id].append(aln)
    #logger.debug("Bam parsing done")

    MIN_CONTAINMENT = 0.9
    updated_seqs = 0
    edges_dict = fp.read_sequence_dict(edges_file)
    for edge in edges_dict:
        if edge in aln_by_edge:
            aln_by_edge[edge].sort(key=lambda a: a.qry_end - a.qry_start,
                                   reverse=True)
            main_aln = aln_by_edge[edge][0]
            map_start = main_aln.trg_start
            map_end = main_aln.trg_end
            for aln in aln_by_edge[edge]:
                if aln.trg_id == main_aln.trg_id and aln.trg_sign == main_aln.trg_sign:
                    map_start = min(map_start, aln.trg_start)
                    map_end = max(map_end, aln.trg_end)

            new_seq = polished_dict[main_aln.trg_id][map_start:map_end]
            if main_aln.qry_sign == "-":
                new_seq = fp.reverse_complement(new_seq)

            #print(edge, main_aln.qry_len, len(new_seq), main_aln.qry_start, main_aln.qry_end)
            if len(new_seq) / main_aln.qry_len > MIN_CONTAINMENT:
                edges_dict[edge] = new_seq
                updated_seqs += 1

    #writes fasta file with polished edges
    #edges_polished = os.path.join(work_dir, "polished_edges.fasta")
    #fp.write_fasta_dict(edges_dict, edges_polished)

    #writes gfa file with polished edges
    with open(os.path.join(work_dir, "polished_edges.gfa"), "w") as gfa_polished, \
         open(gfa_file, "r") as gfa_in:
        for line in gfa_in:
            if line.startswith("S"):
                seq_id = line.split()[1]
                coverage_tag = line.split()[3]
                seq_num = seq_id.split("_")[1]
                if seq_num in edges_new_coverage:
                    #logger.info("from {0} to {1}".format(coverage_tag, edges_new_coverage[seq_num]))
                    coverage_tag = "dp:i:{0}".format(
                        edges_new_coverage[seq_num])
                gfa_polished.write("S\t{0}\t{1}\t{2}\n".format(
                    seq_id, edges_dict[seq_id], coverage_tag))
            else:
                gfa_polished.write(line)

    logger.debug("%d sequences remained unpolished",
                 len(edges_dict) - updated_seqs)
    os.remove(alignment_file)
Example #10
def assemble_short_plasmids(args, work_dir, contigs_path):
    logger.debug("Assembling short plasmids")

    reads2contigs_mapping = os.path.join(work_dir, "reads2contigs.paf")
    make_alignment(contigs_path,
                   args.reads,
                   args.threads,
                   work_dir,
                   args.platform,
                   reads2contigs_mapping,
                   reference_mode=True,
                   sam_output=False)

    logger.debug("Extracting unmapped reads")
    unmapped_reads, n_processed_reads = \
        unmapped.extract_unmapped_reads(args, reads2contigs_mapping,
                                        mapping_rate_threshold=0.5)

    n_unmapped_reads = len(unmapped_reads)
    unmapped_reads_ratio = 100 * float(len(unmapped_reads)) / n_processed_reads
    unmapped_reads_ratio = round(unmapped_reads_ratio, 1)
    logger.debug("Extracted {} unmapped reads ({} %)".format(
        n_unmapped_reads, unmapped_reads_ratio))

    unmapped_reads_path = os.path.join(work_dir, "unmapped_reads.fasta")
    fp.write_fasta_dict(unmapped_reads, unmapped_reads_path)

    unmapped_reads_mapping = os.path.join(work_dir, "unmapped_ava.paf")

    logger.debug("Finding self-mappings for unmapped reads")
    make_alignment(unmapped_reads_path, [unmapped_reads_path],
                   args.threads,
                   work_dir,
                   args.platform,
                   unmapped_reads_mapping,
                   reference_mode=False,
                   sam_output=False)

    logger.debug("Extracting circular reads")
    circular_reads = circular.extract_circular_reads(unmapped_reads_mapping)
    logger.debug("Extracted {} circular reads".format(len(circular_reads)))

    logger.debug("Extracting circular pairs")
    circular_pairs = circular.extract_circular_pairs(unmapped_reads_mapping)
    logger.debug("Extracted {} circular pairs".format(len(circular_pairs)))

    logger.debug("Extracting unique plasmids from circular sequences")
    trimmed_circular_reads = \
        circular.trim_circular_reads(circular_reads, unmapped_reads)
    trimmed_circular_pairs = \
        circular.trim_circular_pairs(circular_pairs, unmapped_reads)
    trimmed_sequences_path = os.path.join(work_dir, "trimmed_sequences.fasta")

    fp.write_fasta_dict(
        dict(trimmed_circular_reads.items() + trimmed_circular_pairs.items()),
        trimmed_sequences_path)

    trimmed_sequences_mapping = os.path.join(work_dir, "trimmed.paf")

    make_alignment(trimmed_sequences_path, [trimmed_sequences_path],
                   args.threads,
                   work_dir,
                   args.platform,
                   trimmed_sequences_mapping,
                   reference_mode=False,
                   sam_output=False)

    plasmids = \
        circular.extract_unique_plasmids(trimmed_sequences_mapping,
                                         trimmed_sequences_path)

    plasmids_raw = os.path.join(work_dir, "plasmids_raw.fasta")
    fp.write_fasta_dict(plasmids, plasmids_raw)
    pol.polish(plasmids_raw, [unmapped_reads_path],
               work_dir,
               1,
               args.threads,
               args.platform,
               output_progress=False)

    #extract coverage
    plasmids_with_coverage = {}
    if os.path.isfile(os.path.join(work_dir, "contigs_stats.txt")):
        with open(os.path.join(work_dir, "contigs_stats.txt"), "r") as f:
            for line in f:
                if line.startswith("seq"): continue
                tokens = line.strip().split()
                seq_id, coverage = tokens[0], int(tokens[2])
                if coverage > 0:
                    plasmids_with_coverage[seq_id] = plasmids[seq_id], coverage

    logger.info("Added {} extra contigs".format(len(plasmids_with_coverage)))

    # remove all unnecessary files
    os.remove(reads2contigs_mapping)
    os.remove(unmapped_reads_path)
    os.remove(unmapped_reads_mapping)
    os.remove(trimmed_sequences_path)
    os.remove(trimmed_sequences_mapping)

    return plasmids_with_coverage
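assemble_short_plasmids only reads args.reads, args.threads and args.platform from its args object, so a bare namespace is enough for a standalone driver. A hedged sketch (all values are illustrative assumptions):

import argparse

args = argparse.Namespace(reads=["reads.fastq.gz"], threads=8,
                          platform="nano")
extra_contigs = assemble_short_plasmids(args, "plasmid_workdir",
                                        "contigs.fasta")
#the result maps sequence id to a (sequence, coverage) tuple
for seq_id, (sequence, coverage) in extra_contigs.items():
    print("{0}\t{1}\t{2}".format(seq_id, len(sequence), coverage))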