def annotate_mvf(args):
    """Main method"""
    args.qprint("Running AnnotateMVF")
    mvf = MultiVariantFile(args.mvf, 'read')
    args.qprint("Input MVF header processed.")
    args.qprint("MVF flavor: {}".format(mvf.metadata['flavor']))
    gff, geneids = parse_gff_annotate(args.gff, mvf.metadata['contigs'],
                                      gene_prefix=args.gene_prefix)
    args.qprint("GFF processed.")
    outmvf = MultiVariantFile(args.out, 'write', overwrite=args.overwrite,
                              flavor=mvf.metadata['flavor'])
    outmvf.metadata = deepcopy(mvf.metadata)
    if args.nongenic_mode is False:
        outmvf.metadata['contigs'] = geneids
    outmvf.write_data(outmvf.get_header())
    args.qprint("Output MVF established.")
    entrybuffer = []
    nentry = 0
    args.qprint("Processing MVF entries.")
    for contigid, pos, allelesets in mvf.iterentries(decode=False):
        annotated_pos = False
        if contigid in gff:
            if pos in gff[contigid]:
                annotated_pos = True
            elif args.nongenic_mode is True and args.unmargin > 0:
                for xpos in range(pos - args.unmargin,
                                  pos + args.unmargin + 1):
                    if xpos in gff[contigid]:
                        annotated_pos = True
                        break
        if annotated_pos and not args.nongenic_mode:
            entrybuffer.append((gff[contigid][pos], pos, allelesets))
        elif args.nongenic_mode and not annotated_pos:
            entrybuffer.append((contigid, pos, allelesets))
        if args.nongenic_mode or annotated_pos:
            nentry += 1
            if nentry == args.line_buffer:
                args.qprint("Writing block of entries.")
                outmvf.write_entries(entrybuffer)
                entrybuffer = []
                nentry = 0
    if entrybuffer:
        outmvf.write_entries(entrybuffer)
        args.qprint("Writing final block of entries.")
        entrybuffer = []
        nentry = 0
    return ''
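
# Illustrative sketch (hypothetical helper, not part of the pipeline): how
# the --unmargin window check in annotate_mvf behaves in nongenic mode. The
# toy `gff_positions` argument stands in for gff[contigid]; real runs use
# the structures built by parse_gff_annotate.
def _example_unmargin_check(gff_positions, pos, unmargin):
    """Return True if `pos` falls within `unmargin` bases of any annotated
    position, mirroring the window scan in annotate_mvf."""
    for xpos in range(pos - unmargin, pos + unmargin + 1):
        if xpos in gff_positions:
            return True
    return False

# Example: positions 100-199 are annotated; 205 lies inside a 10-base margin.
# _example_unmargin_check(set(range(100, 200)), 205, 10)  -> True
# _example_unmargin_check(set(range(100, 200)), 250, 10)  -> False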
def annotate_mvf(args):
    """Main method"""
    mvf = MultiVariantFile(args.mvf, 'read')
    gff, geneids = parse_gff_annotate(args.gff, mvf.metadata['contigs'])
    if args.quiet is False:
        print("gff_processed")
    outmvf = MultiVariantFile(args.out, 'write', overwrite=args.overwrite)
    outmvf.metadata = deepcopy(mvf.metadata)
    if args.nongenic_mode is False:
        outmvf.metadata['contigs'] = geneids
    outmvf.write_data(outmvf.get_header())
    entrybuffer = []
    nentry = 0
    for contigid, pos, allelesets in mvf.iterentries(decode=False):
        annotated_pos = False
        if contigid in gff:
            if pos in gff[contigid]:
                annotated_pos = True
            elif args.nongenic_mode is True and args.unmargin > 0:
                for xpos in range(pos - args.unmargin,
                                  pos + args.unmargin + 1):
                    if xpos in gff[contigid]:
                        annotated_pos = True
                        break
        if args.nongenic_mode is False and annotated_pos is True:
            entrybuffer.append((gff[contigid][pos], pos, allelesets))
            nentry += 1
            if nentry == args.line_buffer:
                outmvf.write_entries(entrybuffer)
                entrybuffer = []
                nentry = 0
        elif args.nongenic_mode is True and annotated_pos is False:
            entrybuffer.append((contigid, pos, allelesets))
            nentry += 1
            if nentry == args.line_buffer:
                outmvf.write_entries(entrybuffer)
                entrybuffer = []
                nentry = 0
    if entrybuffer:
        outmvf.write_entries(entrybuffer)
        entrybuffer = []
        nentry = 0
    return ''
def filter_mvf(args):
    """Main method"""
    if args.more_help is True:
        modulehelp()
        sys.exit()
    if args.mvf is None and args.test is None:
        raise RuntimeError("No input file specified with --mvf")
    if args.out is None and args.test is None:
        raise RuntimeError("No output file specified with --out")
    # Establish Input MVF
    if args.test is not None:
        ncol = args.test_nchar or len(args.test.split()[1])
    else:
        mvf = MultiVariantFile(args.mvf, 'read')
        ncol = mvf.metadata['ncol']
    # Create Actionset
    if args.labels:
        labels = mvf.get_sample_labels()[:]
        for i in range(len(args.actions)):
            action = args.actions[i]
            arr = action.split(':')
            if arr[0] in ('columns', 'collapsepriority', 'collapsemerge',
                          'allelegroup', 'notmultigroup'):
                for j in range(1, len(arr)):
                    arr[j] = ','.join(
                        [str(labels.index(x)) for x in arr[j].split(',')])
                args.actions[i] = ':'.join(arr)
    actionset = build_actionset(args.actions, ncol)
    # TESTING MODE
    if args.test:
        loc, alleles = args.test.split()
        linefail = False
        transformed = False
        # invar = invariant (single character)
        # refvar (all different than reference, two chars)
        # onecov (single coverage, + is second character)
        # onevar (one variable base, + is third character)
        # full = full alleles (all chars)
        if args.verbose:
            print(alleles)
        linetype = get_linetype(alleles)
        sys.stdout.write("MVF Encoding type '{}' detected\n".format(linetype))
        for actionname, actiontype, actionfunc, actionarg in actionset:
            sys.stdout.write("Applying action {} ({}): ".format(
                actionname, actiontype))
            if actiontype == 'filter':
                if not actionfunc(alleles, linetype):
                    linefail = True
                    sys.stdout.write("Filter Fail\n")
                    break
                else:
                    sys.stdout.write("Filter Pass\n")
            elif actiontype == 'transform':
                transformed = True
                alleles = actionfunc(alleles, linetype)
                linetype = get_linetype(alleles)
                if linetype == 'empty':
                    linefail = True
                    sys.stdout.write("Transform removed all alleles\n")
                    break
                else:
                    sys.stdout.write("Transform result {}\n".format(alleles))
            elif actiontype == 'location':
                loc = loc.split(':')
                loc[1] = int(loc[1])
                if actionfunc(loc) is False:
                    linefail = True
                    sys.stdout.write("Location Fail\n")
                    break
                else:
                    sys.stdout.write("Location Pass\n")
        if linefail is False:
            if transformed:
                if linetype == 'full':
                    alleles = encode_mvfstring(alleles)
                if alleles:
                    test_output = "{}\t{}\n".format(loc, alleles)
                    sys.stdout.write("Final output = {}\n".format(
                        test_output))
                else:
                    sys.stdout.write("Transform removed all alleles\n")
            else:
                sys.stdout.write("No changes applied\n")
                sys.stdout.write("Final output = {}\n".format(args.test))
        sys.exit()
    # MAIN MODE
    # Set up file handler
    outmvf = MultiVariantFile(args.out, 'write', overwrite=args.overwrite)
    outmvf.metadata = deepcopy(mvf.metadata)
    # reprocess header if actions are used that filter columns
    if any(x == y[0] for x in ('columns', 'collapsepriority', 'collapsemerge')
           for y in actionset):
        if args.labels:
            labels = outmvf.metadata['labels'][:]
        else:
            labels = [x for x in outmvf.metadata['samples']]
        for actionname, actiontype, actionfunc, actionarg in actionset:
            if actionname == 'columns':
                labels = [labels[x] for x in actionarg[0]]
            elif actionname in ('collapsepriority', 'collapsemerge'):
                labels = [labels[x] for x in range(len(labels))
                          if x not in actionarg[0][1:]]
        if args.labels:
            oldindices = mvf.get_sample_indices(labels)
        else:
            oldindices = labels[:]
        newsamples = {}
        for i, _ in enumerate(labels):
            newsamples[i] = mvf.metadata['samples'][oldindices[i]]
        outmvf.metadata['samples'] = newsamples.copy()
        outmvf.metadata['labels'] = labels[:]
    outmvf.write_data(outmvf.get_header())
    # End header editing
    linebuffer = []
    nbuffer = 0
    for chrom, pos, allelesets in mvf.iterentries(decode=False):
        linefail = False
        transformed = False
        # invar = invariant (single character)
        # refvar (all different than reference, two chars)
        # onecov (single coverage, + is second character)
        # onevar (one variable base, + is third character)
        # full = full alleles (all chars)
        alleles = allelesets[0]
        linetype = get_linetype(alleles)
        if linetype == 'empty':
            continue
        if args.verbose is True:
            sys.stdout.write(" {} {}".format(alleles, linetype))
        for actionname, actiontype, actionfunc, actionargs in actionset:
            if actiontype == 'filter':
                if not actionfunc(alleles, linetype):
                    linefail = True
            elif actiontype == 'transform':
                transformed = True
                alleles = actionfunc(alleles, linetype)
                linetype = get_linetype(alleles)
                if linetype == 'empty':
                    linefail = True
            elif actiontype == 'location':
                if actionfunc([chrom, pos]) is False:
                    linefail = True
            if linefail:
                break
        if linefail is False:
            if transformed:
                if linetype == 'full':
                    alleles = mvf.encode(alleles)
                if not alleles:
                    linefail = True
            nbuffer += 1
            linebuffer.append((chrom, pos, (alleles,)))
            if args.verbose:
                sys.stdout.write("{}\n".format(alleles))
            if nbuffer == args.line_buffer:
                outmvf.write_entries(linebuffer)
                linebuffer = []
                nbuffer = 0
        elif args.verbose:
            sys.stdout.write("FAIL\n")
    if linebuffer:
        outmvf.write_entries(linebuffer)
        linebuffer = []
    return ''
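
# Illustrative sketch (hypothetical helper, not part of filter_mvf): the
# label-to-index rewrite applied to action strings when --labels is used.
# Sample names in actions such as 'columns:SampleA,SampleC' are converted to
# their column indices before build_actionset is called.
def _example_labels_to_indices(action, labels):
    """Rewrite 'columns:SampleA,SampleC' as 'columns:0,2' given the ordered
    sample labels, mirroring the conversion loop in filter_mvf."""
    arr = action.split(':')
    for j in range(1, len(arr)):
        arr[j] = ','.join(str(labels.index(x)) for x in arr[j].split(','))
    return ':'.join(arr)

# _example_labels_to_indices('columns:SampleA,SampleC',
#                            ['SampleA', 'SampleB', 'SampleC'])
# -> 'columns:0,2'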
def mvf_join(args):
    """Main method"""
    concatmvf = MultiVariantFile(args.out, 'write', overwrite=args.overwrite)
    # Copy the first file's metadata
    if args.main_header_file:
        if args.main_header_file not in args.mvf:
            raise RuntimeError("{} not found in files".format(
                args.main_header_file))
        else:
            args.main_header_file = args.mvf.index(args.main_header_file)
    else:
        args.main_header_file = 0
    first_mvf = MultiVariantFile(args.mvf[args.main_header_file], 'read')
    concatmvf.metadata = first_mvf.metadata.copy()
    # Open each MVF file, read headers to make unified header
    transformers = []
    for mvfname in args.mvf:
        # This will create a dictionary of samples{old:new}, contigs{old:new}
        transformer = MvfTransformer()
        mvf = MultiVariantFile(mvfname, 'read')
        for i, label in enumerate(mvf.get_sample_labels()):
            if label not in concatmvf.get_sample_labels():
                concatmvf.metadata['labels'].append(label)
                concatmvf.metadata['samples'][
                    concatmvf.metadata['labels'].index(label)] = {
                        'label': label}
            if concatmvf.metadata['labels'].index(label) != i:
                transformer.set_label(
                    i, concatmvf.metadata['labels'].index(label))
        for contigid, contigdata in iter(mvf.metadata['contigs'].items()):
            if contigdata['label'] not in [
                    concatmvf.metadata['contigs'][x]['label']
                    for x in concatmvf.metadata['contigs']]:
                newid = (contigid not in concatmvf.metadata['contigs']
                         and contigid
                         or concatmvf.get_next_contig_id())
                concatmvf.metadata['contigs'][newid] = contigdata
            else:
                for concatid, concatdata in (
                        concatmvf.metadata['contigs'].items()):
                    if contigdata['label'] == concatdata['label']:
                        newid = concatid
                        break
            if newid != contigid:
                transformer.set_contig(contigid, newid)
        transformers.append(transformer)
    # Write output header
    concatmvf.write_data(concatmvf.get_header())
    # Now loop through each file
    entries = []
    nentries = 0
    for ifile, mvfname in enumerate(args.mvf):
        if not args.quiet:
            sys.stderr.write("Processing {} ...\n".format(mvfname))
        transformer = transformers[ifile]
        mvf = MultiVariantFile(mvfname, 'read')
        for contigid, pos, allelesets in mvf.iterentries(decode=False,
                                                         quiet=args.quiet):
            if transformer.labels:
                allelesets = [mvf.decode(x) for x in allelesets]
                for j, alleles in enumerate(allelesets):
                    allelesets[j] = concatmvf.encode(''.join([
                        x in transformer.labels
                        and alleles[transformer.labels[x]]
                        or alleles[x]
                        for x in range(len(alleles))]))
            if transformer.contigs:
                contigid = (contigid in transformer['contigs']
                            and transformer['contigs'][contigid]
                            or contigid)
            entries.append((contigid, pos, allelesets))
            nentries += 1
            if nentries == args.line_buffer:
                concatmvf.write_entries(entries)
                entries = []
                nentries = 0
    if entries:
        concatmvf.write_entries(entries)
        entries = []
        nentries = 0
    if not args.quiet:
        sys.stderr.write("done\n")
    return ''
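
# Illustrative sketch (hypothetical helper, not part of mvf_join): the
# per-entry column remap applied when a file's sample order differs from the
# unified header. `label_map` stands in for transformer.labels; the dict
# values pick which source column feeds each output position, mirroring the
# comprehension in mvf_join.
def _example_remap_alleles(alleles, label_map):
    """Reorder a decoded allele string according to an index map."""
    return ''.join(
        alleles[label_map[x]] if x in label_map else alleles[x]
        for x in range(len(alleles)))

# With label_map = {0: 1, 1: 0} the first two columns are swapped:
# _example_remap_alleles('ACG', {0: 1, 1: 0}) -> 'CAG'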
def merge_mvf(args):
    """Main method"""
    args.qprint("Running MergeMVF")
    if any(fpath.endswith('.gz') for fpath in args.mvf):
        print("WARNING! Running MergeMVF with gzipped input files is "
              "extremely slow and strongly discouraged.")
    concatmvf = MultiVariantFile(args.out, 'write', overwrite=args.overwrite)
    # Copy the first file's metadata
    args.qprint("Reading First File and Establishing Output")
    if args.main_header_file:
        if args.main_header_file not in args.mvf:
            raise RuntimeError("{} not found in files".format(
                args.main_header_file))
        else:
            args.main_header_file = args.mvf.index(args.main_header_file)
    else:
        args.main_header_file = 0
    first_mvf = MultiVariantFile(args.mvf[args.main_header_file], 'read')
    concatmvf.metadata = first_mvf.metadata.copy()
    # Open each MVF file, read headers to make unified header
    transformers = []
    mvfmetadata = []
    concatmvf_reverse_contig = dict(
        (x['label'], k)
        for (k, x) in concatmvf.metadata['contigs'].items())
    inputfiles = []
    for mvfname in args.mvf:
        args.qprint("Reading headers from {}".format(mvfname))
        # This will create a dictionary of samples{old:new}, contigs{old:new}
        args.qprint("Processing Headers and Indexing: {}".format(mvfname))
        transformer = MvfTransformer()
        mvf = MultiVariantFile(mvfname, 'read',
                               contigindex=(not args.skip_index))
        if args.skip_index:
            mvf.read_index_file()
        mvf.reset_max_contig_id()
        mvfmetadata.append(mvf.metadata)
        for i, label in enumerate(mvf.get_sample_labels()):
            if label not in concatmvf.get_sample_labels():
                concatmvf.metadata['labels'].append(label)
                concatmvf.metadata['samples'][
                    concatmvf.metadata['labels'].index(label)] = {
                        'label': label}
            # if concatmvf.metadata['labels'].index(label) != i:
            transformer.set_label(
                i, concatmvf.metadata['labels'].index(label))
        for contigid, contigdata in iter(mvf.metadata['contigs'].items()):
            if contigdata['label'] not in concatmvf_reverse_contig:
                newid = (contigid
                         if contigid not in concatmvf.metadata['contigs']
                         else concatmvf.get_next_contig_id())
                concatmvf.metadata['contigs'][newid] = contigdata
                concatmvf_reverse_contig[contigdata['label']] = newid
            else:
                newid = concatmvf_reverse_contig[contigdata['label']]
            transformer.set_contig(contigid, newid)
        transformers.append(transformer)
        inputfiles.append(mvf)
    # Write output header
    args.qprint("Writing headers to merge output")
    concatmvf.reset_ncol()
    concatmvf.write_data(concatmvf.get_header())
    contigs = concatmvf.metadata['contigs']
    # Now loop through each file
    blank_entry = '-' * len(concatmvf.metadata['samples'])
    for current_contig in contigs:
        contig_merged_entries = {}
        args.qprint("Merging Contig: {}".format(current_contig))
        for ifile, mvffile in enumerate(inputfiles):
            if current_contig not in transformers[ifile].contigs:
                continue
            localcontig = transformers[ifile].contigs[current_contig]
            for chrom, pos, allelesets in mvffile.itercontigentries(
                    localcontig, decode=True):
                if pos not in contig_merged_entries:
                    contig_merged_entries[pos] = blank_entry[:]
                for j, base in enumerate(allelesets[0]):
                    xcoord = transformers[ifile].labels_rev[j]
                    if contig_merged_entries[pos][xcoord] != '-':
                        if contig_merged_entries[pos][xcoord] == base:
                            continue
                        if base == '-' or base == 'X':
                            continue
                        raise RuntimeError(
                            "Merging columns have two different bases: "
                            "{} {} {}".format(
                                pos, contig_merged_entries[pos][xcoord],
                                base))
                    contig_merged_entries[pos] = (
                        contig_merged_entries[pos][:xcoord] + base +
                        contig_merged_entries[pos][xcoord + 1:])
        concatmvf.write_entries(
            ((current_contig, coord, (entry,))
             for coord, entry in sorted(contig_merged_entries.items())),
            encoded=False)
        args.qprint("Entries written for contig {}: {}".format(
            current_contig, len(contig_merged_entries)))
    return ''
def translate_mvf(args):
    """Main method"""
    mvf = MultiVariantFile(args.mvf, 'read')
    if mvf.flavor != 'dna':
        raise RuntimeError("MVF must be flavor=dna to translate")
    if args.gff:
        gff = parse_gff_translate(args.gff, args)
        if not args.quiet:
            print("gff_processed")
    outmvf = MultiVariantFile(args.out, 'write', overwrite=args.overwrite)
    outmvf.metadata = deepcopy(mvf.metadata)
    outmvf.flavor = args.output_data
    outmvf.write_data(outmvf.get_header())
    entrybuffer = []
    nentry = 0
    if not args.gff:
        inputbuffer = []
        current_contig = ''
        for contigid, pos, allelesets in mvf.iterentries(decode=False):
            if current_contig == '':
                current_contig = contigid[:]
            if contigid == current_contig:
                inputbuffer.append((pos, allelesets))
            else:
                for _, amino_acids, alleles in iter_codons(
                        inputbuffer, mvf):
                    if all([x in '-X' for x in amino_acids]):
                        continue
                    if args.output_data == 'protein':
                        entrybuffer.append(
                            (current_contig, pos, (amino_acids,)))
                    else:
                        entrybuffer.append(
                            (current_contig, pos,
                             (amino_acids, alleles[0], alleles[1],
                              alleles[2])))
                    nentry += 1
                    if nentry == args.line_buffer:
                        outmvf.write_entries(entrybuffer)
                        entrybuffer = []
                        nentry = 0
                inputbuffer = [(pos, allelesets)]
                current_contig = contigid[:]
        if inputbuffer:
            for _, amino_acids, alleles in iter_codons(inputbuffer, mvf):
                if all([x in '-X' for x in amino_acids]):
                    continue
                if args.output_data == 'protein':
                    entrybuffer.append(
                        (current_contig, pos, (amino_acids,)))
                else:
                    entrybuffer.append(
                        (current_contig, pos,
                         (amino_acids, alleles[0], alleles[1],
                          alleles[2])))
                nentry += 1
                if nentry == args.line_buffer:
                    outmvf.write_entries(entrybuffer)
                    entrybuffer = []
                    nentry = 0
    else:
        mvf_entries = {}
        for contigid, pos, allelesets in mvf.iterentries(decode=False):
            if contigid not in mvf_entries:
                mvf_entries[contigid] = {}
            mvf_entries[contigid][pos] = allelesets[0]
        for contigname in sorted(gff):
            contigid = mvf.get_contig_ids(labels=contigname)[0]
            for coords in sorted(gff[contigname]):
                reverse_strand = False
                if coords[3] == '-':
                    reverse_strand = True
                    alleles = [mvf_entries[contigid].get(x, '-')
                               for x in coords[2::-1]]
                else:
                    alleles = [mvf_entries[contigid].get(x, '-')
                               for x in coords[0:3]]
                if all(len(x) == 1 for x in alleles):
                    if reverse_strand:
                        alleles = [MLIB.complement_bases[x]
                                   for x in alleles]
                    decoded_alleles = alleles
                    amino_acids = translate(''.join(alleles))[0]
                else:
                    if reverse_strand:
                        decoded_alleles = [
                            [MLIB.complement_bases[y]
                             for y in mvf.decode(x)]
                            for x in alleles]
                        alleles = [mvf.encode(''.join(x))
                                   for x in decoded_alleles]
                    else:
                        decoded_alleles = [mvf.decode(x) for x in alleles]
                    amino_acids = [translate(''.join(x))
                                   for x in zip(*decoded_alleles)]
                    amino_acids = mvf.encode(''.join(
                        [x[0] for x in amino_acids]))
                if all([x in '-X' for x in amino_acids]):
                    continue
                if args.output_data == 'protein':
                    entrybuffer.append(
                        (contigid, coords[0], (amino_acids,)))
                else:
                    entrybuffer.append(
                        (contigid, coords[0],
                         (amino_acids, alleles[0], alleles[1],
                          alleles[2])))
                nentry += 1
                if nentry == args.line_buffer:
                    outmvf.write_entries(entrybuffer)
                    entrybuffer = []
                    nentry = 0
    if entrybuffer:
        outmvf.write_entries(entrybuffer)
        entrybuffer = []
        nentry = 0
    return ''
def translate_mvf(args):
    """Main method"""
    args.qprint("Running TranslateMVF")
    if args.gff:
        args.qprint("Reading and Indexing MVF.")
    else:
        args.qprint("Reading MVF.")
    mvf = MultiVariantFile(args.mvf, 'read', contigindex=bool(args.gff))
    if mvf.flavor != 'dna':
        raise RuntimeError("MVF must be flavor=dna to translate")
    if args.gff:
        args.qprint("Processing MVF Index File.")
        mvf.read_index_file()
        gff = parse_gff_translate(
            args.gff, args, parent_gene_prefix=args.parent_gene_prefix)
        args.qprint("GFF processed.")
    outmvf = MultiVariantFile(args.out, 'write', overwrite=args.overwrite)
    outmvf.metadata = deepcopy(mvf.metadata)
    outmvf.flavor = args.output_data
    outmvf.write_data(outmvf.get_header())
    args.qprint("Output MVF Established.")
    entrybuffer = []
    nentry = 0
    pos = None
    if not args.gff:
        args.qprint("No GFF used, translating sequences as pre-aligned in "
                    "coding frame.")
        inputbuffer = []
        current_contig = ''
        for contigid, pos, allelesets in mvf.iterentries(decode=False):
            if current_contig == '':
                current_contig = contigid[:]
            if contigid == current_contig:
                inputbuffer.append((pos, allelesets))
            else:
                for _, amino_acids, alleles in iter_codons(
                        inputbuffer, mvf):
                    if all([x in '-X' for x in amino_acids]):
                        continue
                    if args.output_data == 'protein':
                        entrybuffer.append(
                            (current_contig, pos, (amino_acids,)))
                    else:
                        entrybuffer.append((
                            current_contig, pos,
                            (amino_acids, alleles[0], alleles[1],
                             alleles[2])))
                    nentry += 1
                    if nentry == args.line_buffer:
                        outmvf.write_entries(entrybuffer)
                        entrybuffer = []
                        nentry = 0
                inputbuffer = [(pos, allelesets)]
                current_contig = contigid[:]
        if inputbuffer:
            for _, amino_acids, alleles in iter_codons(
                    inputbuffer, mvf):
                if all([x in '-X' for x in amino_acids]):
                    continue
                if args.output_data == 'protein':
                    entrybuffer.append(
                        (current_contig, pos, (amino_acids,)))
                else:
                    entrybuffer.append((
                        current_contig, pos,
                        (amino_acids, alleles[0], alleles[1],
                         alleles[2])))
                nentry += 1
                if nentry == args.line_buffer:
                    outmvf.write_entries(entrybuffer)
                    entrybuffer = []
                    nentry = 0
    else:
        args.qprint("Indexing GFF gene names.")
        # mvfid_to_gffname = outmvf.get_contig_reverse_dict()
        for xcontigid in outmvf.get_contig_ids():
            mvf_entries = {}
            contigname = outmvf.metadata['contigs'][xcontigid]['label']
            if contigname not in gff:
                if args.verbose:
                    print("No entries in GFF, skipping contig: {} {}".format(
                        xcontigid, contigname))
                continue
            if not int(xcontigid) % 100:
                args.qprint("Processing contig: {} {}".format(
                    xcontigid, contigname))
            # contig_cds_bases = chain(x[:3] for x in gff[contigname])
            for contigid, pos, allelesets in mvf.itercontigentries(
                    xcontigid, decode=False):
                # if pos in contig_cds_bases:
                mvf_entries[pos] = allelesets[0]
            for coords in sorted(gff[contigname]):
                reverse_strand = coords[3] == '-'
                alleles = (
                    tuple(mvf_entries.get(x, '-') for x in coords[2::-1])
                    if reverse_strand else
                    tuple(mvf_entries.get(x, '-') for x in coords[0:3]))
                if all(len(x) == 1 for x in alleles):
                    if reverse_strand:
                        alleles = tuple(MLIB.complement_bases[x]
                                        for x in alleles)
                    decoded_alleles = alleles
                    amino_acids = translate_single_codon(''.join(alleles))
                else:
                    if reverse_strand:
                        decoded_alleles = tuple(
                            tuple(MLIB.complement_bases[y]
                                  for y in mvf.decode(x))
                            for x in alleles)
                        alleles = tuple(mvf.encode(''.join(x))
                                        for x in decoded_alleles)
                    else:
                        decoded_alleles = tuple(mvf.decode(x)
                                                for x in alleles)
                    amino_acids = tuple(
                        translate_single_codon(''.join(x))
                        for x in zip(*decoded_alleles))
                    # print("aminx", amino_acids)
                    amino_acids = mvf.encode(''.join(amino_acids))
                # if all(x in '-X' for x in amino_acids):
                #     continue
                # print("amino", amino_acids)
                # print("translated", amino_acids, alleles)
                if args.output_data == 'protein':
                    entrybuffer.append(
                        (xcontigid, coords[0], (amino_acids,)))
                else:
                    entrybuffer.append((
                        xcontigid, coords[0],
                        (amino_acids, alleles[0], alleles[1], alleles[2])))
                nentry += 1
                if nentry >= args.line_buffer:
                    args.qprint("Writing a block of {} entries.".format(
                        args.line_buffer))
                    outmvf.write_entries(entrybuffer)
                    entrybuffer = []
                    nentry = 0
    if entrybuffer:
        outmvf.write_entries(entrybuffer)
        entrybuffer = []
        nentry = 0
    return ''