def collect_coverages(fyle_list, fasta_fyle, specificity='AC'):
    '''Runs rtsc_coverage on each file, keyed by its <.rtsc>-stripped base name.'''
    fasta_index = sfio.read_fasta(fasta_fyle)
    return {
        fyle.replace('.rtsc', ''): rtsc_coverage(fyle, fasta_index, specificity)
        for fyle in fyle_list
    }
def main():
    """Command-line entry point: filters <.sam> alignments into <.rtsc> stop files."""
    parser = argparse.ArgumentParser(description='Converts <.sam> into reverse transcriptase stop files <.rtsc>')
    in_files = parser.add_argument_group('Input')
    in_files.add_argument('fasta',default=None,help='Index Fasta File')
    in_files.add_argument('sam',default=None,help='Input SAM file(s)',nargs='+')
    settings = parser.add_argument_group('Settings')
    settings.add_argument('-mismatches',type=int,default=3,metavar='<number>',help='[default = 3] Maximum allowed mismatches/indels')
    settings.add_argument('-firstmm',action='store_true',default=False,help='Accept alignments with first base mismatches')
    settings.add_argument('-reverse',action='store_true',default=False,help='Accept alignments to the reverse strand')
    settings.add_argument('-rm_secondary',action='store_false',default=True,help='Remove secondary alignments',dest='secondary')
    out_files = parser.add_argument_group('Output')
    out_files.add_argument('-logname',type=str,default='filter_log.csv',help='[default = filter_log.csv] Log file name')
    parser.set_defaults(r1_unmap=False)
    args = parser.parse_args()

    #Sequence length limits, straight from the reference fasta
    limits = {}
    for name, seq in sfio.read_fasta(args.fasta).items():
        limits[name] = len(seq)

    #Build the SAM filter flag from every option that must NOT be set
    flag_options = {'r1_unmap':args.r1_unmap,'r1_reverse':args.reverse,'secondary':args.secondary}
    keyflag = sum(sf3sam.flag_values[key] for key, enabled in flag_options.items() if not enabled)

    #Filter each SAM file, collecting one log entry per file
    log_data = {
        fyle: sam_to_rtsc(fyle,limits,args.mismatches,args.firstmm,keyflag)
        for fyle in args.sam
    }

    #Write out log
    write_sam_filter_report(log_data,sfio.check_extension(args.logname,'.csv'))
def collect_specificity(fyle_lyst, fasta):
    '''Generates a nested dictionary of specificities'''
    ref = sfio.read_fasta(fasta)
    all_data = {}
    for fyle in fyle_lyst:
        all_data[fyle.replace('.rtsc', '')] = rtsc_specificity(fyle, ref)
    return all_data
# --- Exemplo n.º 4 (scraper artifact + vote count "0"; commented out so the file parses) ---
def main():
    """Command-line entry point: splits fasta sequences into sliding-window segments."""
    parser = argparse.ArgumentParser(description='Splits sequences into sliding window segments')
    in_files = parser.add_argument_group('Input')
    in_files.add_argument('fasta',type=str,help='Input Fasta File')
    settings = parser.add_argument_group('Settings')
    settings.add_argument('-size',default=10,type=int,help='[default = 10] Size of the Windows')
    settings.add_argument('-step',default=5,type=int,help='[default = 5] Step of the Windows')
    out_files = parser.add_argument_group('Output')
    out_files.add_argument('-name',default=None,help='Specify output file name')
    out_files.add_argument('-sort_file',action='store_true',help='Sort the output by name')
    args = parser.parse_args()

    #Read sequences and cut them into windows
    sequences = sfio.read_fasta(args.fasta)
    segments = fasta_stepwise(sequences,args.size,args.step)

    #Derive the output name unless one was given explicitly
    base = sfio.rm_ext(args.fasta,'.fasta','.fas','.fa')
    default_name = '_'.join([base,str(args.size)+'window',str(args.step)+'step'])+'.fasta'
    if args.name:
        out_name = sfio.check_extension(args.name,'.fasta')
    else:
        out_name = default_name
    sfio.write_fasta(segments,out_name,args.sort_file)
# --- Exemplo n.º 5 (scraper artifact + vote count "0"; commented out so the file parses) ---
def main():
    """Command-line entry point: reformats <.react> reactivity values to <.csv>.

    'S' mode writes one transcript's sequence/reactivity CSV; 'M' mode writes
    one CSV per transcript into a freshly created directory.
    """
    parser = argparse.ArgumentParser(
        description='Reformats reactivity values to <.csv> format')
    in_files = parser.add_argument_group('Input')
    in_files.add_argument('react', type=str, help='File to pull values from')
    in_files.add_argument('fasta',
                          type=str,
                          help='File used to generate the <.react>')
    settings = parser.add_argument_group('Settings')
    settings.add_argument('-mode',
                          type=str.upper,
                          default=None,
                          choices=['S', 'M'],
                          help='Single or Multi-Transcript mode')
    settings.add_argument('-transcript',
                          default=None,
                          type=str,
                          help='[S] Specific Transcript to reformat',
                          dest='name')
    settings.add_argument(
        '-restrict',
        default=None,
        metavar='<.txt>',
        help='[M] Limit analysis to these specific transcripts')
    settings.add_argument(
        '-outdir',
        type=str,
        default=None,
        help='[M] Name of the out directory, overrides default')
    args = parser.parse_args()

    #Read in files
    sequences, reactivities = sfio.read_fasta(args.fasta), sfio.read_react(
        args.react)

    #Single Transcript Mode
    if args.mode == 'S':
        #Robustness: S mode is meaningless without -transcript; previously the
        #join below crashed with a TypeError when it was omitted.
        if args.name is None:
            print('Check options! Single mode requires -transcript!')
            return
        #Get data
        outname = '_'.join([args.name,
                            args.react.replace('.react', '')]) + '.csv'
        seq = sequences.get(args.name)
        reacts = reactivities.get(args.name)
        #Write
        if seq and reacts:
            if len(seq) == len(reacts):
                write_out_csv(seq, reacts, outname)
            else:
                #BUGFIX: '-transcript' is stored under dest='name', so the old
                #args.transcript raised AttributeError here.
                print(
                    'Sequence length does not match number of reactivities for {}'
                    .format(args.name))
        else:
            #BUGFIX: same dest='name' issue as above.
            print('One or more of the files did not contain entry {}'.format(
                args.name))

    #Multi Mode
    elif args.mode == 'M':
        #Directory
        new_dir = args.outdir if args.outdir else args.react.replace(
            '.react', '') + '_' + 'all_csvs'
        if args.restrict:
            covered = sfio.read_restrict(args.restrict)
            sequences = {n: s for n, s in sequences.items() if n in covered}
            reactivities = {
                n: s
                for n, s in reactivities.items() if n in covered
            }
        #os.path.isdir handles nested -outdir paths; the old os.listdir('.')
        #membership test only saw the current directory.
        if not os.path.isdir(new_dir):
            os.mkdir(new_dir)
            batch_write_out_csv(sequences, reactivities,
                                args.react.replace('.react', ''), new_dir)
        else:
            print('{} folder already exists. Remove or rename and try again.'.
                  format(new_dir))

    else:
        print('Check options! Must have a valid mode!')
def generate_composition(fasta):
    '''Maps each fasta entry name to a Counter of its character frequencies'''
    composition = {}
    for name, seq in sfio.read_fasta(fasta).items():
        composition[name] = collections.Counter(seq)
    return composition
# --- Exemplo n.º 7 (scraper artifact + vote count "0"; commented out so the file parses) ---
def main():
    """Command-line entry point: batch-folds fasta sequences with RNAStructure.

    <.react> data, when supplied, is applied either as soft restraints (-sh)
    or, if -threshold is set, as hard constraints (-c). With -partition the
    partition-smp pipeline emits .pfs/.bp/.ps/.ct files; otherwise Fold-smp
    emits .ct/.ps files. Vienna mode ('V') is not yet implemented.
    """
    parser = argparse.ArgumentParser(description='Batch runs folding programs')
    in_files = parser.add_argument_group('Input')
    in_files.add_argument('fasta', default=None, help='Reference Fasta File')
    settings = parser.add_argument_group('Settings')
    settings.add_argument('-mode',
                          type=str.upper,
                          default='R',
                          choices=['R', 'V'],
                          help='RNAStructure or Vienna')
    settings.add_argument('-react',
                          default=None,
                          metavar='<.react>',
                          help='React file to use as restraints/constraints')
    settings.add_argument('-restrict',
                          default=None,
                          metavar='<.txt>',
                          help='Limit folding to these specific transcripts')
    settings.add_argument(
        '-temp',
        type=float,
        default=310.15,
        help='[default = 310.15] Temperature to use for folding')
    settings.add_argument('-cores',
                          default='4',
                          type=str,
                          help="[default = 4] Number of cores to use")
    settings.add_argument('-distance',
                          default=99999,
                          type=int,
                          help='[default = 99999] Maximum pairing distance')
    settings.add_argument('-minlen',
                          default=10,
                          type=int,
                          help='[default = 10] Minimum length to fold')
    settings.add_argument('-maxlen',
                          default=5000,
                          type=int,
                          help='[default = 5000] Maximum length to fold')
    settings.add_argument(
        '-truncate',
        default=0,
        type=int,
        help='[default = 0] Ignore <.react> for last N nucleotides')
    settings.add_argument('-threshold',
                          default=None,
                          type=float,
                          help='Apply hard constraints using this threshold',
                          dest='th')
    rna = parser.add_argument_group('RNAStructure Settings')
    rna.add_argument('-slope',
                     default=1.8,
                     type=float,
                     help='[default = 1.8] Parameter for RNAstructure')
    rna.add_argument('-intercept',
                     default=-0.6,
                     type=float,
                     help='[default = -0.6] Parameter for RNAstructure')
    rna.add_argument('-partition',
                     action='store_true',
                     help='Use the partition function')
    rna.add_argument('-multiple',
                     action='store_true',
                     help='Output all structures rather than just MFE')
    out_files = parser.add_argument_group('Output')
    out_files.add_argument('-errorname',
                           default='errors.csv',
                           help='Name the error log')
    out_files.add_argument('-paraname',
                           default='parameters.csv',
                           help='Name the parameter log')
    out_files.add_argument('-outdir',
                           default='folded',
                           help='Name the out directory')
    out_files.add_argument('-ct', default='CT', help='Name the ct folder')
    out_files.add_argument('-ps', default='PS', help='Name the ps folder')
    #BUGFIX: the -bp default was 'PS', colliding with the -ps folder and
    #making gen_paths create/use the same directory twice.
    out_files.add_argument('-bp',
                           default='BP',
                           help='Name the bp folder (Partition Only)')
    out_files.add_argument('-pfs',
                           default='PFS',
                           help='Name the pfs folder (Partition Only)')
    args = parser.parse_args()

    #Prepare out directory
    if os.path.isdir(args.outdir):
        print('Out directory already exists, quitting...')
        quit()
    else:
        os.mkdir(args.outdir)

    #Input
    sequences = sfio.read_fasta(args.fasta)
    xstraints = sfio.read_react(args.react) if args.react else None
    restrict = sfio.read_restrict(args.restrict) if args.restrict else None

    #Populate and filter errors
    errors = {}
    gen_length_errors(errors, sequences, args.minlen, args.maxlen)
    gen_xstraint_errors(errors, sequences, xstraints)
    if errors:
        sl = len(sequences)
        sequences = {z: y for z, y in sequences.items() if z not in errors}
        el = len(sequences)
        print('After screening errors, {} of {} folds remain'.format(el, sl))
    if restrict:
        sl = len(sequences)
        sequences = {n: s for n, s in sequences.items() if n in restrict}
        el = len(sequences)
        print('After restricting analysis, {} of {} folds remain'.format(
            el, sl))

    #Housekeeping in base directory
    write_params_log(
        args,
        os.path.join(args.outdir, sfio.check_extension(args.paraname, '.csv')))
    write_error_log(
        errors,
        os.path.join(args.outdir, sfio.check_extension(args.errorname,
                                                       '.csv')))

    #RNAStructure Suite
    if args.mode == 'R':
        #OMP_NUM_THREADS is how the -smp RNAStructure binaries pick up -cores
        rna_env = {**os.environ, 'OMP_NUM_THREADS': args.cores}
        options = {'-t': args.temp, '-md': args.distance}

        #partition-smp Suite
        if args.partition:
            paths = gen_paths(args.outdir, args.pfs, args.bp, args.ps, args.ct)
            extensions = ['.pfs', '.bp', '.ps', '.ct']

            #Restrained/Constrained
            if xstraints:
                opts = {} if args.th else {
                    '-si': args.intercept,
                    '-sm': args.slope
                }
                options.update(opts)
                out_type = 'constraint' if args.th else 'restraint'
                suffixes = [[str(args.temp), out_type + X] for X in extensions]
                params = sfio.flatten_list([[k, str(v)]
                                            for k, v in options.items()])
                #Iterate through sequences
                #BUGFIX: loop variable was misspelled 'seqeunce' while the
                #body used the undefined name 'seq' (NameError).
                for name, seq in sequences.items():
                    temp_seq = gen_temp_sequence(name, seq, args.outdir)
                    gargs = [name, xstraints[name], args.outdir, args.truncate]
                    guide = gen_cons(*gargs, args.th) if args.th else gen_rest(
                        *gargs)
                    #Out Files
                    pf_out, bp_out, ps_out, ct_out = gen_file_paths(
                        name, paths, suffixes)
                    #Run Commands
                    gflag = '-c' if args.th else '-sh'
                    pf_command = [
                        'partition-smp', temp_seq, pf_out, gflag, guide
                    ] + params
                    bp_command = ['ProbabilityPlot', pf_out, bp_out, '-t']
                    ps_command = ['ProbabilityPlot', pf_out, ps_out]
                    subprocess.run(pf_command,
                                   stdout=subprocess.DEVNULL,
                                   env=rna_env)
                    subprocess.run(bp_command,
                                   env=rna_env,
                                   stdout=subprocess.DEVNULL)
                    subprocess.run(ps_command,
                                   env=rna_env,
                                   stdout=subprocess.DEVNULL)
                    #Generate CT file
                    if file_len(bp_out) > 2:
                        ct_command = ['MaxExpect', pf_out, ct_out]
                        #env added for consistency with the other RNAStructure calls
                        subprocess.run(ct_command,
                                       env=rna_env,
                                       stdout=subprocess.DEVNULL)
                    remove_temp(temp_seq, guide)

            #in-silico
            else:
                suffixes = [[str(args.temp), 'silico' + X] for X in extensions]
                params = sfio.flatten_list([[k, str(v)]
                                            for k, v in options.items()])
                #Iterate through sequences
                for name, seq in sequences.items():
                    temp_seq = gen_temp_sequence(name, seq, args.outdir)
                    #Out Files
                    pf_out, bp_out, ps_out, ct_out = gen_file_paths(
                        name, paths, suffixes)
                    #Run Commands
                    #BUGFIX: command referenced undefined 'out_pfs'; the
                    #generated path is pf_out.
                    pf_command = ['partition-smp', temp_seq, pf_out] + params
                    bp_command = ['ProbabilityPlot', pf_out, bp_out, '-t']
                    ps_command = ['ProbabilityPlot', pf_out, ps_out]
                    subprocess.run(pf_command,
                                   stdout=subprocess.DEVNULL,
                                   env=rna_env)
                    subprocess.run(bp_command,
                                   env=rna_env,
                                   stdout=subprocess.DEVNULL)
                    subprocess.run(ps_command,
                                   env=rna_env,
                                   stdout=subprocess.DEVNULL)
                    #Generate CT file
                    #BUGFIX: 'out_bp' and 'out_ct' were undefined; the
                    #generated paths are bp_out and ct_out.
                    if file_len(bp_out) > 2:
                        ct_command = ['MaxExpect', pf_out, ct_out]
                        subprocess.run(ct_command,
                                       env=rna_env,
                                       stdout=subprocess.DEVNULL)
                    remove_temp(temp_seq)

        #Fold-smp Suite
        else:
            paths = gen_paths(args.outdir, args.ct, args.ps)
            extensions = ['.ct', '.ps']
            flags = {'-mfe': not args.multiple}
            applied_flags = [k for k, v in flags.items() if v]

            #Restrained/Constrained
            if xstraints:
                out_type = 'constraint' if args.th else 'restraint'
                suffixes = [[str(args.temp), out_type + X] for X in extensions]
                opts = {} if args.th else {
                    '-si': args.intercept,
                    '-sm': args.slope
                }
                options.update(opts)
                params = sfio.flatten_list([[k, str(v)]
                                            for k, v in options.items()])
                params.extend(applied_flags)
                #Iterate through sequences
                for name, seq in sequences.items():
                    temp_seq = gen_temp_sequence(name, seq, args.outdir)
                    gargs = [name, xstraints[name], args.outdir, args.truncate]
                    guide = gen_cons(*gargs, args.th) if args.th else gen_rest(
                        *gargs)
                    gflag = '-c' if args.th else '-sh'
                    #Out Files
                    ct_out, ps_out = gen_file_paths(name, paths, suffixes)
                    #Run Commands
                    fold_command = [
                        'Fold-smp', temp_seq, ct_out, gflag, guide
                    ] + params
                    draw_command = ['draw', ct_out, ps_out]
                    subprocess.run(fold_command,
                                   env=rna_env,
                                   stdout=subprocess.DEVNULL)
                    subprocess.run(draw_command, stdout=subprocess.DEVNULL)
                    #Remove Temporary Files
                    remove_temp(temp_seq, guide)

            #in-silico
            else:
                out_type = 'silico'
                suffixes = [[str(args.temp), out_type + X] for X in extensions]
                params = sfio.flatten_list([[k, str(v)]
                                            for k, v in options.items()])
                params.extend(applied_flags)
                #Iterate through sequences
                for name, seq in sequences.items():
                    #Generate Temporary Files
                    temp_seq = gen_temp_sequence(name, seq, args.outdir)
                    #Out Files
                    ct_out, ps_out = gen_file_paths(name, paths, suffixes)
                    #Run Commands
                    fold_command = ['Fold-smp', temp_seq, ct_out] + params
                    draw_command = ['draw', ct_out, ps_out]
                    subprocess.run(fold_command,
                                   env=rna_env,
                                   stdout=subprocess.DEVNULL)
                    subprocess.run(draw_command, stdout=subprocess.DEVNULL)
                    #Remove Temporary Files
                    remove_temp(temp_seq)

    #Vienna Package Suite
    if args.mode == 'V':
        print('Vienna Package not yet supported')
def main():
    """Command-line entry point: reformats <.rtsc>/<.react> files for correlation.

    All inputs must share one extension (.rtsc or .react); the matching
    repeatability writer is invoked on the (optionally restricted) data.
    """
    parser = argparse.ArgumentParser(
        description='Reformats <.rtsc>/<.react> for easy correlation analysis')
    in_files = parser.add_argument_group('Input')
    in_files.add_argument('fasta', help='Reference Fasta')
    in_files.add_argument('rx', help='Input <.rx> files', nargs='+')
    settings = parser.add_argument_group('Settings')
    settings.add_argument('-verbose',
                          action='store_true',
                          help='Display metrics')
    #NOTE(review): default 'AGCT' vs help text 'ACGT' — letter order is
    #presumably irrelevant to the base filter, but confirm before changing.
    settings.add_argument('-bases',
                          default='AGCT',
                          metavar='ACGT',
                          help='[default = ACGT] Nucleotide specificity')
    settings.add_argument('-restrict',
                          default=None,
                          metavar='<.txt>',
                          help='Filter to these transcripts via coverage file')
    out_files = parser.add_argument_group('Output')
    out_files.add_argument('-name',
                           default=None,
                           help='Specify output file name')
    args = parser.parse_args()

    #Set up
    covered = sfio.read_restrict(args.restrict) if args.restrict else None
    fasta_dict = sfio.read_fasta(args.fasta)
    desc = [sfio.rm_ext(x, '.rtsc', '.react') for x in args.rx]

    #All files must share a single extension; pick the matching writer
    extensions = {fyle.split('.')[-1] for fyle in args.rx}
    if extensions == {'rtsc'}:
        mode, writer = 'rtsc', write_rtsc_repeatability
    elif extensions == {'react'}:
        mode, writer = 'react', write_react_repeatability
    #Files do not make sense
    else:
        print('All Files must be the same type, and either .rtsc or .react!')
        return

    #Shared pipeline (previously duplicated per branch)
    rx_data = sfio.read_rx_files(args.rx, mode=mode, verbose=args.verbose)
    default_name = '_'.join(desc + [mode] + ['correlation.csv'])
    out_name = sfio.check_extension(args.name,
                                    '.csv') if args.name else default_name
    if args.restrict:
        rx_data = {n: s for n, s in rx_data.items() if n in covered}
    if args.verbose:
        print('Remaining after filtering', len(rx_data), sep=',')
        print('Writing File:', out_name, sep=',')
    writer(rx_data, fasta_dict, out_name, args.bases)
# --- Exemplo n.º 9 (scraper artifact + vote count "0"; commented out so the file parses) ---
def main():
    """Command-line entry point: derives a <.react> file from two <.rtsc> files.

    Reactivity = f(treatment stops, control stops), optionally natural-logged,
    scaled by a generated or supplied normalization scale, and capped at
    -threshold.
    """
    parser = argparse.ArgumentParser(
        description='Generates a <.react> file from two <.rtsc> files')
    in_files = parser.add_argument_group('Input')
    in_files.add_argument('control', type=str, help='Control <.rtsc> file')
    in_files.add_argument('treatment', type=str, help='Reagent <.rtsc> file')
    in_files.add_argument('fasta', type=str, help='Transcript <.fasta> file')
    settings = parser.add_argument_group('Settings')
    settings.add_argument('-restrict',
                          default=None,
                          metavar='<.txt>',
                          help='Limit analysis to these specific transcripts')
    settings.add_argument('-scale',
                          type=str,
                          default=None,
                          metavar='<.scale>',
                          help='Provide a normalization file for calculation')
    settings.add_argument('-threshold',
                          type=float,
                          default=7.0,
                          help='[default = 7.0] Maximum Reactivity Cap')
    settings.add_argument(
        '-ln_off',
        action='store_true',
        help='Do not take the natural log of the stop counts')
    settings.add_argument('-nrm_off',
                          action='store_true',
                          help='Do not perform final 2-8' + u"\uFF05" +
                          ' reactivity normalization')
    settings.add_argument('-bases',
                          type=str,
                          default='AC',
                          metavar='ACGT',
                          help='[default = AC] Reaction Specificity')
    out_files = parser.add_argument_group('Output')
    out_files.add_argument('-save_fails',
                           action='store_true',
                           help='Log transcripts with zero or missing scales')
    out_files.add_argument('-name',
                           type=str,
                           default=None,
                           help='Specify output file name')
    args = parser.parse_args()

    #Create output name from the input base names plus the applied options
    base_tag = [
        x.split(os.sep)[-1].replace('.rtsc', '')
        for x in [args.control, args.treatment]
    ]
    log_tag = ['ln'] if not args.ln_off else []
    #BUGFIX: 'nrm' tag was keyed on args.ln_off (copy-paste); it must track
    #the -nrm_off option.
    nrm_tag = ['nrm'] if not args.nrm_off else []
    base_name = '_'.join(base_tag + log_tag + nrm_tag)
    out_name = base_name + '.react' if args.name is None else sfio.check_extension(
        args.name, '.react')

    #Read in data
    control_data, treatment_data = map(sfio.read_rtsc,
                                       [args.control, args.treatment])

    #Apply filter if enabled
    if args.restrict:
        covered = sfio.read_restrict(args.restrict)
        control_data = {n: s for n, s in control_data.items() if n in covered}
        treatment_data = {
            n: s
            for n, s in treatment_data.items() if n in covered
        }

    #Calculate Derived Reactivity
    data = calculate_raw_reactivity(control_data, treatment_data, args.ln_off)

    #Read in transcript sequences
    seqs = sfio.read_fasta(args.fasta)

    #Generate and write scale, or read a <.scale> file in
    normalization_scale = generate_normalization_scale(
        data, seqs, args.bases) if args.scale is None else read_norm_scale(
            args.scale)
    if args.scale is None:
        write_norm_scale(normalization_scale,
                         out_name.replace('.react', '.scale'))

    #Calculate Final Reactivity
    out_reactivity, out_missing = calculate_final_reactivity(
        data, seqs, args.bases, args.threshold, normalization_scale,
        args.nrm_off)

    #Write Out
    sfio.write_react(out_reactivity, out_name, sort_flag=True)

    #Write Out Fails
    if args.save_fails:
        sfio.write_keys(
            out_missing,
            out_name.replace('.react', '_unresolvable_transcripts.txt'))