def wrapper(args):
    """
    Commandline wrapper for main().

    Validates the model-learning arguments, loads the input dataset from
    args.i (or stdin), runs main() to learn a model, and writes the
    resulting dataframe to args.out (or stdout).
    """
    # Validate some of the input arguments before doing any I/O.
    qc.validate_input_arguments_for_learn_model(
        foreground=args.foreground, background=args.background,
        alpha=args.penalty, modeltype=args.modeltype,
        learningmethod=args.learningmethod,
        start=args.start, end=args.end, iteration=args.iteration,
        burnin=args.burnin, thin=args.thin, pseudocounts=args.pseudocounts)

    # Read from the named input file, or stdin when no -i flag was given.
    inloc = io.validate_file_for_reading(args.i) if args.i else sys.stdin
    input_df = io.load_dataset(inloc)

    # Write to the named output file, or stdout when no --out flag was given.
    outloc = io.validate_file_for_writing(args.out) if args.out else sys.stdout

    # Removed leftover debugger call (#pdb.set_trace()) and backslash
    # continuations; the call is continued inside parentheses instead.
    output_df = main(
        input_df, lm=args.learningmethod, modeltype=args.modeltype,
        db=args.db_filename, LS_means_std=args.LS_means_std,
        iteration=args.iteration, burnin=args.burnin, thin=args.thin,
        start=args.start, end=args.end, runnum=args.runnum,
        initialize=args.initialize, foreground=args.foreground,
        background=args.background, alpha=args.penalty,
        pseudocounts=args.pseudocounts, verbose=args.verbose)
    io.write(output_df, outloc)
def wrapper(args):
    """Commandline wrapper for main()."""
    # Fall back to the standard streams when no file arguments are given.
    if args.i:
        source = io.validate_file_for_reading(args.i)
    else:
        source = sys.stdin
    if args.out:
        sink = io.validate_file_for_writing(args.out)
    else:
        sink = sys.stdout

    dataset = io.load_dataset(source)
    result = main(dataset, bin=args.bin, start=args.start, end=args.end)
    io.write(result, sink)
def wrapper(args):
    """Commandline wrapper for main()."""
    # No input file is needed; main() generates the data itself.
    generated = main(
        wtseq=args.wtseq,
        mutrate=args.mutrate,
        numseq=args.numseqs,
        dicttype=args.type,
        tags=args.tags,
        tag_length=args.tag_length,
    )
    # Write to the named output file, or stdout when --out is absent.
    if args.out:
        destination = io.validate_file_for_writing(args.out)
    else:
        destination = sys.stdout
    io.write(generated, destination)
def wrapper(args):
    """Commandline wrapper for main(): scores a dataset against a model."""
    # Dataset comes from args.i or stdin; the model always comes from a file.
    source = io.validate_file_for_reading(args.i) if args.i else sys.stdin
    data = io.load_dataset(source)
    model = io.load_model(args.model)

    result = main(dataset_df=data, model_df=model,
                  left=args.left, right=args.right)

    sink = io.validate_file_for_writing(args.out) if args.out else sys.stdout
    io.write(result, sink, fast=args.fast)
def wrapper(args):
    """Commandline wrapper for main()."""
    # Resolve I/O endpoints first, defaulting to the standard streams.
    source = io.validate_file_for_reading(args.i) if args.i else sys.stdin
    sink = io.validate_file_for_writing(args.out) if args.out else sys.stdout

    dataset = io.load_dataset(source)
    result = main(
        dataset,
        start=args.start,
        end=args.end,
        err=args.err,
        method=args.method,
        pseudocount=args.pseudocount,
    )
    io.write(result, sink)
def wrapper(args):
    """
    Wrapper for functions io.load_* and io.write.

    Picks the io.load_* function matching args.type, loads the input
    (args.i or stdin), and writes the resulting dataframe to args.out
    (or stdout).
    """
    # Determine input and output; default to the standard streams.
    inloc = io.validate_file_for_reading(args.i) if args.i else sys.stdin
    outloc = io.validate_file_for_writing(args.out) if args.out else sys.stdout

    # The original wrapped the lines below in `try: ... except SortSeqError:
    # raise`, which is a no-op re-raise; it was removed. SortSeqError still
    # propagates to the caller unchanged.

    # Get load function corresponding to file type
    func = filetype_to_loadfunc_dict[str(args.type)]
    # Run load function on input
    df = func(inloc)
    # Write df to stdout or to outfile
    io.write(df, outloc, fast=args.fast)
def wrapper(args):
    """Commandline wrapper for main()."""
    source = io.validate_file_for_reading(args.i) if args.i else sys.stdin
    sink = io.validate_file_for_writing(args.out) if args.out else sys.stdout

    # Load the list of files to process, then release the handle.
    files_df = io.load_filelist(source)
    source.close()

    # The tag-key table is optional; default to None when not supplied.
    tagkey_df = None
    if args.tagkeys:
        tag_handle = io.validate_file_for_reading(args.tagkeys)
        tagkey_df = io.load_tagkey(tag_handle)
        tag_handle.close()

    result = main(files_df, tags_df=tagkey_df, seq_type=args.seqtype)
    io.write(result, sink, fast=args.fast)
def wrapper(args): """ Wrapper for function for scan_model.main() """ # Prepare input to main model_df = io.load_model(args.model) seqtype, modeltype = qc.get_model_type(model_df) L = model_df.shape[0] if modeltype=='NBR': L += 1 chunksize = args.chunksize if not chunksize>0: raise SortSeqError(\ 'chunksize=%d must be positive'%chunksize) if args.numsites <= 0: raise SortSeqError('numsites=%d must be positive.'%args.numsites) if args.i and args.seq: raise SortSeqError('Cannot use flags -i and -s simultaneously.') # If sequence is provided manually if args.seq: pos_offset=0 contig_str = args.seq # Add a bit on end if circular if args.circular: contig_str += contig_str[:L-1] contig_list = [(contig_str,'manual',pos_offset)] # Otherwise, read sequence from FASTA file else: contig_list = [] inloc = io.validate_file_for_reading(args.i) if args.i else sys.stdin for i,record in enumerate(SeqIO.parse(inloc,'fasta')): name = record.name if record.name else 'contig_%d'%i # Split contig up into chunk)size bits full_contig_str = str(record.seq) # Add a bit on end if circular if args.circular: full_contig_str += full_contig_str[:L-1] # Define chunks containing chunksize sites start = 0 end = start+chunksize+L-1 while end < len(full_contig_str): contig_str = full_contig_str[start:end] contig_list.append((contig_str,name,start)) start += chunksize end = start+chunksize+L-1 contig_str = full_contig_str[start:] contig_list.append((contig_str,name,start)) if len(contig_list)==0: raise SortSeqError('No input sequences to read.') # Compute results outloc = io.validate_file_for_writing(args.out) if args.out else sys.stdout output_df = main(model_df,contig_list,numsites=args.numsites,\ verbose=args.verbose) # Write df to stdout or to outfile io.write(output_df,outloc,fast=args.fast)