def main():
    """Parse CLI options and run the QIIME data preparation workflow."""
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    verbose = opts.verbose
    input_fp = opts.input_fp
    output_dir = opts.output_dir
    print_only = opts.print_only
    parallel = opts.parallel
    # No longer checking that jobs_to_start > 2, but
    # commenting as we may change our minds about this.
    # if parallel: raise_error_on_parallel_unavailable()

    if opts.parameter_fp:
        try:
            parameter_f = open(opts.parameter_fp)
        except IOError:
            # Function-call raise form (the old `raise E, msg` syntax is
            # deprecated and not forward-compatible).
            raise IOError("Can't open parameters file (%s). Does it exist? Do you have read access?" % opts.parameter_fp)
        params = parse_qiime_parameters(parameter_f)
    else:
        # empty list returns empty defaultdict for now
        params = parse_qiime_parameters([])

    jobs_to_start = opts.jobs_to_start
    default_jobs_to_start = qiime_config["jobs_to_start"]
    validate_and_set_jobs_to_start(params,
                                   jobs_to_start,
                                   default_jobs_to_start,
                                   parallel,
                                   option_parser)

    try:
        makedirs(output_dir)
    except OSError:
        if opts.force:
            pass
        else:
            # Since the analysis can take quite a while, this check helps
            # users avoid overwriting previous output.
            print("Output directory already exists. Please choose a different directory, or force overwrite with -f.")
            exit(1)

    # Either print the commands (dry run) or execute them serially.
    command_handler = print_commands if print_only else call_commands_serially
    status_update_callback = print_to_stdout if verbose else no_status_updates

    run_qiime_data_preparation(
        input_fp,
        output_dir,
        command_handler=command_handler,
        params=params,
        qiime_config=qiime_config,
        parallel=parallel,
        status_update_callback=status_update_callback,
    )
def main():
    """Parse CLI options and run the jackknifed beta diversity workflow."""
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    verbose = opts.verbose
    otu_table_fp = opts.otu_table_fp
    output_dir = opts.output_dir
    tree_fp = opts.tree_fp
    seqs_per_sample = opts.seqs_per_sample
    print_only = opts.print_only
    master_tree = opts.master_tree
    parallel = opts.parallel
    # No longer checking that jobs_to_start > 2, but
    # commenting as we may change our minds about this.
    # if parallel: raise_error_on_parallel_unavailable()

    if opts.parameter_fp:
        try:
            parameter_f = open(opts.parameter_fp)
        except IOError:
            # Function-call raise form (the old `raise E, msg` syntax is
            # deprecated and not forward-compatible).
            raise IOError("Can't open parameters file (%s). Does it exist? Do you have read access?" % opts.parameter_fp)
        params = parse_qiime_parameters(parameter_f)
    else:
        # empty list returns empty defaultdict for now
        params = parse_qiime_parameters([])

    jobs_to_start = opts.jobs_to_start
    default_jobs_to_start = qiime_config['jobs_to_start']
    validate_and_set_jobs_to_start(params,
                                   jobs_to_start,
                                   default_jobs_to_start,
                                   parallel,
                                   option_parser)

    try:
        makedirs(output_dir)
    except OSError:
        if opts.force:
            pass
        else:
            # Since the analysis can take quite a while, this check helps
            # users avoid overwriting previous output.
            print("Output directory already exists. Please choose a different directory, or force overwrite with -f.")
            exit(1)

    # Either print the commands (dry run) or execute them serially.
    command_handler = print_commands if print_only else call_commands_serially
    status_update_callback = print_to_stdout if verbose else no_status_updates

    run_jackknifed_beta_diversity(otu_table_fp=otu_table_fp,
                                  tree_fp=tree_fp,
                                  seqs_per_sample=seqs_per_sample,
                                  output_dir=output_dir,
                                  command_handler=command_handler,
                                  params=params,
                                  qiime_config=qiime_config,
                                  mapping_fp=opts.mapping_fp,
                                  parallel=parallel,
                                  status_update_callback=status_update_callback,
                                  master_tree=master_tree)
def main():
    """Parse CLI options and run the alpha rarefaction workflow."""
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    verbose = opts.verbose
    otu_table_fp = opts.otu_table_fp
    output_dir = opts.output_dir
    mapping_fp = opts.mapping_fp
    tree_fp = opts.tree_fp
    num_steps = opts.num_steps
    print_only = opts.print_only
    parallel = opts.parallel
    min_rare_depth = opts.min_rare_depth
    max_rare_depth = opts.max_rare_depth

    if opts.parameter_fp:
        try:
            parameter_f = open(opts.parameter_fp)
        except IOError:
            # Function-call raise form (the old `raise E, msg` syntax is
            # deprecated and not forward-compatible).
            raise IOError("Can't open parameters file (%s). Does it exist? Do you have read access?" % opts.parameter_fp)
        params = parse_qiime_parameters(parameter_f)
    else:
        # empty list returns empty defaultdict for now
        params = parse_qiime_parameters([])

    jobs_to_start = opts.jobs_to_start
    default_jobs_to_start = qiime_config['jobs_to_start']
    validate_and_set_jobs_to_start(params,
                                   jobs_to_start,
                                   default_jobs_to_start,
                                   parallel,
                                   option_parser)

    try:
        makedirs(output_dir)
    except OSError:
        if opts.force:
            pass
        else:
            # Since the analysis can take quite a while, this check helps
            # users avoid overwriting previous output.
            # option_parser.error prints to stderr and exits non-zero.
            option_parser.error("Output directory already exists. Please choose"
                                " a different directory, or force overwrite with -f.")

    # Either print the commands (dry run) or execute them serially.
    command_handler = print_commands if print_only else call_commands_serially
    status_update_callback = print_to_stdout if verbose else no_status_updates

    run_qiime_alpha_rarefaction(otu_table_fp=otu_table_fp,
                                mapping_fp=mapping_fp,
                                output_dir=output_dir,
                                command_handler=command_handler,
                                params=params,
                                qiime_config=qiime_config,
                                tree_fp=tree_fp,
                                num_steps=num_steps,
                                parallel=parallel,
                                min_rare_depth=min_rare_depth,
                                max_rare_depth=max_rare_depth,
                                status_update_callback=status_update_callback)
def main():
    """Parse CLI options and run the beta-diversity-through-plots workflow."""
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    verbose = opts.verbose
    otu_table_fp = opts.otu_table_fp
    output_dir = opts.output_dir
    mapping_fp = opts.mapping_fp
    tree_fp = opts.tree_fp
    print_only = opts.print_only
    seqs_per_sample = opts.seqs_per_sample
    histogram_categories = opts.histogram_categories
    # Comma-separated category list on the command line; `is not None`
    # replaces the non-idiomatic `!= None` comparison.
    if histogram_categories is not None:
        histogram_categories = histogram_categories.split(',')
    parallel = opts.parallel
    # No longer checking that jobs_to_start > 2, but
    # commenting as we may change our minds about this.
    # if parallel: raise_error_on_parallel_unavailable()

    if opts.parameter_fp:
        try:
            parameter_f = open(opts.parameter_fp)
        except IOError:
            # Function-call raise form (the old `raise E, msg` syntax is
            # deprecated and not forward-compatible).
            raise IOError("Can't open parameters file (%s). Does it exist? Do you have read access?" % opts.parameter_fp)
        params = parse_qiime_parameters(parameter_f)
    else:
        # empty list returns empty defaultdict for now
        params = parse_qiime_parameters([])

    jobs_to_start = opts.jobs_to_start
    default_jobs_to_start = qiime_config['jobs_to_start']
    validate_and_set_jobs_to_start(params,
                                   jobs_to_start,
                                   default_jobs_to_start,
                                   parallel,
                                   option_parser)

    try:
        makedirs(output_dir)
    except OSError:
        if opts.force:
            pass
        else:
            # Since the analysis can take quite a while, this check helps
            # users avoid overwriting previous output.
            print("Output directory already exists. Please choose a different directory, or force overwrite with -f.")
            exit(1)

    # Either print the commands (dry run) or execute them serially.
    command_handler = print_commands if print_only else call_commands_serially
    status_update_callback = print_to_stdout if verbose else no_status_updates

    run_beta_diversity_through_plots(otu_table_fp=otu_table_fp,
                                     mapping_fp=mapping_fp,
                                     output_dir=output_dir,
                                     command_handler=command_handler,
                                     params=params,
                                     qiime_config=qiime_config,
                                     color_by_interesting_fields_only=not opts.color_by_all_fields,
                                     sampling_depth=seqs_per_sample,
                                     histogram_categories=histogram_categories,
                                     tree_fp=tree_fp,
                                     parallel=parallel,
                                     suppress_3d_plots=opts.suppress_3d_plots,
                                     suppress_2d_plots=opts.suppress_2d_plots,
                                     status_update_callback=status_update_callback)
def main():
    """Parse CLI options and run (iterative) subsampled open-reference OTU picking."""
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    verbose = opts.verbose
    input_fps = opts.input_fps
    refseqs_fp = opts.reference_fp
    output_dir = opts.output_dir
    print_only = False  # dry-run printing is not supported by this workflow
    percent_subsample = opts.percent_subsample
    new_ref_set_id = opts.new_ref_set_id
    prefilter_refseqs_fp = opts.prefilter_refseqs_fp
    prefilter_percent_id = opts.prefilter_percent_id
    if prefilter_percent_id == 0.0:
        # 0.0 is the sentinel meaning "skip the prefilter step"
        prefilter_percent_id = None
    parallel = opts.parallel
    # No longer checking that jobs_to_start > 2, but
    # commenting as we may change our minds about this.
    # if parallel: raise_error_on_parallel_unavailable()

    if opts.parameter_fp:
        try:
            parameter_f = open(opts.parameter_fp)
        except IOError:
            # Function-call raise form (the old `raise E, msg` syntax is
            # deprecated and not forward-compatible).
            raise IOError("Can't open parameters file (%s). Does it exist? Do you have read access?" % opts.parameter_fp)
        params = parse_qiime_parameters(parameter_f)
    else:
        # empty list returns empty defaultdict for now
        params = parse_qiime_parameters([])

    jobs_to_start = opts.jobs_to_start
    default_jobs_to_start = qiime_config['jobs_to_start']
    validate_and_set_jobs_to_start(params,
                                   jobs_to_start,
                                   default_jobs_to_start,
                                   parallel,
                                   option_parser)

    try:
        makedirs(output_dir)
    except OSError:
        if opts.force:
            pass
        else:
            print("Output directory already exists. Please choose a different directory, or force overwrite with -f.")
            exit(1)

    # print_only is hard-coded False above, so this always runs serially.
    command_handler = print_commands if print_only else call_commands_serially
    status_update_callback = print_to_stdout if verbose else no_status_updates

    # NOTE(review): "referenence" matches the (misspelled) callee names defined
    # elsewhere in the project — do not fix the spelling here without renaming
    # the callees.
    if len(input_fps) == 1:
        # Single input: one-shot OTU picking.
        pick_subsampled_open_referenence_otus(
            input_fp=input_fps[0],
            refseqs_fp=refseqs_fp,
            output_dir=output_dir,
            percent_subsample=percent_subsample,
            new_ref_set_id=new_ref_set_id,
            command_handler=command_handler,
            params=params,
            min_otu_size=opts.min_otu_size,
            qiime_config=qiime_config,
            prefilter_refseqs_fp=prefilter_refseqs_fp,
            prefilter_percent_id=prefilter_percent_id,
            step1_otu_map_fp=opts.step1_otu_map_fp,
            step1_failures_fasta_fp=opts.step1_failures_fasta_fp,
            parallel=parallel,
            suppress_step4=opts.suppress_step4,
            logger=None,
            status_update_callback=status_update_callback)
    else:
        # Multiple inputs: iterate, growing the reference set each round.
        iterative_pick_subsampled_open_referenence_otus(
            input_fps=input_fps,
            refseqs_fp=refseqs_fp,
            output_dir=output_dir,
            percent_subsample=percent_subsample,
            new_ref_set_id=new_ref_set_id,
            command_handler=command_handler,
            params=params,
            min_otu_size=opts.min_otu_size,
            qiime_config=qiime_config,
            prefilter_refseqs_fp=prefilter_refseqs_fp,
            prefilter_percent_id=prefilter_percent_id,
            step1_otu_map_fp=opts.step1_otu_map_fp,
            step1_failures_fasta_fp=opts.step1_failures_fasta_fp,
            parallel=parallel,
            suppress_step4=opts.suppress_step4,
            logger=None,
            status_update_callback=status_update_callback)
def main():
    """Parse CLI options and run the alpha rarefaction workflow."""
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    verbose = opts.verbose
    otu_table_fp = opts.otu_table_fp
    output_dir = opts.output_dir
    mapping_fp = opts.mapping_fp
    tree_fp = opts.tree_fp
    num_steps = opts.num_steps
    print_only = opts.print_only
    parallel = opts.parallel
    min_rare_depth = opts.min_rare_depth
    max_rare_depth = opts.max_rare_depth

    if opts.parameter_fp:
        try:
            parameter_f = open(opts.parameter_fp)
        except IOError:
            # Function-call raise form (the old `raise E, msg` syntax is
            # deprecated and not forward-compatible).
            raise IOError("Can't open parameters file (%s). Does it exist? Do you have read access?" % opts.parameter_fp)
        params = parse_qiime_parameters(parameter_f)
    else:
        # empty list returns empty defaultdict for now
        params = parse_qiime_parameters([])

    jobs_to_start = opts.jobs_to_start
    default_jobs_to_start = qiime_config['jobs_to_start']
    validate_and_set_jobs_to_start(params,
                                   jobs_to_start,
                                   default_jobs_to_start,
                                   parallel,
                                   option_parser)

    try:
        makedirs(output_dir)
    except OSError:
        if opts.force:
            pass
        else:
            # Since the analysis can take quite a while, this check helps
            # users avoid overwriting previous output.
            print("Output directory already exists. Please choose a different directory, or force overwrite with -f.")
            exit(1)

    # Either print the commands (dry run) or execute them serially.
    command_handler = print_commands if print_only else call_commands_serially
    status_update_callback = print_to_stdout if verbose else no_status_updates

    run_qiime_alpha_rarefaction(otu_table_fp=otu_table_fp,
                                mapping_fp=mapping_fp,
                                output_dir=output_dir,
                                command_handler=command_handler,
                                params=params,
                                qiime_config=qiime_config,
                                tree_fp=tree_fp,
                                num_steps=num_steps,
                                parallel=parallel,
                                min_rare_depth=min_rare_depth,
                                max_rare_depth=max_rare_depth,
                                status_update_callback=status_update_callback)
def main():
    """Parse CLI options and run the core QIIME analyses workflow."""
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    verbose = opts.verbose
    input_fnas = opts.input_fnas
    input_quals = opts.input_quals
    output_dir = opts.output_dir
    sampling_depth = opts.seqs_per_sample
    categories = opts.categories
    reference_tree_fp = opts.reference_tree_fp
    mapping_fp = opts.mapping_fp
    print_only = False  # This feature is not currently supported
    suppress_split_libraries = opts.suppress_split_libraries
    even_sampling_keeps_all_samples = opts.even_sampling_keeps_all_samples
    parallel = opts.parallel

    if suppress_split_libraries and len(input_fnas) > 1:
        option_parser.error("Only a single fasta file can be passed with "
                            "--suppress_split_libraries")

    if opts.parameter_fp is not None:
        try:
            parameter_f = open(opts.parameter_fp)
        except IOError:
            # Function-call raise form (the old `raise E, msg` syntax is
            # deprecated and not forward-compatible).
            raise IOError("Can't open parameters file (%s). Does it exist? Do you have read access?" % opts.parameter_fp)
        params = parse_qiime_parameters(parameter_f)
    else:
        params = parse_qiime_parameters([])

    jobs_to_start = opts.jobs_to_start
    default_jobs_to_start = qiime_config['jobs_to_start']
    validate_and_set_jobs_to_start(params,
                                   jobs_to_start,
                                   default_jobs_to_start,
                                   parallel,
                                   option_parser)

    try:
        makedirs(output_dir)
    except OSError:
        if opts.force:
            pass
        else:
            # Since the analysis can take quite a while, this check helps
            # users avoid overwriting previous output.
            print("Output directory already exists. Please choose a different directory, or force overwrite with -f.")
            exit(1)

    # print_only is hard-coded False above, so this always runs serially.
    command_handler = print_commands if print_only else call_commands_serially
    status_update_callback = print_to_stdout if verbose else no_status_updates

    # Downstream expects comma-separated path strings; ','.join replaces the
    # original manual concatenation loops (same result).
    input_fnas_string = ','.join(input_fnas)
    input_quals_string = ','.join(input_quals) if input_quals else None

    run_core_qiime_analyses(
        fna_fps=input_fnas_string,
        qual_fps=input_quals_string,
        mapping_fp=mapping_fp,
        output_dir=output_dir,
        command_handler=command_handler,
        params=params,
        qiime_config=qiime_config,
        categories=categories,
        sampling_depth=sampling_depth,
        suppress_split_libraries=suppress_split_libraries,
        even_sampling_keeps_all_samples=even_sampling_keeps_all_samples,
        arare_min_rare_depth=10,
        arare_num_steps=10,
        reference_tree_fp=reference_tree_fp,
        parallel=parallel,
        status_update_callback=status_update_callback)
def main():
    """Parse CLI options and run the core QIIME analyses workflow."""
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    verbose = opts.verbose
    input_fnas = opts.input_fnas
    input_quals = opts.input_quals
    output_dir = opts.output_dir
    sampling_depth = opts.seqs_per_sample
    categories = opts.categories
    reference_tree_fp = opts.reference_tree_fp
    mapping_fp = opts.mapping_fp
    print_only = False  # This feature is not currently supported
    suppress_split_libraries = opts.suppress_split_libraries
    even_sampling_keeps_all_samples = opts.even_sampling_keeps_all_samples
    parallel = opts.parallel

    if suppress_split_libraries and len(input_fnas) > 1:
        option_parser.error("Only a single fasta file can be passed with "
                            "--suppress_split_libraries")

    if opts.parameter_fp is not None:
        try:
            parameter_f = open(opts.parameter_fp)
        except IOError:
            # Function-call raise form (the old `raise E, msg` syntax is
            # deprecated and not forward-compatible).
            raise IOError("Can't open parameters file (%s). Does it exist? Do you have read access?" % opts.parameter_fp)
        params = parse_qiime_parameters(parameter_f)
    else:
        params = parse_qiime_parameters([])

    jobs_to_start = opts.jobs_to_start
    default_jobs_to_start = qiime_config['jobs_to_start']
    validate_and_set_jobs_to_start(params,
                                   jobs_to_start,
                                   default_jobs_to_start,
                                   parallel,
                                   option_parser)

    try:
        makedirs(output_dir)
    except OSError:
        if opts.force:
            pass
        else:
            # Since the analysis can take quite a while, this check helps
            # users avoid overwriting previous output.
            print("Output directory already exists. Please choose a different directory, or force overwrite with -f.")
            exit(1)

    # print_only is hard-coded False above, so this always runs serially.
    command_handler = print_commands if print_only else call_commands_serially
    status_update_callback = print_to_stdout if verbose else no_status_updates

    # Downstream expects comma-separated path strings; ','.join replaces the
    # original manual concatenation loops (same result).
    input_fnas_string = ','.join(input_fnas)
    input_quals_string = ','.join(input_quals) if input_quals else None

    run_core_qiime_analyses(
        fna_fps=input_fnas_string,
        qual_fps=input_quals_string,
        mapping_fp=mapping_fp,
        output_dir=output_dir,
        command_handler=command_handler,
        params=params,
        qiime_config=qiime_config,
        categories=categories,
        sampling_depth=sampling_depth,
        suppress_split_libraries=suppress_split_libraries,
        even_sampling_keeps_all_samples=even_sampling_keeps_all_samples,
        arare_min_rare_depth=10,
        arare_num_steps=10,
        reference_tree_fp=reference_tree_fp,
        parallel=parallel,
        status_update_callback=status_update_callback)
def main():
    """Parse CLI options and run (iterative) subsampled open-reference OTU picking."""
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    verbose = opts.verbose
    input_fps = opts.input_fps
    refseqs_fp = opts.reference_fp
    output_dir = opts.output_dir
    print_only = False  # dry-run printing is not supported by this workflow
    percent_subsample = opts.percent_subsample
    new_ref_set_id = opts.new_ref_set_id
    prefilter_refseqs_fp = opts.prefilter_refseqs_fp
    prefilter_percent_id = opts.prefilter_percent_id
    if prefilter_percent_id == 0.0:
        # 0.0 is the sentinel meaning "skip the prefilter step"
        prefilter_percent_id = None
    parallel = opts.parallel
    # No longer checking that jobs_to_start > 2, but
    # commenting as we may change our minds about this.
    # if parallel: raise_error_on_parallel_unavailable()

    if opts.parameter_fp:
        try:
            parameter_f = open(opts.parameter_fp)
        except IOError:
            # Function-call raise form (the old `raise E, msg` syntax is
            # deprecated and not forward-compatible).
            raise IOError("Can't open parameters file (%s). Does it exist? Do you have read access?" % opts.parameter_fp)
        params = parse_qiime_parameters(parameter_f)
    else:
        # empty list returns empty defaultdict for now
        params = parse_qiime_parameters([])

    jobs_to_start = opts.jobs_to_start
    default_jobs_to_start = qiime_config['jobs_to_start']
    validate_and_set_jobs_to_start(params,
                                   jobs_to_start,
                                   default_jobs_to_start,
                                   parallel,
                                   option_parser)

    try:
        makedirs(output_dir)
    except OSError:
        if opts.force:
            pass
        else:
            # BUGFIX: the original string literal was broken across a raw
            # newline (a syntax error); reassembled via implicit concatenation.
            option_parser.error("Output directory already exists. Please choose"
                                " a different directory, or force overwrite with -f.")

    # print_only is hard-coded False above, so this always runs serially.
    command_handler = print_commands if print_only else call_commands_serially
    status_update_callback = print_to_stdout if verbose else no_status_updates

    if len(input_fps) == 1:
        # Single input: one-shot OTU picking.
        pick_subsampled_open_reference_otus(
            input_fp=input_fps[0],
            refseqs_fp=refseqs_fp,
            output_dir=output_dir,
            percent_subsample=percent_subsample,
            new_ref_set_id=new_ref_set_id,
            command_handler=command_handler,
            params=params,
            min_otu_size=opts.min_otu_size,
            run_assign_tax=not opts.suppress_taxonomy_assignment,
            run_align_and_tree=not opts.suppress_align_and_tree,
            qiime_config=qiime_config,
            prefilter_refseqs_fp=prefilter_refseqs_fp,
            prefilter_percent_id=prefilter_percent_id,
            step1_otu_map_fp=opts.step1_otu_map_fp,
            step1_failures_fasta_fp=opts.step1_failures_fasta_fp,
            parallel=parallel,
            suppress_step4=opts.suppress_step4,
            logger=None,
            status_update_callback=status_update_callback)
    else:
        # Multiple inputs: iterate, growing the reference set each round.
        iterative_pick_subsampled_open_reference_otus(
            input_fps=input_fps,
            refseqs_fp=refseqs_fp,
            output_dir=output_dir,
            percent_subsample=percent_subsample,
            new_ref_set_id=new_ref_set_id,
            command_handler=command_handler,
            params=params,
            min_otu_size=opts.min_otu_size,
            run_assign_tax=not opts.suppress_taxonomy_assignment,
            run_align_and_tree=not opts.suppress_align_and_tree,
            qiime_config=qiime_config,
            prefilter_refseqs_fp=prefilter_refseqs_fp,
            prefilter_percent_id=prefilter_percent_id,
            step1_otu_map_fp=opts.step1_otu_map_fp,
            step1_failures_fasta_fp=opts.step1_failures_fasta_fp,
            parallel=parallel,
            suppress_step4=opts.suppress_step4,
            logger=None,
            status_update_callback=status_update_callback)
def main():
    """Parse CLI options and run reference OTU picking through OTU table."""
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    verbose = opts.verbose
    input_fp = opts.input_fp
    reference_fp = opts.reference_fp
    taxonomy_fp = opts.taxonomy_fp
    output_dir = opts.output_dir
    print_only = opts.print_only
    parallel = opts.parallel
    # No longer checking that jobs_to_start > 2, but
    # commenting as we may change our minds about this.
    # if parallel: raise_error_on_parallel_unavailable()

    if opts.parameter_fp:
        try:
            parameter_f = open(opts.parameter_fp)
        except IOError:
            # Function-call raise form (the old `raise E, msg` syntax is
            # deprecated and not forward-compatible).
            raise IOError("Can't open parameters file (%s). Does it exist? Do you have read access?" % opts.parameter_fp)
        params = parse_qiime_parameters(parameter_f)
    else:
        # empty list returns empty defaultdict for now
        params = parse_qiime_parameters([])

    jobs_to_start = opts.jobs_to_start
    default_jobs_to_start = qiime_config['jobs_to_start']
    validate_and_set_jobs_to_start(params,
                                   jobs_to_start,
                                   default_jobs_to_start,
                                   parallel,
                                   option_parser)

    try:
        makedirs(output_dir)
    except OSError:
        if opts.force:
            pass
        else:
            # option_parser.error prints to stderr and exits non-zero.
            option_parser.error("Output directory already exists. Please choose"
                                " a different directory, or force overwrite with -f.")

    # Either print the commands (dry run) or execute them serially.
    command_handler = print_commands if print_only else call_commands_serially
    status_update_callback = print_to_stdout if verbose else no_status_updates

    run_pick_reference_otus_through_otu_table(
        input_fp,
        reference_fp,
        output_dir,
        taxonomy_fp,
        command_handler=command_handler,
        params=params,
        qiime_config=qiime_config,
        parallel=parallel,
        status_update_callback=status_update_callback)
def main():
    """Parse CLI options and run reference OTU picking through OTU table."""
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    verbose = opts.verbose
    input_fp = opts.input_fp
    reference_fp = opts.reference_fp
    taxonomy_fp = opts.taxonomy_fp
    output_dir = opts.output_dir
    print_only = opts.print_only
    parallel = opts.parallel
    # No longer checking that jobs_to_start > 2, but
    # commenting as we may change our minds about this.
    # if parallel: raise_error_on_parallel_unavailable()

    if opts.parameter_fp:
        try:
            parameter_f = open(opts.parameter_fp)
        except IOError:
            # Function-call raise form (the old `raise E, msg` syntax is
            # deprecated and not forward-compatible).
            raise IOError("Can't open parameters file (%s). Does it exist? Do you have read access?" % opts.parameter_fp)
        params = parse_qiime_parameters(parameter_f)
    else:
        # empty list returns empty defaultdict for now
        params = parse_qiime_parameters([])

    jobs_to_start = opts.jobs_to_start
    default_jobs_to_start = qiime_config['jobs_to_start']
    validate_and_set_jobs_to_start(params,
                                   jobs_to_start,
                                   default_jobs_to_start,
                                   parallel,
                                   option_parser)

    try:
        makedirs(output_dir)
    except OSError:
        if opts.force:
            pass
        else:
            # Avoid silently overwriting previous output.
            print("Output directory already exists. Please choose a different directory, or force overwrite with -f.")
            exit(1)

    # Either print the commands (dry run) or execute them serially.
    command_handler = print_commands if print_only else call_commands_serially
    status_update_callback = print_to_stdout if verbose else no_status_updates

    run_pick_reference_otus_through_otu_table(
        input_fp,
        reference_fp,
        output_dir,
        taxonomy_fp,
        command_handler=command_handler,
        params=params,
        qiime_config=qiime_config,
        parallel=parallel,
        status_update_callback=status_update_callback)
def main():
    """Parse CLI options and run the core diversity analyses workflow."""
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    verbose = opts.verbose
    input_biom_fp = opts.input_biom_fp
    output_dir = opts.output_dir
    categories = opts.categories
    # Comma-separated category list on the command line; `is not None`
    # replaces the non-idiomatic `!= None` comparison.
    if categories is not None:
        categories = categories.split(',')
    tree_fp = opts.tree_fp
    mapping_fp = opts.mapping_fp
    parallel = opts.parallel
    sampling_depth = opts.sampling_depth
    nonphylogenetic_diversity = opts.nonphylogenetic_diversity

    if opts.parameter_fp is not None:
        params = parse_qiime_parameters(open(opts.parameter_fp, 'U'))
    else:
        params = parse_qiime_parameters([])

    if nonphylogenetic_diversity:
        # if the user specified --nonphylogenetic_diversity and they
        # didn't define metrics in a parameters file, define them here
        if 'metrics' not in params['beta_diversity']:
            params['beta_diversity']['metrics'] = 'bray_curtis'
        if 'metrics' not in params['alpha_diversity']:
            params['alpha_diversity']['metrics'] = 'observed_species,chao1'

    jobs_to_start = opts.jobs_to_start
    default_jobs_to_start = qiime_config['jobs_to_start']
    validate_and_set_jobs_to_start(params,
                                   jobs_to_start,
                                   default_jobs_to_start,
                                   parallel,
                                   option_parser)

    # Unlike sibling scripts, this one refuses to reuse an existing directory
    # outright (no -f override path here).
    create_dir(output_dir, fail_on_exist=True)

    command_handler = call_commands_serially
    status_update_callback = print_to_stdout if verbose else no_status_updates

    run_core_diversity_analyses(
        biom_fp=input_biom_fp,
        mapping_fp=mapping_fp,
        sampling_depth=sampling_depth,
        output_dir=output_dir,
        qiime_config=load_qiime_config(),
        command_handler=command_handler,
        tree_fp=tree_fp,
        params=params,
        categories=categories,
        arare_min_rare_depth=10,
        arare_num_steps=10,
        parallel=parallel,
        status_update_callback=status_update_callback)