def setUp(self):
    self.data = dict({"0": "ab", "1": "abababa", "2": "abab",
                      "3": "baba", "4": "ababaa", "5": "a",
                      "6": "abababa", "7": "bab", "8": "babba"})
    self.mapping = {"1": ["0", "2", "5", "6"],
                    "3": [],
                    "4": [],
                    "8": ["7"]}
    self.test_map = {'1': ('a', 'b', 'c'), '2': ('d', 'e', 'f')}

    # realistic test file
    self.tiny_test = get_qiime_project_dir() +\
        "/qiime/support_files/denoiser/TestData/tiny_test.sff.txt"

    # set up test file
    open("/tmp/denoiser_utils_dummy.tmp", "w")
    self.files_to_remove = ["/tmp/denoiser_utils_dummy.tmp"]
    self.tmpdir = ""
def setUp(self): """define some top-level data""" qiime_dir = get_qiime_project_dir() self.key = 'qiime_test' self.project_id = 'qiime_test' self.sample_id = 'qiime_sample1' self.params = [('key', self.key), ('sample', self.sample_id), ('project', self.project_id)] test_dir = path.dirname(path.abspath(__file__)) self.seq_file = path.join(test_dir, 'test_support_files', 'qiime_tutorial_split_lib_seqs_subset.fna') self.output_dir = mkdtemp() self.sample_file = [('file', 'qiime_test.fna', fasta_example)] self._paths_to_clean_up = [] self._dirs_to_clean_up = [] # make the webfile directory try: mkdir(self.output_dir) except OSError: pass # define directory to clean up self._dirs_to_clean_up = [self.output_dir]
def setUp(self):
    self.test_map = {'1': ('a', 'b', 'c'), '2': ('d', 'e', 'f')}
    self.labels = [
        'Uneven1_1 FV9NWLF.01.EVGI8 orig_bc=TCGAGCGAATCT new_bc=TCGAGCGAATCT bc_diffs=0',
        'Even1_2 FV9NWLF.01.DROG9 orig_bc=TAGTTGCGAGTC new_bc=TAGTTGCGAGTC bc_diffs=0',
        'Uneven1_3 FV9NWLF.01.DZTVJ orig_bc=TCGAGCGAATCT new_bc=TCGAGCGAATCT bc_diffs=0',
        'Uneven3_4 FV9NWLF.01.DI8SC orig_bc=TCTGCTAGATGT new_bc=TCTGCTAGATGT bc_diffs=0',
        'Even3_5 FV9NWLF.01.DW381 orig_bc=TCATCGCGATAT new_bc=TCATCGCGATAT bc_diffs=0',
        'Even3_6 FV9NWLF01DP96S orig_bc=TCATCGCGATAT new_bc=TCATCGCGATAT bc_diffs=0',
        'Uneven2_7 FV9NWLF01BOY7E orig_bc=TCGTTCACATGA new_bc=TCGTTCACATGA bc_diffs=0',
        'Even1_8 FV9NWLF01A0OG1 orig_bc=TAGTTGCGAGTC new_bc=TAGTTGCGAGTC bc_diffs=0',
        'Even2_9 FV9NWLF01DJZFF orig_bc=TCACGATTAGCG new_bc=TCACGATTAGCG bc_diffs=0',
        'Uneven1_10 FV9NWLF01D4LTB orig_bc=TCGAGCGAATCT new_bc=TCGAGCGAATCT bc_diffs=0']

    self.invalid_sequence_identifiers = [
        ['Uneven1_1 FV9NWLF_01_EVGI8 orig_bc=TCGAGCGAATCT new_bc=TCGAGCGAATCT bc_diffs=0'],
        ['Even1_2 FV9NWLF_01_DROG9 orig_bc=TAGTTGCGAGTC new_bc=TAGTTGCGAGTC bc_diffs=0'],
        ['Even1_8 FV9NWLF-01-A0OG1 orig_bc=TAGTTGCGAGTC new_bc=TAGTTGCGAGTC bc_diffs=0'],
        ['Even2_9 FV9NWLF_01-DJZFF orig_bc=TCACGATTAGCG new_bc=TCACGATTAGCG bc_diffs=0'],
        ['Uneven1_10 FV9NWLF_01.D4LTB orig_bc=TCGAGCGAATCT new_bc=TCGAGCGAATCT bc_diffs=0']]

    self.tiny_test = get_qiime_project_dir() +\
        "/qiime/support_files/denoiser/TestData/tiny_test.sff.txt"
def setUp(self): """define some top-level data""" qiime_dir = get_qiime_project_dir() self.key = 'qiime_test' self.project_id = 'qiime_test' self.sample_id = 'qiime_sample1' self.params=[('key', self.key), ('sample', self.sample_id), \ ('project', self.project_id)] test_dir = path.dirname(path.abspath(__file__)) self.seq_file=path.join(test_dir,'test_support_files',\ 'qiime_tutorial_split_lib_seqs_subset.fna') self.output_dir = get_random_directory_name(output_dir='/tmp/') self.sample_file = [('file', 'qiime_test.fna', fasta_example)] self._paths_to_clean_up = [] self._dirs_to_clean_up = [] #make the webfile directory try: mkdir(self.output_dir) except OSError: pass #define directory to clean up self._dirs_to_clean_up = [self.output_dir]
def test_for_obsolete_values(self):
    """local qiime_config has no extra params"""
    qiime_project_dir = get_qiime_project_dir()
    orig_config = parse_qiime_config_file(
        open(qiime_project_dir + '/qiime/support_files/qiime_config'))

    # check the env qiime_config
    qiime_config_env_filepath = getenv('QIIME_CONFIG_FP')
    if qiime_config_env_filepath:
        qiime_config_via_env = parse_qiime_config_file(
            open(qiime_config_env_filepath))
        extra_vals = []
        for key in qiime_config_via_env:
            if key not in orig_config:
                extra_vals.append(key)
        if extra_vals:
            self.fail("The qiime_config file set via the QIIME_CONFIG_FP " +
                      "environment variable contains obsolete parameters:\n" +
                      ", ".join(extra_vals))

    # check the qiime_config in $HOME/.qiime_config
    home_dir = getenv('HOME')
    if (exists(home_dir + "/.qiime_config")):
        qiime_config_home = parse_qiime_config_file(
            open(home_dir + "/.qiime_config"))
        extra_vals = []
        for key in qiime_config_home:
            if key not in orig_config:
                extra_vals.append(key)
        if extra_vals:
            self.fail("The .qiime_config in your HOME contains obsolete " +
                      "parameters:\n" + ", ".join(extra_vals))
def setUp(self):
    self.test_map = {"1": ("a", "b", "c"), "2": ("d", "e", "f")}
    self.labels = [
        "Uneven1_1 FV9NWLF.01.EVGI8 orig_bc=TCGAGCGAATCT new_bc=TCGAGCGAATCT bc_diffs=0",
        "Even1_2 FV9NWLF.01.DROG9 orig_bc=TAGTTGCGAGTC new_bc=TAGTTGCGAGTC bc_diffs=0",
        "Uneven1_3 FV9NWLF.01.DZTVJ orig_bc=TCGAGCGAATCT new_bc=TCGAGCGAATCT bc_diffs=0",
        "Uneven3_4 FV9NWLF.01.DI8SC orig_bc=TCTGCTAGATGT new_bc=TCTGCTAGATGT bc_diffs=0",
        "Even3_5 FV9NWLF.01.DW381 orig_bc=TCATCGCGATAT new_bc=TCATCGCGATAT bc_diffs=0",
        "Even3_6 FV9NWLF01DP96S orig_bc=TCATCGCGATAT new_bc=TCATCGCGATAT bc_diffs=0",
        "Uneven2_7 FV9NWLF01BOY7E orig_bc=TCGTTCACATGA new_bc=TCGTTCACATGA bc_diffs=0",
        "Even1_8 FV9NWLF01A0OG1 orig_bc=TAGTTGCGAGTC new_bc=TAGTTGCGAGTC bc_diffs=0",
        "Even2_9 FV9NWLF01DJZFF orig_bc=TCACGATTAGCG new_bc=TCACGATTAGCG bc_diffs=0",
        "Uneven1_10 FV9NWLF01D4LTB orig_bc=TCGAGCGAATCT new_bc=TCGAGCGAATCT bc_diffs=0",
    ]

    self.invalid_sequence_identifiers = [
        ["Uneven1_1 FV9NWLF_01_EVGI8 orig_bc=TCGAGCGAATCT new_bc=TCGAGCGAATCT bc_diffs=0"],
        ["Even1_2 FV9NWLF_01_DROG9 orig_bc=TAGTTGCGAGTC new_bc=TAGTTGCGAGTC bc_diffs=0"],
        ["Even1_8 FV9NWLF-01-A0OG1 orig_bc=TAGTTGCGAGTC new_bc=TAGTTGCGAGTC bc_diffs=0"],
        ["Even2_9 FV9NWLF_01-DJZFF orig_bc=TCACGATTAGCG new_bc=TCACGATTAGCG bc_diffs=0"],
        ["Uneven1_10 FV9NWLF_01.D4LTB orig_bc=TCGAGCGAATCT new_bc=TCGAGCGAATCT bc_diffs=0"],
    ]

    self.tiny_test = get_qiime_project_dir() + \
        "/qiime/support_files/denoiser/TestData/tiny_test.sff.txt"
def test_for_unrecognized_values(self):
    """qiime_config has no extra values"""
    error_msg_fragment = (" contains unrecognized values:\n%s\nYou can "
                          "safely remove these values from your QIIME "
                          "config file as they will be ignored by QIIME.")

    qiime_project_dir = get_qiime_project_dir()
    orig_config = parse_qiime_config_file(
        open(qiime_project_dir + '/qiime/support_files/qiime_config'))

    # check the env qiime_config
    qiime_config_env_filepath = getenv('QIIME_CONFIG_FP')
    if qiime_config_env_filepath:
        qiime_config_via_env = parse_qiime_config_file(
            open(qiime_config_env_filepath))
        extra_vals = []
        for key in qiime_config_via_env:
            if key not in orig_config:
                extra_vals.append(key)
        if extra_vals:
            self.fail("The QIIME config file set via the QIIME_CONFIG_FP "
                      "environment variable" + error_msg_fragment %
                      ", ".join(extra_vals))

    # check the qiime_config in $HOME/.qiime_config
    home_dir = getenv('HOME')
    if (exists(home_dir + "/.qiime_config")):
        qiime_config_home = parse_qiime_config_file(
            open(home_dir + "/.qiime_config"))
        extra_vals = []
        for key in qiime_config_home:
            if key not in orig_config:
                extra_vals.append(key)
        if extra_vals:
            self.fail("The .qiime_config in your HOME" +
                      error_msg_fragment % ", ".join(extra_vals))
def test_for_unrecognized_values(self):
    """qiime_config has no extra values"""
    error_msg_fragment = (" contains unrecognized values:\n%s\nYou can "
                          "safely remove these values from your QIIME "
                          "config file as they will be ignored by QIIME.")

    qiime_project_dir = get_qiime_project_dir()
    orig_config = parse_qiime_config_file(
        open(qiime_project_dir + '/qiime/support_files/qiime_config'))

    # check the env qiime_config
    qiime_config_env_filepath = getenv('QIIME_CONFIG_FP')
    if qiime_config_env_filepath:
        qiime_config_via_env = parse_qiime_config_file(
            open(qiime_config_env_filepath))
        extra_vals = []
        for key in qiime_config_via_env:
            if key not in orig_config:
                extra_vals.append(key)
        if extra_vals:
            self.fail("The QIIME config file set via the QIIME_CONFIG_FP "
                      "environment variable" + error_msg_fragment %
                      ", ".join(extra_vals))

    # check the qiime_config in $HOME/.qiime_config
    home_dir = getenv('HOME')
    if (exists(home_dir + "/.qiime_config")):
        qiime_config_home = parse_qiime_config_file(
            open(home_dir + "/.qiime_config"))
        extra_vals = []
        for key in qiime_config_home:
            if key not in orig_config:
                extra_vals.append(key)
        if extra_vals:
            self.fail("The .qiime_config in your HOME" +
                      error_msg_fragment % ", ".join(extra_vals))
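# The two tests above build their failure messages as a prefix plus
# `error_msg_fragment % ", ".join(extra_vals)`. Because '%' binds more tightly
# than '+', the fragment is formatted first and then concatenated. A minimal,
# standalone sketch of that construction (the parameter names are made up):
error_msg_fragment = (" contains unrecognized values:\n%s\nYou can "
                      "safely remove these values from your QIIME "
                      "config file as they will be ignored by QIIME.")
extra_vals = ["old_param_1", "old_param_2"]  # hypothetical unrecognized keys
message = ("The QIIME config file set via the QIIME_CONFIG_FP "
           "environment variable" + error_msg_fragment % ", ".join(extra_vals))
print message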
def test_support_files_available(self):
    """support_files are available """
    # check that the qiime/support_files directory exists
    support_files_dir = \
        join(get_qiime_project_dir(), 'qiime', 'support_files')
    self.assertTrue(exists(support_files_dir))

    # check that a file in qiime/support_files exists
    default_qiime_config_fp = join(support_files_dir, 'qiime_config')
    self.assertTrue(exists(default_qiime_config_fp))
def test_support_files_available(self):
    """support_files are available """
    # check that the qiime/support_files directory exists
    support_files_dir = \
        join(get_qiime_project_dir(), 'qiime', 'support_files')
    self.assertTrue(exists(support_files_dir))

    # check that a file in qiime/support_files exists
    default_qiime_config_fp = join(support_files_dir, 'qiime_config')
    self.assertTrue(exists(default_qiime_config_fp))
def test_generate_heatmap_plots(self):
    """generate_heatmap_plots: create default output files"""
    # create directories and move js files to verify everything works
    # in the script file
    dir_path = join(self.output_dir, 'test')
    create_dir(dir_path)

    js_dir_path = join(dir_path, 'js')
    create_dir(js_dir_path)

    self._folders_to_cleanup.append(dir_path)

    qiime_dir = get_qiime_project_dir()

    js_path = join(qiime_dir, 'qiime/support_files/js')
    shutil.copyfile(join(js_path, 'overlib.js'),
                    join(js_dir_path, 'overlib.js'))
    shutil.copyfile(join(js_path, 'otu_count_display.js'),
                    join(js_dir_path, 'otu_count_display.js'))
    shutil.copyfile(join(js_path, 'jquery.js'),
                    join(js_dir_path, 'jquery.js'))
    shutil.copyfile(join(js_path, 'jquery.tablednd_0_5.js'),
                    join(js_dir_path, 'jquery.tablednd_0_5.js'))

    # generate otu_table object
    orig_data = array([[0, 1, 2], [1000, 0, 0]])

    orig_otu_table = table_factory(orig_data,
                                   ['Sample1', 'Sample2', 'Sample3'],
                                   ['OTU1', 'OTU2'],
                                   [None, None, None],
                                   [{"taxonomy": ["Bacteria"]},
                                    {"taxonomy": ["Archaea"]}])

    # put in an OTU sort order and sample order
    otu_sort = ['OTU2', 'OTU1']
    sample_sort = ['Sample2', 'Sample1', 'Sample3']
    num_otu_hits = 3

    # generate test files
    generate_heatmap_plots(num_otu_hits, orig_otu_table, otu_sort,
                           sample_sort, dir_path, js_dir_path, 'test',
                           fractional_values=False)

    self.assertEqual(open(join(js_dir_path, 'test.js'), 'U').read(),
                     exp_js_output_file)
def test_generate_heatmap_plots(self):
    """generate_heatmap_plots: create default output files"""
    # create directories and move js files to verify everything works
    # in the script file
    dir_path = join(self.output_dir, 'test')
    create_dir(dir_path)

    js_dir_path = join(dir_path, 'js')
    create_dir(js_dir_path)

    self._folders_to_cleanup.append(dir_path)

    qiime_dir = get_qiime_project_dir()

    js_path = join(qiime_dir, 'qiime/support_files/js')
    shutil.copyfile(join(js_path, 'overlib.js'),
                    join(js_dir_path, 'overlib.js'))
    shutil.copyfile(join(js_path, 'otu_count_display.js'),
                    join(js_dir_path, 'otu_count_display.js'))
    shutil.copyfile(join(js_path, 'jquery.js'),
                    join(js_dir_path, 'jquery.js'))
    shutil.copyfile(join(js_path, 'jquery.tablednd_0_5.js'),
                    join(js_dir_path, 'jquery.tablednd_0_5.js'))

    # generate otu_table object
    orig_data = array([[0, 1, 2], [1000, 0, 0]])

    orig_otu_table = table_factory(orig_data,
                                   ['Sample1', 'Sample2', 'Sample3'],
                                   ['OTU1', 'OTU2'],
                                   [None, None, None],
                                   [{"taxonomy": ["Bacteria"]},
                                    {"taxonomy": ["Archaea"]}])

    # put in an OTU sort order and sample order
    otu_sort = ['OTU2', 'OTU1']
    sample_sort = ['Sample2', 'Sample1', 'Sample3']
    num_otu_hits = 3

    # generate test files
    generate_heatmap_plots(num_otu_hits, orig_otu_table, otu_sort,
                           sample_sort, dir_path, js_dir_path, 'test',
                           fractional_values=False)

    self.assertEqual(open(join(js_dir_path, 'test.js'), 'U').read(),
                     exp_js_output_file)
def setUp(self):
    self.data = dict({"0": "ab", "1": "abababa", "2": "abab",
                      "3": "baba", "4": "ababaa", "5": "a",
                      "6": "abababa", "7": "bab", "8": "babba"})
    self.mapping = {"1": ["0", "2", "5", "6"], "3": [], "4": [], "8": ["7"]}
    self.test_map = {'1': ('a', 'b', 'c'), '2': ('d', 'e', 'f')}

    # realistic test file
    self.tiny_test = get_qiime_project_dir() +\
        "/qiime/support_files/denoiser/TestData/tiny_test.sff.txt"

    # set up test file
    open("/tmp/denoiser_utils_dummy.tmp", "w")
    self.files_to_remove = ["/tmp/denoiser_utils_dummy.tmp"]
    self.tmpdir = ""
def test_get_qiime_project_dir(self):
    """getting the qiime project directory functions as expected """
    # Do an explicit check on whether the file system containing
    # the current file is case insensitive. This is in response
    # to SF bug #2945548, where this test would fail on certain
    # unusual circumstances on case-insensitive file systems
    # because the case of abspath(__file__) was inconsistent.
    # (If you don't believe this, set case_insensitive_filesystem
    # to False, and rename your top-level Qiime directory as
    # qiime on OS X. That should cause this test to fail as
    # actual will be path/to/qiime and expected will be
    # path/to/Qiime.) Note that we don't need to change anything
    # in the get_qiime_project_dir() function as if the
    # file system is case insensitive, the case of the returned
    # string is irrelevant.
    case_insensitive_filesystem = \
        exists(__file__.upper()) and exists(__file__.lower())

    actual = get_qiime_project_dir()

    # I base the expected here off the imported location of
    # qiime/util.py here, to handle cases where either the user has
    # Qiime in their PYTHONPATH, or when they've installed it with
    # setup.py.
    # If util.py moves this test will fail -- that
    # is what we want in this case, as the get_qiime_project_dir()
    # function would need to be modified.
    import qiime.util
    util_py_filepath = abspath(abspath(qiime.util.__file__))
    expected = dirname(dirname(util_py_filepath))

    if case_insensitive_filesystem:
        # make both lowercase if the file system is case insensitive
        actual = actual.lower()
        expected = expected.lower()

    self.assertEqual(actual, expected)
def test_get_qiime_project_dir(self):
    """getting the qiime project directory functions as expected """
    # Do an explicit check on whether the file system containing
    # the current file is case insensitive. This is in response
    # to SF bug #2945548, where this test would fail on certain
    # unusual circumstances on case-insensitive file systems
    # because the case of abspath(__file__) was inconsistent.
    # (If you don't believe this, set case_insensitive_filesystem
    # to False, and rename your top-level Qiime directory as
    # qiime on OS X. That should cause this test to fail as
    # actual will be path/to/qiime and expected will be
    # path/to/Qiime.) Note that we don't need to change anything
    # in the get_qiime_project_dir() function as if the
    # file system is case insensitive, the case of the returned
    # string is irrelevant.
    case_insensitive_filesystem = \
        exists(__file__.upper()) and exists(__file__.lower())

    actual = get_qiime_project_dir()

    # I base the expected here off the imported location of
    # qiime/util.py here, to handle cases where either the user has
    # Qiime in their PYTHONPATH, or when they've installed it with
    # setup.py.
    # If util.py moves this test will fail -- that
    # is what we want in this case, as the get_qiime_project_dir()
    # function would need to be modified.
    import qiime.util
    util_py_filepath = abspath(abspath(qiime.util.__file__))
    expected = dirname(dirname(util_py_filepath))

    if case_insensitive_filesystem:
        # make both lowercase if the file system is case insensitive
        actual = actual.lower()
        expected = expected.lower()

    self.assertEqual(actual, expected)
def setUp(self): """define some top-level data""" qiime_dir = get_qiime_project_dir() self.key = "qiime_test" self.project_id = "qiime_test" self.sample_id = "qiime_sample1" self.params = [("key", self.key), ("sample", self.sample_id), ("project", self.project_id)] test_dir = path.dirname(path.abspath(__file__)) self.seq_file = path.join(test_dir, "test_support_files", "qiime_tutorial_split_lib_seqs_subset.fna") self.output_dir = get_random_directory_name(output_dir="/tmp/") self.sample_file = [("file", "qiime_test.fna", fasta_example)] self._paths_to_clean_up = [] self._dirs_to_clean_up = [] # make the webfile directory try: mkdir(self.output_dir) except OSError: pass # define directory to clean up self._dirs_to_clean_up = [self.output_dir]
def main():
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    prefs, data, background_color, label_color, ball_scale, arrow_colors = \
        sample_color_prefs_and_map_data_from_options(opts)

    if len(opts.coord_fnames) < 2 and opts.edges_file is None:
        option_parser.error('Please provide at least two ' +
                            'coordinate files or a custom edges file')

    # Open and get coord data (for multiple coords files)
    coord_files = opts.coord_fnames
    coord_files_valid = validate_coord_files(coord_files)
    if not coord_files_valid:
        option_parser.error('Every line of every coord file must ' +
                            'have the same number of columns.')
    num_coord_files = len(coord_files)
    data['edges'], data['coord'] = \
        get_multiple_coords(coord_files, opts.edges_file, opts.serial)

    # if the edges file wasn't supplied, we appended _i to each file's samples
    # therefore we now add duplicated samples with _0, _1,... to mapping file
    if opts.edges_file is None:
        newmap = [data['map'][0]]
        for i in xrange(len(coord_files)):
            for sample in data['map'][1:]:
                newsample = ['%s_%d' % (sample[0], i)]
                newsample.extend(sample[1:])
                newmap.append(newsample)
        data['map'] = newmap

    # remove any samples not present in mapping file
    remove_unmapped_samples(data['map'], data['coord'], data['edges'])

    if(len(data['coord'][1]) == 0):
        raise ValueError, '\n\nError: None of the sample IDs in the coordinates files were present in the mapping file.\n'

    # process custom axes, if present.
    custom_axes = None
    if opts.custom_axes:
        custom_axes = process_custom_axes(opts.custom_axes)
        get_custom_coords(custom_axes, data['map'], data['coord'])
        remove_nans(data['coord'])
        scale_custom_coords(custom_axes, data['coord'])

    # Generate random output file name and create directories
    if opts.output_dir:
        create_dir(opts.output_dir)
        dir_path = opts.output_dir
    else:
        dir_path = './'

    qiime_dir = get_qiime_project_dir()

    jar_path = os.path.join(qiime_dir, 'qiime/support_files/jar/')

    data_dir_path = get_random_directory_name(output_dir=dir_path,
                                              return_absolute_path=False)

    try:
        os.mkdir(data_dir_path)
    except OSError:
        pass

    jar_dir_path = os.path.join(dir_path, 'jar')

    try:
        os.mkdir(jar_dir_path)
    except OSError:
        pass

    shutil.copyfile(os.path.join(jar_path, 'king.jar'),
                    os.path.join(jar_dir_path, 'king.jar'))

    filepath = coord_files[0]
    filename = filepath.strip().split('/')[-1]

    try:
        action = generate_3d_plots
    except NameError:
        action = None

    # Place this outside try/except so we don't mask NameError in action
    if action:
        generate_3d_plots(prefs, data, custom_axes, background_color,
                          label_color, dir_path, data_dir_path, filename,
                          ball_scale=ball_scale, arrow_colors=arrow_colors,
                          user_supplied_edges=not(opts.edges_file is None))
def main():
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    matplotlib_version = re.split("[^\d]", matplotlib.__version__)
    matplotlib_version_info = tuple([int(i) for i in matplotlib_version
                                     if i.isdigit()])

    if matplotlib_version_info != (1, 1, 0):
        print "This code was only tested with Matplotlib-1.1.0"

    data = {}

    prefs, data, background_color, label_color, ball_scale, arrow_colors = \
        sample_color_prefs_and_map_data_from_options(opts)

    data['ellipsoid_method'] = opts.ellipsoid_method

    if 0.00 <= opts.ellipsoid_opacity <= 1.00:
        data['alpha'] = opts.ellipsoid_opacity
    else:
        raise ValueError, 'The opacity must be a value between 0 and 1!'

    # Open and get coord data
    if os.path.isdir(opts.coord_fname) and opts.master_pcoa:
        data['coord'], data['support_pcoas'] = load_pcoa_files(opts.coord_fname)
        data['coord'] = get_coord(opts.master_pcoa)
    elif os.path.isdir(opts.coord_fname):
        data['coord'], data['support_pcoas'] = load_pcoa_files(opts.coord_fname)
    else:
        data['coord'] = get_coord(opts.coord_fname)

    filepath = opts.coord_fname
    basename, extension = os.path.splitext(filepath)
    filename = '%s_2D_PCoA_plots' % (basename)

    qiime_dir = get_qiime_project_dir()

    js_path = os.path.join(qiime_dir, 'qiime', 'support_files', 'js')

    if opts.output_dir:
        if os.path.exists(opts.output_dir):
            dir_path = opts.output_dir
        else:
            try:
                os.mkdir(opts.output_dir)
                dir_path = opts.output_dir
            except OSError:
                pass
    else:
        dir_path = './'

    html_dir_path = dir_path
    data_dir_path = get_random_directory_name(output_dir=dir_path)
    try:
        os.mkdir(data_dir_path)
    except OSError:
        pass

    js_dir_path = os.path.join(html_dir_path, 'js')
    try:
        os.mkdir(js_dir_path)
    except OSError:
        pass

    shutil.copyfile(os.path.join(js_path, 'overlib.js'),
                    os.path.join(js_dir_path, 'overlib.js'))

    try:
        action = generate_2d_plots
    except NameError:
        action = None

    # Place this outside try/except so we don't mask NameError in action
    if action:
        action(prefs, data, html_dir_path, data_dir_path, filename,
               background_color, label_color, opts.scree)
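# For clarity, a small standalone sketch of the Matplotlib version check used
# at the top of the main() above; "1.1.0" is an example input value, not a
# version detected from any environment.
import re

matplotlib_version = re.split("[^\d]", "1.1.0")            # -> ['1', '1', '0']
matplotlib_version_info = tuple([int(i) for i in matplotlib_version
                                 if i.isdigit()])           # -> (1, 1, 0)
if matplotlib_version_info != (1, 1, 0):
    print "This code was only tested with Matplotlib-1.1.0"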
def main():
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    # get QIIME directory
    qiime_dir = get_qiime_project_dir()

    if not opts.counts_fname:
        option_parser.error("A list of input files must be specified")

    # get color preferences
    color_prefs, color_data, background_color, label_color = \
        taxonomy_color_prefs_and_map_data_from_options(opts)

    colorby = opts.colorby
    if colorby == None:
        colorby = []
        for c in color_data["counts"].values():
            colorby.extend(c[0])
    else:
        colorby = colorby.strip().strip("'").split(",")

    counts_fname = opts.counts_fname

    # Define labels to use
    labels = opts.labels
    if not opts.labels:
        new_labels = []
        # create an empty list since the user didn't specify labels
        for i in counts_fname:
            new_labels.append("")
        labels = ",".join(new_labels)

    data = [(label, f.strip())
            for f, label in zip(counts_fname, labels.split(","))]
    filepath = data[0][1]
    filename = filepath.strip().rpartition("/")[0]
    num_categories = int(opts.num_categories)
    if num_categories <= 0:
        raise ValueError, "The number of categories has to be greater than 0!"

    # create directory path
    dir_path = os.getcwd()
    if opts.dir_path:
        dir_path = opts.dir_path
        try:
            create_dir(opts.dir_path)
        except OSError:
            pass

    # make javascript output directory
    javascript_path = os.path.join(dir_path, "js")
    try:
        create_dir(javascript_path)
    except OSError:  # raised if dir exists
        pass

    # make raw_data output directory
    raw_data_path = os.path.join(dir_path, "raw_data")
    try:
        create_dir(raw_data_path)
    except OSError:  # raised if dir exists
        pass

    # move javascript file to javascript output directory
    shutil.copyfile(
        os.path.join(qiime_dir, "qiime", "support_files", "js/overlib.js"),
        os.path.join(javascript_path, "overlib.js"))

    # make css output directory
    css_path = os.path.join(dir_path, "css")
    try:
        create_dir(css_path)
    except OSError:  # raised if dir exists
        pass

    # move css file to css output directory
    shutil.copyfile(
        os.path.join(qiime_dir, "qiime", "support_files", "css/qiime_style.css"),
        os.path.join(css_path, "qiime_style.css"))

    # verify all parameters are valid
    plot_width = float(opts.x_width)
    if plot_width <= 0:
        raise ValueError, "The width of the plot has to be greater than 0!"

    plot_height = float(opts.y_height)
    if plot_height <= 0:
        raise ValueError, "The height of the plot has to be greater than 0!"

    bar_width = float(opts.bar_width)
    if bar_width <= 0 or bar_width > 1:
        raise ValueError, "The bar width of the plot has to be between 0 and 1!"

    dpi = float(opts.dpi)
    if dpi <= 0:
        raise ValueError, "The dpi of the plot has to be greater than 0!"

    resize_nth_label = int(opts.resize_nth_label)
    if resize_nth_label < 0:
        raise ValueError, "The resize_nth_label of the plot has to be greater than 0!"

    generate_image_type = opts.type_of_file
    label_type = opts.label_type
    include_html_legend = opts.include_html_legend
    include_html_counts = opts.include_html_counts
    plots_to_make = opts.chart_type

    for chart_type in plots_to_make:
        # make pie chart output path
        charts_path = os.path.join(dir_path, "charts")
        try:
            create_dir(charts_path)
        except OSError:  # raised if dir exists
            pass

        make_all_charts(data, dir_path, filename, num_categories, colorby,
                        args, color_data, color_prefs, background_color,
                        label_color, chart_type, generate_image_type,
                        plot_width, plot_height, bar_width, dpi,
                        resize_nth_label, label_type, include_html_legend,
                        include_html_counts)
def get_denoiser_data_dir():
    """Return the directory of the denoiser error profiles. """
    dir = get_qiime_project_dir() + "/qiime/support_files/denoiser/Data/"
    return dir
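# Illustrative sketch only: one way get_denoiser_data_dir() might be combined
# with os.path to locate a file inside the error-profile directory. The
# filename below is a hypothetical placeholder, not a file guaranteed to ship
# with QIIME.
import os

data_dir = get_denoiser_data_dir()
profile_fp = os.path.join(data_dir, "example_error_profile.dat")  # hypothetical name
if not os.path.exists(profile_fp):
    print "Expected denoiser data file not found: %s" % profile_fp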
def main(): print "\nWarning: make_3d_plots.py is being deprecated in favor of make_emperor.py, and will no longer be available in QIIME 1.8.0-dev.\n" option_parser, opts, args = parse_command_line_parameters(**script_info) prefs, data, background_color, label_color, ball_scale, arrow_colors= \ sample_color_prefs_and_map_data_from_options(opts) plot_scaled= 'scaled' in opts.scaling_method plot_unscaled= 'unscaled' in opts.scaling_method if opts.output_format == 'invue': # validating the number of points for interpolation if (opts.interpolation_points<0): option_parser.error('The --interpolation_points should be ' +\ 'greater or equal to 0.') # make sure that coord file has internally consistent # of columns coord_files_valid = validate_coord_files(opts.coord_fname) if not coord_files_valid: option_parser.error('Every line of every coord file must ' +\ 'have the same number of columns.') #Open and get coord data data['coord'] = get_coord(opts.coord_fname, opts.ellipsoid_method) # remove any samples not present in mapping file remove_unmapped_samples(data['map'],data['coord']) # if no samples overlapped between mapping file and otu table, exit if len(data['coord'][0]) == 0: print "\nError: OTU table and mapping file had no samples in common\n" exit(1) if opts.output_dir: create_dir(opts.output_dir,False) dir_path=opts.output_dir else: dir_path='./' filepath=opts.coord_fname if os.path.isdir(filepath): coord_files = [fname for fname in os.listdir(filepath) if not \ fname.startswith('.')] filename = os.path.split(coord_files[0])[-1] else: filename = os.path.split(filepath)[-1] generate_3d_plots_invue(prefs, data, dir_path, filename, \ opts.interpolation_points, opts.polyhedron_points, \ opts.polyhedron_offset) #finish script return # Potential conflicts if not opts.custom_axes is None and os.path.isdir(opts.coord_fname): # can't do averaged pcoa plots _and_ custom axes in the same plot option_parser.error("Please supply either custom axes or multiple coordinate \ files, but not both.") # check that smoothness is an integer between 0 and 3 try: ellipsoid_smoothness = int(opts.ellipsoid_smoothness) except: option_parser.error("Please supply an integer ellipsoid smoothness \ value.") if ellipsoid_smoothness < 0 or ellipsoid_smoothness > 3: option_parser.error("Please supply an ellipsoid smoothness value \ between 0 and 3.") # check that opacity is a float between 0 and 1 try: ellipsoid_alpha = float(opts.ellipsoid_opacity) except: option_parser.error("Please supply a number for ellipsoid opacity.") if ellipsoid_alpha < 0 or ellipsoid_alpha > 1: option_parser.error("Please supply an ellipsoid opacity value \ between 0 and 1.") # check that ellipsoid method is valid ellipsoid_methods = ['IQR','sdev'] if not opts.ellipsoid_method in ellipsoid_methods: option_parser.error("Please supply a valid ellipsoid method. 
\ Valid methods are: " + ', '.join(ellipsoid_methods) + ".") # gather ellipsoid drawing preferences ellipsoid_prefs = {} ellipsoid_prefs["smoothness"] = ellipsoid_smoothness ellipsoid_prefs["alpha"] = ellipsoid_alpha # make sure that coord file has internally consistent # of columns coord_files_valid = validate_coord_files(opts.coord_fname) if not coord_files_valid: option_parser.error('Every line of every coord file must ' +\ 'have the same number of columns.') #Open and get coord data data['coord'] = get_coord(opts.coord_fname, opts.ellipsoid_method) # remove any samples not present in mapping file remove_unmapped_samples(data['map'],data['coord']) # if no samples overlapped between mapping file and otu table, exit if len(data['coord'][0]) == 0: print "\nError: OTU table and mapping file had no samples in common\n" exit(1) # process custom axes, if present. custom_axes = None if opts.custom_axes: custom_axes = process_custom_axes(opts.custom_axes) get_custom_coords(custom_axes, data['map'], data['coord']) remove_nans(data['coord']) scale_custom_coords(custom_axes,data['coord']) # process vectors if requested if opts.add_vectors: add_vectors={} add_vectors['vectors'] = opts.add_vectors.split(',') add_vectors['weight_by_vector'] = opts.weight_by_vector if len(add_vectors)>3: raise ValueError, 'You must add maximum 3 columns but %s' % opts.add_vectors # Validating Vectors values if opts.vectors_algorithm: axes_number = len(data['coord'][1][1]) if opts.vectors_axes<0 or opts.vectors_axes>axes_number: raise ValueError, 'vectors_algorithm should be between 0 and the max number' +\ 'of samples/pcoa-axes: %d' % len(data['coord'][1][1]) if opts.vectors_axes == 0: opts.vectors_axes = axes_number add_vectors['vectors_axes'] = opts.vectors_axes valid_chars = '_.abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789' for c in opts.vectors_path: if c not in valid_chars: raise ValueError, 'vectors_path (%s) has invalid chars' % opts.vectors_path add_vectors['vectors_output'] = {} add_vectors['vectors_algorithm']=opts.vectors_algorithm add_vectors['eigvals'] = data['coord'][3] add_vectors['window_size'] = None # checks specific for the modified first difference algorithm if add_vectors['vectors_algorithm'] == 'wdiff': try: add_vectors['window_size'] = int(opts.window_size) except TypeError: raise TypeError, 'Specify --window_size as an integer' # sanity check as the value can only be greater or equal to one if add_vectors['window_size'] < 1: raise ValueError, 'The value of window_size is invalid, '+\ 'the value must be greater than zero, not %d' % add_vectors['window_size'] else: add_vectors['vectors_algorithm'] = None add_vectors['vectors_path'] = opts.vectors_path else: add_vectors = None if opts.taxa_fname != None: # get taxonomy counts # get list of sample_ids that haven't been removed sample_ids = data['coord'][0] # get taxa summaries for all sample_ids lineages, taxa_counts = get_taxa(opts.taxa_fname, sample_ids) data['taxa'] = {} data['taxa']['lineages'] = lineages data['taxa']['counts'] = taxa_counts # get average relative abundance of taxa data['taxa']['prevalence'] = get_taxa_prevalence(data['taxa']['counts']) # get coordinates of taxa (weighted mean of sample scores) data['taxa']['coord'] = get_taxa_coords(data['taxa']['counts'], data['coord'][1]) # trim results, do NOT change order # check: https://github.com/qiime/qiime/issues/677 remove_rare_taxa(data['taxa'],nkeep=opts.n_taxa_keep) # write taxa coords if requested if not opts.biplot_output_file is None: output = 
make_biplot_scores_output(data['taxa']) fout = open(opts.biplot_output_file,'w') fout.write('\n'.join(output)) fout.close() if opts.output_dir: create_dir(opts.output_dir,False) dir_path=opts.output_dir else: dir_path='./' qiime_dir=get_qiime_project_dir() jar_path=os.path.join(qiime_dir,'qiime/support_files/jar/') data_dir_path = get_random_directory_name(output_dir=dir_path, return_absolute_path=False) try: os.mkdir(data_dir_path) except OSError: pass data_file_path=data_dir_path jar_dir_path = os.path.join(dir_path,'jar') try: os.mkdir(jar_dir_path) except OSError: pass shutil.copyfile(os.path.join(jar_path,'king.jar'), os.path.join(jar_dir_path,'king.jar')) filepath=opts.coord_fname if os.path.isdir(filepath): coord_files = [fname for fname in os.listdir(filepath) if not \ fname.startswith('.')] filename = os.path.split(coord_files[0])[-1] else: filename = os.path.split(filepath)[-1] try: action = generate_3d_plots except NameError: action = None #Place this outside try/except so we don't mask NameError in action if action: action(prefs,data,custom_axes,background_color,label_color,dir_path, \ data_file_path,filename,ellipsoid_prefs=ellipsoid_prefs, \ add_vectors=add_vectors, plot_scaled=plot_scaled, \ plot_unscaled=plot_unscaled)
def main(): print "\nWarning: compare_3d_plots.py is being deprecated in favor of make_emperor.py, and will no longer be available in QIIME 1.8.0-dev.\n" option_parser, opts, args = parse_command_line_parameters(**script_info) prefs, data, background_color, label_color, ball_scale, arrow_colors = sample_color_prefs_and_map_data_from_options( opts ) if len(opts.coord_fnames) < 2 and opts.edges_file is None: option_parser.error("Please provide at least two " + "coordinate files or a custom edges file") # Open and get coord data (for multiple coords files) coord_files = opts.coord_fnames coord_files_valid = validate_coord_files(coord_files) if not coord_files_valid: option_parser.error("Every line of every coord file must " + "have the same number of columns.") num_coord_files = len(coord_files) data["edges"], data["coord"] = get_multiple_coords(coord_files, opts.edges_file, opts.serial) # if the edges file wasn't supplied, we appended _i to each file's samples # therefore we now add duplicated samples with _0, _1,... to mapping file if opts.edges_file is None: newmap = [data["map"][0]] for i in xrange(len(coord_files)): for sample in data["map"][1:]: newsample = ["%s_%d" % (sample[0], i)] newsample.extend(sample[1:]) newmap.append(newsample) data["map"] = newmap # remove any samples not present in mapping file remove_unmapped_samples(data["map"], data["coord"], data["edges"]) if len(data["coord"][1]) == 0: raise ValueError, "\n\nError: None of the sample IDs in the coordinates files were present in the mapping file.\n" # process custom axes, if present. custom_axes = None if opts.custom_axes: custom_axes = process_custom_axes(opts.custom_axes) get_custom_coords(custom_axes, data["map"], data["coord"]) remove_nans(data["coord"]) scale_custom_coords(custom_axes, data["coord"]) # Generate random output file name and create directories if opts.output_dir: create_dir(opts.output_dir) dir_path = opts.output_dir else: dir_path = "./" qiime_dir = get_qiime_project_dir() jar_path = os.path.join(qiime_dir, "qiime/support_files/jar/") data_dir_path = get_random_directory_name(output_dir=dir_path, return_absolute_path=False) try: os.mkdir(data_dir_path) except OSError: pass jar_dir_path = os.path.join(dir_path, "jar") try: os.mkdir(jar_dir_path) except OSError: pass shutil.copyfile(os.path.join(jar_path, "king.jar"), os.path.join(jar_dir_path, "king.jar")) filepath = coord_files[0] filename = filepath.strip().split("/")[-1] try: action = generate_3d_plots except NameError: action = None # Place this outside try/except so we don't mask NameError in action if action: generate_3d_plots( prefs, data, custom_axes, background_color, label_color, dir_path, data_dir_path, filename, ball_scale=ball_scale, arrow_colors=arrow_colors, user_supplied_edges=not (opts.edges_file is None), )
def setUp(self): """ """ self.qiime_config = load_qiime_config() self.dirs_to_remove = [] self.files_to_remove = [] # Cannot use get_qiime_project_dir() due to test errors in virtual box test_dir = os.path.join(get_qiime_project_dir(),'tests') sff_original_fp = os.path.join(test_dir, 'support_files', \ 'Fasting_subset.sff') # copy sff file to working directory self.sff_dir = tempfile.mkdtemp() self.dirs_to_remove.append(self.sff_dir) self.sff_fp = os.path.join(self.sff_dir, 'Fasting_subset.sff') copy(sff_original_fp, self.sff_fp) self.files_to_remove.append(self.sff_fp) tmp_dir = self.qiime_config['temp_dir'] or '/tmp/' if not exists(tmp_dir): makedirs(tmp_dir) # if test creates the temp dir, also remove it self.dirs_to_remove.append(tmp_dir) self.wf_out = get_tmp_filename(tmp_dir=tmp_dir, prefix='qiime_wf_out',suffix='',result_constructor=str) self.dirs_to_remove.append(self.wf_out) self.fasting_mapping_fp = get_tmp_filename(tmp_dir=tmp_dir, prefix='qiime_wf_mapping',suffix='.txt') fasting_mapping_f = open(self.fasting_mapping_fp,'w') fasting_mapping_f.write(fasting_map) fasting_mapping_f.close() self.files_to_remove.append(self.fasting_mapping_fp) ''' self.fasting_seqs_fp = get_tmp_filename(tmp_dir=tmp_dir, prefix='qiime_wf_seqs',suffix='.fasta') fasting_seqs_f = open(self.fasting_seqs_fp,'w') fasting_seqs_f.write(fasting_seqs_subset) fasting_seqs_f.close() self.files_to_remove.append(self.fasting_seqs_fp) self.fasting_seqs_denoiser_fp = get_tmp_filename(tmp_dir=tmp_dir, prefix='qiime_wf_seqs',suffix='.fasta') fasting_seqs_f = open(self.fasting_seqs_denoiser_fp,'w') fasting_seqs_f.write('\n'.join(fasting_seqs_subset.split('\n')[:44])) fasting_seqs_f.close() self.files_to_remove.append(self.fasting_seqs_denoiser_fp) ''' working_dir = self.qiime_config['working_dir'] or './' jobs_dir = join(working_dir,'jobs') if not exists(jobs_dir): # only clean up the jobs dir if it doesn't already exist self.dirs_to_remove.append(jobs_dir) self.params = parse_qiime_parameters(qiime_parameters_f) signal.signal(signal.SIGALRM, timeout) # set the 'alarm' to go off in allowed_seconds seconds signal.alarm(allowed_seconds_per_test)
def main():
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    # Some code for error checking of input args:

    # Check if distance_matrix_file is valid:
    try:
        d_header, d_mat = parse_distmat(open(opts.distance_matrix_file, "U"))
    except:
        option_parser.error(
            "This does not look like a valid distance matrix file. Please supply a valid distance matrix file using the -d option.")

    if not is_symmetric_and_hollow(d_mat):
        option_parser.error("The distance matrix must be symmetric and "
                            "hollow.")

    # Check if map_fname is valid:
    try:
        mapping, m_header, m_comments = parse_mapping_file(
            open(opts.map_fname, "U"))
    except QiimeParseError:
        option_parser.error(
            "This does not look like a valid metadata mapping file. Please supply a valid mapping file using the -m option.")

    # make sure background_color is valid
    if opts.background_color not in ["black", "white"]:
        option_parser.error(
            "'%s' is not a valid background color. Please pass in either 'black' or 'white' using the -k option." % (opts.background_color))

    # make sure prefs file is valid if it exists
    if opts.prefs_path is not None:
        try:
            prefs_file = open(opts.prefs_path, "U").read()
        except IOError:
            option_parser.error(
                "Provided prefs file, '%s', does not exist. Please pass in a valid prefs file with the -p option." % (opts.prefs_path))

    if opts.prefs_path is not None:
        prefs = parse_prefs_file(prefs_file)
    else:
        prefs = None

    color_prefs, color_data, background_color, label_color, ball_scale, arrow_colors = \
        sample_color_prefs_and_map_data_from_options(opts)

    # list of labelname, groups, colors, data_colors, data_color_order
    groups_and_colors = list(iter_color_groups(mapping=color_data["map"],
                                               prefs=color_prefs))

    # dict mapping labelname to list of: [groups, colors, data_colors,
    # data_color_order]
    field_to_colors = {}
    for color_info in groups_and_colors:
        field_to_colors[color_info[0]] = color_info[1:]

    qiime_dir = get_qiime_project_dir() + "/qiime/support_files/"

    fields = opts.fields
    if fields is not None:
        fields = map(strip, fields.split(","))
        fields = [i.strip('"').strip("'") for i in fields]
    elif prefs is not None:
        fields = prefs.get("FIELDS", None)
    else:
        fields = get_interesting_mapping_fields(mapping, m_header)

    # Check that all provided fields are valid:
    if fields is not None:
        for f in fields:
            if f not in m_header:
                option_parser.error(
                    "The field, %s, is not in the provided mapping file. Please supply correct fields (using the -f option or providing a 'FIELDS' list in the prefs file) corresponding to fields in mapping file." % (f))

    within_distances, between_distances, dmat = group_distances(
        mapping_file=opts.map_fname,
        dmatrix_file=opts.distance_matrix_file,
        fields=fields,
        dir_prefix=get_random_directory_name(output_dir=opts.dir_path,
                                             prefix="distances"))

    if not opts.suppress_html_output:
        # histograms output path
        histograms_path = path.join(opts.dir_path, "histograms")
        try:
            mkdir(histograms_path)
        except OSError:  # raised if dir exists
            pass

        # draw all histograms
        distances_dict, label_to_histogram_filename = draw_all_histograms(
            single_field=within_distances,
            paired_field=between_distances,
            dmat=dmat,
            histogram_dir=histograms_path,
            field_to_color_prefs=field_to_colors,
            background_color=background_color)

        # Get relative path to histogram files.
        label_to_histogram_filename_relative = _make_relative_paths(
            label_to_histogram_filename, opts.dir_path)

        dm_fname = path.split(opts.distance_matrix_file)[-1]
        basename = path.splitext(dm_fname)[0]
        outfile_name = basename + "_distance_histograms.html"
        make_main_html(
            distances_dict=distances_dict,
            label_to_histogram_filename=label_to_histogram_filename_relative,
            root_outdir=opts.dir_path,
            outfile_name=outfile_name,
            title="Distance Histograms")

        # Handle saving web resources locally.
        # javascript file
        javascript_path = path.join(opts.dir_path, "js")
        try:
            mkdir(javascript_path)
        except OSError:  # raised if dir exists
            pass
        js_out = open(javascript_path + "/histograms.js", "w")
        js_out.write(open(qiime_dir + "js/histograms.js").read())
        js_out.close()

    monte_carlo_iters = opts.monte_carlo_iters
    if monte_carlo_iters > 0:
        # Do Monte Carlo for all fields
        monte_carlo_group_distances(mapping_file=opts.map_fname,
                                    dmatrix_file=opts.distance_matrix_file,
                                    prefs=prefs,
                                    dir_prefix=opts.dir_path,
                                    fields=fields,
                                    default_iters=monte_carlo_iters)

        # Do Monte Carlo for within and between fields
        monte_carlo_group_distances_within_between(
            single_field=within_distances,
            paired_field=between_distances,
            dmat=dmat,
            dir_prefix=opts.dir_path,
            num_iters=monte_carlo_iters)
def check_mapping_file(mapping_fp,
                       output_dir=".",
                       has_barcodes=True,
                       char_replace="_",
                       verbose=True,
                       variable_len_barcodes=False,
                       disable_primer_check=False,
                       added_demultiplex_field=None,
                       suppress_html=False):
    """ Main program function for checking mapping file

    Checks mapping file for errors, warnings, writes log file, html file,
    and corrected mapping file.

    mapping_fp:  path to metadata mapping file
    output_dir:  output directory for log, html, corrected mapping file.
    has_barcodes:  If True, will test for perform barcodes test (presence,
     uniqueness, valid IUPAC DNA chars).
    char_replace:  Character used to replace invalid characters in data
     fields.  SampleIDs always use periods to be MIENS compliant.
    verbose:  If True, a message about warnings and/or errors will be printed
     to stdout.
    variable_len_barcodes:  If True, suppresses warnings about barcodes of
     varying length.
    disable_primer_check:  If True, disables tests for valid primer sequences.
    added_demultiplex_field:  If specified, references a field in the mapping
     file to use for demultiplexing.  These are to be read from fasta labels
     during the actual demultiplexing step.  All combinations of barcodes,
     primers, and the added_demultiplex_field must be unique."""

    header, mapping_data, run_description, errors, warnings =\
        process_id_map(open(mapping_fp, 'U'), disable_primer_check,
                       has_barcodes, char_replace, variable_len_barcodes,
                       added_demultiplex_field, strip_quotes=False,
                       suppress_stripping=True)

    if not suppress_html:
        formatted_html = format_mapping_html_data(header, mapping_data,
                                                  errors, warnings)

        output_html = join(output_dir +
                           basename(mapping_fp).replace('.txt', '') + ".html")

        html_f = open(output_html, "w")
        html_f.write(formatted_html)

        # get QIIME directory
        qiime_dir = get_qiime_project_dir()

        # Write javascript file necessary for mouseover tooltips.
        # move javascript file to javascript output directory
        copyfile(join(qiime_dir, 'qiime', 'support_files', 'js/overlib.js'),
                 join(output_dir, 'overlib.js'))

    corrected_mapping_data = correct_mapping_data(mapping_data,
                                                  header, char_replace)

    output_corrected_fp = join(output_dir +
                               basename(mapping_fp).replace('.txt', '') +
                               "_corrected.txt")

    write_corrected_mapping(output_corrected_fp, header, run_description,
                            corrected_mapping_data)

    output_log_fp = join(output_dir +
                         basename(mapping_fp).replace('.txt', '') + ".log")

    write_log_file(output_log_fp, errors, warnings)

    if verbose:
        if errors or warnings:
            print "Errors and/or warnings detected in mapping file. Please " +\
                "check the log and html file for details."
        else:
            print "No errors or warnings were found in mapping file."
def check_mapping_file(mapping_fp,
                       output_dir=".",
                       has_barcodes=True,
                       char_replace="_",
                       verbose=True,
                       variable_len_barcodes=False,
                       disable_primer_check=False,
                       added_demultiplex_field=None):
    """ Main program function for checking mapping file

    Checks mapping file for errors, warnings, writes log file, html file,
    and corrected mapping file.

    mapping_fp:  path to metadata mapping file
    output_dir:  output directory for log, html, corrected mapping file.
    has_barcodes:  If True, will test for perform barcodes test (presence,
     uniqueness, valid IUPAC DNA chars).
    char_replace:  Character used to replace invalid characters in data
     fields.  SampleIDs always use periods to be MIENS compliant.
    verbose:  If True, a message about warnings and/or errors will be printed
     to stdout.
    variable_len_barcodes:  If True, suppresses warnings about barcodes of
     varying length.
    disable_primer_check:  If True, disables tests for valid primer sequences.
    added_demultiplex_field:  If specified, references a field in the mapping
     file to use for demultiplexing.  These are to be read from fasta labels
     during the actual demultiplexing step.  All combinations of barcodes,
     primers, and the added_demultiplex_field must be unique."""

    header, mapping_data, run_description, errors, warnings =\
        process_id_map(open(mapping_fp, 'U'), disable_primer_check,
                       has_barcodes, char_replace, variable_len_barcodes,
                       added_demultiplex_field)

    formatted_html = format_mapping_html_data(header, mapping_data,
                                              errors, warnings)

    output_html = join(output_dir +
                       basename(mapping_fp).replace('.txt', '') + ".html")

    html_f = open(output_html, "w")
    html_f.write(formatted_html)

    # get QIIME directory
    qiime_dir = get_qiime_project_dir()

    # Write javascript file necessary for mouseover tooltips.
    # move javascript file to javascript output directory
    copyfile(join(qiime_dir, 'qiime', 'support_files', 'js/overlib.js'),
             join(output_dir, 'overlib.js'))

    corrected_mapping_data = correct_mapping_data(mapping_data,
                                                  header, char_replace)

    output_corrected_fp = join(output_dir +
                               basename(mapping_fp).replace('.txt', '') +
                               "_corrected.txt")

    write_corrected_mapping(output_corrected_fp, header, run_description,
                            corrected_mapping_data)

    output_log_fp = join(output_dir +
                         basename(mapping_fp).replace('.txt', '') + ".log")

    write_log_file(output_log_fp, errors, warnings)

    if verbose:
        if errors or warnings:
            print "Errors and/or warnings detected in mapping file. Please " +\
                "check the log and html file for details."
        else:
            print "No errors or warnings were found in mapping file."
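# A minimal usage sketch for check_mapping_file, based only on the parameters
# documented in the docstrings above. The mapping file name and output
# directory below are hypothetical; the trailing slash matters because
# output_dir is concatenated (not joined) with the output file names.
check_mapping_file("Fasting_Map.txt",            # hypothetical mapping file
                   output_dir="mapping_output/",  # hypothetical output dir
                   has_barcodes=True,
                   char_replace="_",
                   verbose=True,
                   variable_len_barcodes=False,
                   disable_primer_check=False,
                   added_demultiplex_field=None)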
def main():
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    # Some code for error checking of input args:

    # Check if distance_matrix_file is valid:
    try:
        d_header, d_mat = parse_distmat(open(opts.distance_matrix_file, 'U'))
    except:
        option_parser.error(
            "This does not look like a valid distance matrix file. Please supply a valid distance matrix file using the -d option.")

    if not is_symmetric_and_hollow(d_mat):
        option_parser.error("The distance matrix must be symmetric and "
                            "hollow.")

    # Check if map_fname is valid:
    try:
        mapping, m_header, m_comments = \
            parse_mapping_file(open(opts.map_fname, 'U'))
    except QiimeParseError:
        option_parser.error(
            "This does not look like a valid metadata mapping file. Please supply a valid mapping file using the -m option.")

    # make sure background_color is valid
    if opts.background_color not in ['black', 'white']:
        option_parser.error(
            "'%s' is not a valid background color. Please pass in either 'black' or 'white' using the -k option." % (opts.background_color))

    # make sure prefs file is valid if it exists
    if opts.prefs_path is not None:
        try:
            prefs_file = open(opts.prefs_path, 'U').read()
        except IOError:
            option_parser.error(
                "Provided prefs file, '%s', does not exist. Please pass in a valid prefs file with the -p option." % (opts.prefs_path))

    if opts.prefs_path is not None:
        prefs = parse_prefs_file(prefs_file)
    else:
        prefs = None

    color_prefs, color_data, background_color, label_color, ball_scale, \
        arrow_colors = sample_color_prefs_and_map_data_from_options(opts)

    # list of labelname, groups, colors, data_colors, data_color_order
    groups_and_colors = list(iter_color_groups(mapping=color_data['map'],
                                               prefs=color_prefs))

    # dict mapping labelname to list of: [groups, colors, data_colors,
    # data_color_order]
    field_to_colors = {}
    for color_info in groups_and_colors:
        field_to_colors[color_info[0]] = color_info[1:]

    qiime_dir = get_qiime_project_dir() + '/qiime/support_files/'

    fields = opts.fields
    if fields is not None:
        fields = map(strip, fields.split(','))
        fields = [i.strip('"').strip("'") for i in fields]
    elif prefs is not None:
        fields = prefs.get('FIELDS', None)
    else:
        fields = get_interesting_mapping_fields(mapping, m_header)

    # Check that all provided fields are valid:
    if fields is not None:
        for f in fields:
            if f not in m_header:
                option_parser.error(
                    "The field, %s, is not in the provided mapping file. Please supply correct fields (using the -f option or providing a 'FIELDS' list in the prefs file) corresponding to fields in mapping file." % (f))

    within_distances, between_distances, dmat = \
        group_distances(mapping_file=opts.map_fname,
                        dmatrix_file=opts.distance_matrix_file,
                        fields=fields,
                        dir_prefix=get_random_directory_name(
                            output_dir=opts.dir_path, prefix='distances'))

    if not opts.suppress_html_output:
        # histograms output path
        histograms_path = path.join(opts.dir_path, 'histograms')
        try:
            mkdir(histograms_path)
        except OSError:  # raised if dir exists
            pass

        # draw all histograms
        distances_dict, label_to_histogram_filename = \
            draw_all_histograms(single_field=within_distances,
                                paired_field=between_distances,
                                dmat=dmat,
                                histogram_dir=histograms_path,
                                field_to_color_prefs=field_to_colors,
                                background_color=background_color)

        # Get relative path to histogram files.
        label_to_histogram_filename_relative = \
            _make_relative_paths(label_to_histogram_filename, opts.dir_path)

        dm_fname = path.split(opts.distance_matrix_file)[-1]
        basename = path.splitext(dm_fname)[0]
        outfile_name = basename + '_distance_histograms.html'
        make_main_html(distances_dict=distances_dict,
                       label_to_histogram_filename=label_to_histogram_filename_relative,
                       root_outdir=opts.dir_path,
                       outfile_name=outfile_name,
                       title='Distance Histograms')

        # Handle saving web resources locally.
        # javascript file
        javascript_path = path.join(opts.dir_path, 'js')
        try:
            mkdir(javascript_path)
        except OSError:  # raised if dir exists
            pass
        js_out = open(javascript_path + '/histograms.js', 'w')
        js_out.write(open(qiime_dir + 'js/histograms.js').read())
        js_out.close()

    monte_carlo_iters = opts.monte_carlo_iters
    if monte_carlo_iters > 0:
        # Do Monte Carlo for all fields
        monte_carlo_group_distances(mapping_file=opts.map_fname,
                                    dmatrix_file=opts.distance_matrix_file,
                                    prefs=prefs,
                                    dir_prefix=opts.dir_path,
                                    fields=fields,
                                    default_iters=monte_carlo_iters)

        # Do Monte Carlo for within and between fields
        monte_carlo_group_distances_within_between(
            single_field=within_distances,
            paired_field=between_distances,
            dmat=dmat,
            dir_prefix=opts.dir_path,
            num_iters=monte_carlo_iters)
def main():
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    data = {}

    # Open and get coord data
    otu_table = get_otu_counts(opts.otu_table_fp)

    # determine whether fractional values are present in OTU table
    num_otu_hits = opts.num_otu_hits
    if opts.log_transform:
        otu_table = get_log_transform(otu_table)
        num_otu_hits = 0

    fractional_values = False
    max_val = -1
    for val in otu_table.iter_data(axis='observation'):
        max_val = maximum(max_val, val.max())

    # the data cannot be of mixed types: if one is float, all are float
    fractional_values = (max_val.dtype.name == 'float32' or
                         max_val.dtype.name == 'float64')

    if fractional_values and max_val <= 1:
        if num_otu_hits > 0:
            print("Warning: OTU table appears to be using relative "
                  "abundances and num_otu_hits was set to %d. Setting "
                  "num_otu_hits to 0." % num_otu_hits)
            num_otu_hits = 0

    filepath = opts.otu_table_fp
    filename = filepath.strip().split('/')[-1].split('.')[0]

    dir_path = opts.output_dir
    create_dir(dir_path)

    js_dir_path = os.path.join(dir_path, 'js')
    create_dir(js_dir_path)

    qiime_dir = get_qiime_project_dir()

    js_path = os.path.join(qiime_dir, 'qiime/support_files/js')
    shutil.copyfile(os.path.join(js_path, 'overlib.js'),
                    os.path.join(js_dir_path, 'overlib.js'))
    shutil.copyfile(os.path.join(js_path, 'otu_count_display.js'),
                    os.path.join(js_dir_path, 'otu_count_display.js'))
    shutil.copyfile(os.path.join(js_path, 'jquery.js'),
                    os.path.join(js_dir_path, 'jquery.js'))
    shutil.copyfile(os.path.join(js_path, 'jquery.tablednd_0_5.js'),
                    os.path.join(js_dir_path, 'jquery.tablednd_0_5.js'))

    # load tree for sorting OTUs
    ordered_otu_names = None
    if opts.tree is not None:
        try:
            f = open(opts.tree, 'U')
        except (TypeError, IOError):
            raise TreeMissingError("Couldn't read tree file at path: %s" %
                                   tree_source)
        tree = parse_newick(f, PhyloNode)
        f.close()
        ordered_otu_names = [tip.Name for tip in tree.iterTips()]

    ordered_sample_names = None
    # load tree for sorting Samples
    if opts.sample_tree is not None:
        try:
            f = open(opts.sample_tree, 'U')
        except (TypeError, IOError):
            raise TreeMissingError("Couldn't read tree file at path: %s" %
                                   tree_source)
        tree = parse_newick(f, PhyloNode)
        f.close()
        ordered_sample_names = [tip.Name for tip in tree.iterTips()]
    # if there's no sample tree, load sample map for sorting samples
    elif opts.map_fname is not None:
        lines = open(opts.map_fname, 'U').readlines()
        map = parse_mapping_file(lines)[0]
        ordered_sample_names = [row[0] for row in map]

    try:
        action = generate_heatmap_plots
    except NameError:
        action = None

    # Place this outside try/except so we don't mask NameError in action
    if action:
        action(num_otu_hits, otu_table, ordered_otu_names,
               ordered_sample_names, dir_path, js_dir_path, filename,
               fractional_values)
def main():
    option_parser, opts, args = \
        parse_command_line_parameters(**script_info)

    if (opts.suppress_unit_tests and opts.suppress_script_usage_tests):
        option_parser.error(
            "You're suppressing both test types. Nothing to run.")

    test_dir = abspath(dirname(__file__))

    unittest_good_pattern = re.compile('OK\s*$')
    application_not_found_pattern = re.compile('ApplicationNotFoundError')
    python_name = 'python'
    bad_tests = []
    missing_application_tests = []

    # Run through all of QIIME's unit tests, and keep track of any files
    # which fail unit tests.
    if not opts.suppress_unit_tests:
        unittest_names = []
        if not opts.unittest_glob:
            for root, dirs, files in walk(test_dir):
                for name in files:
                    if name.startswith('test_') and name.endswith('.py'):
                        unittest_names.append(join(root, name))
        else:
            for fp in glob(opts.unittest_glob):
                fn = split(fp)[1]
                if fn.startswith('test_') and fn.endswith('.py'):
                    unittest_names.append(abspath(fp))

        unittest_names.sort()

        for unittest_name in unittest_names:
            print "Testing %s:\n" % unittest_name
            command = '%s %s -v' % (python_name, unittest_name)
            stdout, stderr, return_value = qiime_system_call(command)
            print stderr
            if not unittest_good_pattern.search(stderr):
                if application_not_found_pattern.search(stderr):
                    missing_application_tests.append(unittest_name)
                else:
                    bad_tests.append(unittest_name)

    qiime_test_data_dir = join(get_qiime_project_dir(), 'qiime_test_data')
    qiime_test_data_dir_exists = exists(qiime_test_data_dir)
    if not opts.suppress_script_usage_tests and qiime_test_data_dir_exists:
        if opts.script_usage_tests is not None:
            script_usage_tests = opts.script_usage_tests.split(',')
        else:
            script_usage_tests = None

        # Run the script usage testing functionality
        script_usage_result_summary, has_script_usage_example_failures = \
            run_script_usage_tests(
                test_data_dir=qiime_test_data_dir,
                scripts_dir=get_qiime_scripts_dir(),
                working_dir=qiime_config['temp_dir'],
                verbose=True,
                tests=script_usage_tests,
                force_overwrite=True,
                timeout=240)

    print "==============\nResult summary\n=============="

    if not opts.suppress_unit_tests:
        print "\nUnit test result summary\n------------------------\n"
        if bad_tests:
            print "\nFailed the following unit tests.\n%s" % '\n'.join(bad_tests)

        if missing_application_tests:
            print "\nFailed the following unit tests, in part or whole due " +\
                "to missing external applications.\nDepending on the QIIME features " +\
                "you plan to use, this may not be critical.\n%s" \
                % '\n'.join(missing_application_tests)

        if not (missing_application_tests or bad_tests):
            print "\nAll unit tests passed.\n\n"

    if not opts.suppress_script_usage_tests:
        if qiime_test_data_dir_exists:
            print "\nScript usage test result summary\n--------------------------------\n"
            print script_usage_result_summary
        else:
            print "\nCould not run script usage tests because the directory %s does not exist." % qiime_test_data_dir
        print ""

    # If script usage tests weren't suppressed, the qiime_test_data dir must
    # exist and we can't have any failures.
    script_usage_tests_success = (opts.suppress_script_usage_tests or
                                  (qiime_test_data_dir_exists and
                                   not has_script_usage_example_failures))

    # If any of the unit tests or script usage tests fail, or if we have any
    # missing application errors, use return code 1 (as python's unittest
    # module does to indicate one or more failures).
    return_code = 1
    if (len(bad_tests) == 0 and len(missing_application_tests) == 0 and
            script_usage_tests_success):
        return_code = 0

    return return_code
def get_flowgram_ali_exe(): """Return the path to the flowgram alignment prog """ fp = get_qiime_project_dir() +\ "/qiime/support_files/denoiser/bin/FlowgramAli_4frame" return fp
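# --- Illustrative sketch, not part of the original source ------------------
# One way the path returned by get_flowgram_ali_exe() could be validated
# before the denoiser shells out to the aligner binary; the real
# check_flowgram_ali_exe() in qiime.denoiser.utils may behave differently.
from os import access, X_OK
from os.path import exists


def flowgram_ali_exe_is_usable():
    """Return True if the FlowgramAli_4frame binary exists and is executable."""
    fp = get_flowgram_ali_exe()
    return exists(fp) and access(fp, X_OK)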
def main(): option_parser, opts, args = parse_command_line_parameters(**script_info) #Check the version of Matplotlib matplotlib_version = re.split("[^\d]", matplotlib.__version__) matplotlib_version_info = tuple([int(i) for i in matplotlib_version if \ i.isdigit()]) if matplotlib_version_info != (1,1,0): print "This code was only tested with Matplotlib-1.1.0" #get QIIME directory qiime_dir=get_qiime_project_dir() if not opts.counts_fname: option_parser.error("A list of input files must be specified") #get color preferences color_prefs, color_data, background_color, label_color= \ taxonomy_color_prefs_and_map_data_from_options(opts) colorby = opts.colorby if colorby==None: colorby=[] for c in color_data['counts'].values(): colorby.extend(c[0]) else: colorby=colorby.strip().strip("'").split(',') counts_fname = opts.counts_fname #Define labels to use labels = opts.labels if not opts.labels: new_labels=[] #create an empty list since the user didn't specify labels for i in counts_fname: new_labels.append("") labels=','.join(new_labels) data = [(label,f.strip()) \ for f,label in zip(counts_fname,labels.split(","))] filepath=data[0][1] filename=filepath.strip().rpartition('/')[0] num_categories = int(opts.num_categories) if num_categories<=0: raise ValueError, 'The number of categories has to be greater than 0!' #create directory path dir_path = os.getcwd() if opts.dir_path: dir_path = opts.dir_path try: create_dir(opts.dir_path) except OSError: pass #make javascript output directory javascript_path = os.path.join(dir_path,'js') try: create_dir(javascript_path) except OSError: #raised if dir exists pass #make raw_data output directory raw_data_path = os.path.join(dir_path,'raw_data') try: create_dir(raw_data_path) except OSError: #raised if dir exists pass # move javascript file to javascript output directory shutil.copyfile(os.path.join(qiime_dir,'qiime','support_files',\ 'js/overlib.js'),\ os.path.join(javascript_path,'overlib.js')) #make css output directory css_path = os.path.join(dir_path,'css') try: create_dir(css_path) except OSError: #raised if dir exists pass # move css file to css output directory shutil.copyfile(os.path.join(qiime_dir,'qiime','support_files',\ 'css/qiime_style.css'),\ os.path.join(css_path,'qiime_style.css')) # verify all parameters are valid plot_width=float(opts.x_width) if plot_width<=0: raise ValueError, 'The width of the plot has to be greater than 0!' plot_height=float(opts.y_height) if plot_height<=0: raise ValueError, 'The height of the plot has to be greater than 0!' bar_width=float(opts.bar_width) if bar_width<=0 or bar_width>1: raise ValueError, 'The bar width of the plot has to be between 0 and 1!' dpi=float(opts.dpi) if dpi<=0: raise ValueError, 'The dpi of the plot has to be greater than 0!' resize_nth_label=int(opts.resize_nth_label) if resize_nth_label<0: raise ValueError, 'The resize_nth_label of the plot has to be greater than 0!' generate_image_type=opts.type_of_file label_type=opts.label_type include_html_legend=opts.include_html_legend include_html_counts=opts.include_html_counts plots_to_make=opts.chart_type.split(',') chart_types=['area','pie','bar'] for i in plots_to_make: chart_type=i.lower().strip() if chart_type not in chart_types: raise ValueError, 'Please type in one of the appropriate chart types (i.e. %s)!' % ','.join(chart_types) #make pie chart output path charts_path = os.path.join(dir_path,'charts') try: create_dir(charts_path) except OSError: #raised if dir exists pass make_all_charts(data,dir_path,filename,num_categories, \ colorby,args,color_data, color_prefs,background_color,label_color,\ chart_type,generate_image_type,plot_width,plot_height,bar_width,dpi,\ resize_nth_label,label_type,include_html_legend,include_html_counts)
def main(): option_parser, opts, args = parse_command_line_parameters(**script_info) data = {} prefs,data,background_color,label_color,ball_scale, arrow_colors= \ sample_color_prefs_and_map_data_from_options(opts) data['ellipsoid_method'] = opts.ellipsoid_method if 0.00 <= opts.ellipsoid_opacity <= 1.00: data['alpha'] = opts.ellipsoid_opacity else: raise ValueError, 'The opacity must be a value between 0 and 1!' #Open and get coord data if os.path.isdir(opts.coord_fname) and opts.master_pcoa: data['coord'], data['support_pcoas'] = load_pcoa_files( opts.coord_fname) data['coord'] = get_coord(opts.master_pcoa) elif os.path.isdir(opts.coord_fname): data['coord'], data['support_pcoas'] = load_pcoa_files( opts.coord_fname) else: data['coord'] = get_coord(opts.coord_fname) filepath = opts.coord_fname basename, extension = os.path.splitext(filepath) filename = '%s_2D_PCoA_plots' % (basename) qiime_dir = get_qiime_project_dir() js_path = os.path.join(qiime_dir, 'qiime', 'support_files', 'js') if opts.output_dir: if os.path.exists(opts.output_dir): dir_path = opts.output_dir else: try: os.mkdir(opts.output_dir) dir_path = opts.output_dir except OSError: pass else: dir_path = './' html_dir_path = dir_path data_dir_path = get_random_directory_name(output_dir=dir_path) try: os.mkdir(data_dir_path) except OSError: pass js_dir_path = os.path.join(html_dir_path, 'js') try: os.mkdir(js_dir_path) except OSError: pass shutil.copyfile(os.path.join(js_path,'overlib.js'), \ os.path.join(js_dir_path,'overlib.js')) try: action = generate_2d_plots except NameError: action = None #Place this outside try/except so we don't mask NameError in action if action: action(prefs, data, html_dir_path, data_dir_path, filename, background_color, label_color, opts.scree)
def main(): option_parser, opts, args = parse_command_line_parameters(**script_info) data = {} prefs, data, background_color, label_color, ball_scale, arrow_colors = sample_color_prefs_and_map_data_from_options( opts ) data["ellipsoid_method"] = opts.ellipsoid_method if 0.00 <= opts.ellipsoid_opacity <= 1.00: data["alpha"] = opts.ellipsoid_opacity else: raise ValueError("The opacity must be a value between 0 and 1!") # Open and get coord data if os.path.isdir(opts.coord_fname) and opts.master_pcoa: data["coord"], data["support_pcoas"] = load_pcoa_files(opts.coord_fname) data["coord"] = get_coord(opts.master_pcoa) elif os.path.isdir(opts.coord_fname): data["coord"], data["support_pcoas"] = load_pcoa_files(opts.coord_fname) else: data["coord"] = get_coord(opts.coord_fname) filepath = opts.coord_fname basename, extension = os.path.splitext(filepath) filename = "%s_2D_PCoA_plots" % (basename) # obtaining where the files live so they can be copied qiime_dir = get_qiime_project_dir() js_path = os.path.join(qiime_dir, "qiime", "support_files", "js") if opts.output_dir: if os.path.exists(opts.output_dir): dir_path = opts.output_dir else: try: os.mkdir(opts.output_dir) dir_path = opts.output_dir except OSError: pass else: dir_path = "./" html_dir_path = dir_path data_dir_path = mkdtemp(dir=dir_path) try: os.mkdir(data_dir_path) except OSError: pass js_dir_path = os.path.join(html_dir_path, "js") try: os.mkdir(js_dir_path) except OSError: pass shutil.copyfile(os.path.join(js_path, "overlib.js"), os.path.join(js_dir_path, "overlib.js")) try: action = generate_2d_plots except NameError: action = None # Place this outside try/except so we don't mask NameError in action if action: action( prefs, data, html_dir_path, data_dir_path, filename, background_color, label_color, opts.scree, opts.pct_variation_below_one, )
def _get_R_script_dir(self): """Returns the path to the qiime R source directory """ qiime_dir = get_qiime_project_dir() script_dir = join(qiime_dir,'qiime','support_files','R') return script_dir
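# --- Illustrative sketch, not part of the original source ------------------
# Example of how the directory returned by _get_R_script_dir() might be used
# to invoke one of QIIME's supporting R scripts. The script name, argument
# string, and exact R command line are placeholders, not the real wrappers.
from os.path import join


def build_r_command(script_name, args_string):
    """Return a shell command string that runs a supporting R script."""
    r_script_dir = join(get_qiime_project_dir(), 'qiime', 'support_files', 'R')
    script_fp = join(r_script_dir, script_name)
    return 'R --slave --args %s < %s' % (args_string, script_fp)

# e.g. build_r_command('detrend.r', '-i coords.txt -o out/')  # hypothetical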
def main(): option_parser, opts, args = parse_command_line_parameters(**script_info) data = {} #Open and get coord data data['otu_counts'] = list(get_otu_counts(opts.otu_table_fp, data)) # determine whether fractional values are present in OTU table if opts.log_transform: if not opts.log_eps is None and opts.log_eps <= 0: print "Parameter 'log_eps' must be positive. Value was", opts.log_eps exit(1) data['otu_counts'][2] = get_log_transform(data['otu_counts'][2], opts.log_eps) opts.num_otu_hits = 0 # test: if using relative abundances, and opts.num_otu_hits > 0 # print warning and set to 0 fractional_values = ((data['otu_counts'][2] > 0) & \ (data['otu_counts'][2] < 1)).any() if fractional_values and (data['otu_counts'][2]).max() <= 1: if opts.num_otu_hits > 0: print "Warning: OTU table appears to be using relative abundances",\ "and num_otu_hits was set to %d. Setting num_otu_hits to 0."\ %(opts.num_otu_hits) opts.num_otu_hits = 0 filepath=opts.otu_table_fp filename=filepath.strip().split('/')[-1].split('.')[0] if opts.output_dir: if os.path.exists(opts.output_dir): dir_path=opts.output_dir else: try: os.mkdir(opts.output_dir) dir_path=opts.output_dir except OSError: pass else: dir_path='./' js_dir_path = os.path.join(dir_path,'js') try: os.mkdir(js_dir_path) except OSError: pass qiime_dir=get_qiime_project_dir() js_path=os.path.join(qiime_dir,'qiime/support_files/js') shutil.copyfile(os.path.join(js_path,'overlib.js'), os.path.join(js_dir_path,'overlib.js')) shutil.copyfile(os.path.join(js_path,'otu_count_display.js'), os.path.join(js_dir_path,'otu_count_display.js')) shutil.copyfile(os.path.join(js_path,'jquery.js'), os.path.join(js_dir_path,'jquery.js')) shutil.copyfile(os.path.join(js_path,'jquery.tablednd_0_5.js'), os.path.join(js_dir_path,'jquery.tablednd_0_5.js')) # load tree for sorting OTUs ordered_otu_names = None if not opts.tree is None: try: f = open(opts.tree, 'U') except (TypeError, IOError): raise TreeMissingError, \ "Couldn't read tree file at path: %s" % opts.tree tree = parse_newick(f, PhyloNode) f.close() ordered_otu_names = [tip.Name for tip in tree.iterTips()] ordered_sample_names = None # load tree for sorting Samples if not opts.sample_tree is None: try: f = open(opts.sample_tree, 'U') except (TypeError, IOError): raise TreeMissingError, \ "Couldn't read tree file at path: %s" % opts.sample_tree tree = parse_newick(f, PhyloNode) f.close() ordered_sample_names = [tip.Name for tip in tree.iterTips()] # if there's no sample tree, load sample map for sorting samples elif not opts.map_fname is None: lines = open(opts.map_fname,'U').readlines() map = parse_mapping_file(lines)[0] ordered_sample_names = [row[0] for row in map] data['otu_order'] = ordered_otu_names data['sample_order'] = ordered_sample_names try: action = generate_heatmap_plots except NameError: action = None #Place this outside try/except so we don't mask NameError in action if action: action(opts,data, dir_path,js_dir_path,filename, fractional_values)
def _get_R_script_dir(self): """Returns the path to the qiime R source directory """ qiime_dir = get_qiime_project_dir() script_dir = join(qiime_dir, 'qiime', 'support_files', 'R') return script_dir
def main(): option_parser, opts, args = parse_command_line_parameters(**script_info) prefs, data, background_color, label_color, ball_scale, arrow_colors = sample_color_prefs_and_map_data_from_options( opts ) scaling_methods = opts.scaling_method.split(",") plot_scaled = False plot_unscaled = False for i in scaling_methods: if i.lower() == "scaled": plot_scaled = True elif i.lower() == "unscaled": plot_unscaled = True if not (plot_scaled or plot_unscaled): raise ValueError, "You must choose a valid scaling method (scaled or unscaled)" if opts.output_format == "invue": # validating the number of points for interpolation if opts.interpolation_points < 0: option_parser.error("The --interpolation_points should be " + "greater or equal to 0.") # make sure that coord file has internally consistent # of columns coord_files_valid = validate_coord_files(opts.coord_fname) if not coord_files_valid: option_parser.error("Every line of every coord file must " + "have the same number of columns.") # Open and get coord data data["coord"] = get_coord(opts.coord_fname, opts.ellipsoid_method) # remove any samples not present in mapping file remove_unmapped_samples(data["map"], data["coord"]) # if no samples overlapped between mapping file and otu table, exit if len(data["coord"][0]) == 0: print "\nError: OTU table and mapping file had no samples in common\n" exit(1) if opts.output_dir: create_dir(opts.output_dir, False) dir_path = opts.output_dir else: dir_path = "./" filepath = opts.coord_fname if os.path.isdir(filepath): coord_files = [fname for fname in os.listdir(filepath) if not fname.startswith(".")] filename = os.path.split(coord_files[0])[-1] else: filename = os.path.split(filepath)[-1] generate_3d_plots_invue( prefs, data, dir_path, filename, opts.interpolation_points, opts.polyhedron_points, opts.polyhedron_offset ) # finish script return # Potential conflicts if not opts.custom_axes is None and os.path.isdir(opts.coord_fname): # can't do averaged pcoa plots _and_ custom axes in the same plot option_parser.error( "Please supply either custom axes or multiple coordinate \ files, but not both." ) # check that smoothness is an integer between 0 and 3 try: ellipsoid_smoothness = int(opts.ellipsoid_smoothness) except: option_parser.error( "Please supply an integer ellipsoid smoothness \ value." ) if ellipsoid_smoothness < 0 or ellipsoid_smoothness > 3: option_parser.error( "Please supply an ellipsoid smoothness value \ between 0 and 3." ) # check that opacity is a float between 0 and 1 try: ellipsoid_alpha = float(opts.ellipsoid_opacity) except: option_parser.error("Please supply a number for ellipsoid opacity.") if ellipsoid_alpha < 0 or ellipsoid_alpha > 1: option_parser.error( "Please supply an ellipsoid opacity value \ between 0 and 1." ) # check that ellipsoid method is valid ellipsoid_methods = ["IQR", "sdev"] if not opts.ellipsoid_method in ellipsoid_methods: option_parser.error( "Please supply a valid ellipsoid method. \ Valid methods are: " + ", ".join(ellipsoid_methods) + "." ) # gather ellipsoid drawing preferences ellipsoid_prefs = {} ellipsoid_prefs["smoothness"] = ellipsoid_smoothness ellipsoid_prefs["alpha"] = ellipsoid_alpha # make sure that coord file has internally consistent # of columns coord_files_valid = validate_coord_files(opts.coord_fname) if not coord_files_valid: option_parser.error("Every line of every coord file must " + "have the same number of columns.") # Open and get coord data data["coord"] = get_coord(opts.coord_fname, opts.ellipsoid_method) # remove any samples not present in mapping file remove_unmapped_samples(data["map"], data["coord"]) # if no samples overlapped between mapping file and otu table, exit if len(data["coord"][0]) == 0: print "\nError: OTU table and mapping file had no samples in common\n" exit(1) # process custom axes, if present. custom_axes = None if opts.custom_axes: custom_axes = process_custom_axes(opts.custom_axes) get_custom_coords(custom_axes, data["map"], data["coord"]) remove_nans(data["coord"]) scale_custom_coords(custom_axes, data["coord"]) # process vectors if requested if opts.add_vectors: add_vectors = {} add_vectors["vectors"] = opts.add_vectors.split(",") add_vectors["weight_by_vector"] = opts.weight_by_vector if len(add_vectors) > 3: raise ValueError, "You must add maximum 3 columns but %s" % opts.add_vectors # Validating Vectors values if opts.vectors_algorithm: axes_number = len(data["coord"][1][1]) if opts.vectors_axes < 0 or opts.vectors_axes > axes_number: raise ValueError, "vectors_algorithm should be between 0 and the max number" + "of samples/pcoa-axes: %d" % len( data["coord"][1][1] ) if opts.vectors_axes == 0: opts.vectors_axes = axes_number add_vectors["vectors_axes"] = opts.vectors_axes valid_chars = "_.abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" for c in opts.vectors_path: if c not in valid_chars: raise ValueError, "vectors_path (%s) has invalid chars" % opts.vectors_path add_vectors["vectors_output"] = {} add_vectors["vectors_algorithm"] = opts.vectors_algorithm add_vectors["eigvals"] = data["coord"][3] add_vectors["window_size"] = None # checks specific for the modified first difference algorithm if add_vectors["vectors_algorithm"] == "wdiff": try: add_vectors["window_size"] = int(opts.window_size) except TypeError: raise TypeError, "Specify --window_size as an integer" # sanity check as the value can only be greater or equal to one if add_vectors["window_size"] < 1: raise ValueError, "The value of window_size is invalid, " + "the value must be greater than zero, not %d" % add_vectors[ "window_size" ] else: add_vectors["vectors_algorithm"] = None add_vectors["vectors_path"] = opts.vectors_path else: add_vectors = None if opts.taxa_fname != None: # get taxonomy counts # get list of sample_ids that haven't been removed sample_ids = data["coord"][0] # get taxa summaries for all sample_ids lineages, taxa_counts = get_taxa(opts.taxa_fname, sample_ids) data["taxa"] = {} data["taxa"]["lineages"] = lineages data["taxa"]["counts"] = taxa_counts # get average relative abundance of taxa data["taxa"]["prevalence"] = get_taxa_prevalence(data["taxa"]["counts"]) remove_rare_taxa(data["taxa"], nkeep=opts.n_taxa_keep) # get coordinates of taxa (weighted mean of sample scores) data["taxa"]["coord"] = get_taxa_coords(data["taxa"]["counts"], data["coord"][1]) data["taxa"]["coord"] # write taxa coords if requested if not opts.biplot_output_file is None: output = make_biplot_scores_output(data["taxa"]) fout = open(opts.biplot_output_file, "w") fout.write("\n".join(output)) fout.close() if opts.output_dir: create_dir(opts.output_dir, False) dir_path = opts.output_dir else: dir_path = "./" qiime_dir = get_qiime_project_dir() jar_path = os.path.join(qiime_dir, "qiime/support_files/jar/") data_dir_path = get_random_directory_name(output_dir=dir_path, return_absolute_path=False) try: os.mkdir(data_dir_path) except OSError: pass data_file_path = data_dir_path jar_dir_path = os.path.join(dir_path, "jar") try: os.mkdir(jar_dir_path) except OSError: pass shutil.copyfile(os.path.join(jar_path, "king.jar"), os.path.join(jar_dir_path, "king.jar")) filepath = opts.coord_fname if os.path.isdir(filepath): coord_files = [fname for fname in os.listdir(filepath) if not fname.startswith(".")] filename = os.path.split(coord_files[0])[-1] else: filename = os.path.split(filepath)[-1] try: action = generate_3d_plots except NameError: action = None # Place this outside try/except so we don't mask NameError in action if action: action( prefs, data, custom_axes, background_color, label_color, dir_path, data_file_path, filename, ellipsoid_prefs=ellipsoid_prefs, add_vectors=add_vectors, plot_scaled=plot_scaled, plot_unscaled=plot_unscaled, )
def main(): option_parser, opts, args = parse_command_line_parameters(**script_info) #get QIIME directory qiime_dir=get_qiime_project_dir() if not opts.counts_fname: option_parser.error("A list of input files must be specified") #get color preferences color_prefs, color_data, background_color, label_color= \ taxonomy_color_prefs_and_map_data_from_options(opts) colorby = opts.colorby if colorby==None: colorby=[] for c in color_data['counts'].values(): colorby.extend(c[0]) else: colorby=colorby.strip().strip("'").split(',') counts_fname = opts.counts_fname #Define labels to use labels = opts.labels if not opts.labels: new_labels=[] #create an empty list since the user didn't specify labels for i in counts_fname: new_labels.append("") labels=','.join(new_labels) data = [(label,f.strip()) \ for f,label in zip(counts_fname,labels.split(","))] filepath=data[0][1] filename=filepath.strip().rpartition('/')[0] num_categories = int(opts.num_categories) if num_categories<=0: raise ValueError, 'The number of categories has to be greater than 0!' #create directory path dir_path = os.getcwd() if opts.dir_path: dir_path = opts.dir_path try: create_dir(opts.dir_path) except OSError: pass #make javascript output directory javascript_path = os.path.join(dir_path,'js') try: create_dir(javascript_path) except OSError: #raised if dir exists pass #make raw_data output directory raw_data_path = os.path.join(dir_path,'raw_data') try: create_dir(raw_data_path) except OSError: #raised if dir exists pass # move javascript file to javascript output directory shutil.copyfile(os.path.join(qiime_dir,'qiime','support_files',\ 'js/overlib.js'),\ os.path.join(javascript_path,'overlib.js')) #make css output directory css_path = os.path.join(dir_path,'css') try: create_dir(css_path) except OSError: #raised if dir exists pass # move css file to css output directory shutil.copyfile(os.path.join(qiime_dir,'qiime','support_files',\ 'css/qiime_style.css'),\ os.path.join(css_path,'qiime_style.css')) # verify all parameters are valid plot_width=float(opts.x_width) if plot_width<=0: raise ValueError, 'The width of the plot has to be greater than 0!' plot_height=float(opts.y_height) if plot_height<=0: raise ValueError, 'The height of the plot has to be greater than 0!' bar_width=float(opts.bar_width) if bar_width<=0 or bar_width>1: raise ValueError, 'The bar width of the plot has to be between 0 and 1!' dpi=float(opts.dpi) if dpi<=0: raise ValueError, 'The dpi of the plot has to be greater than 0!' resize_nth_label=int(opts.resize_nth_label) if resize_nth_label<0: raise ValueError, 'The resize_nth_label of the plot has to be greater\ than 0!' generate_image_type=opts.type_of_file label_type=opts.label_type include_html_legend=opts.include_html_legend include_html_counts=opts.include_html_counts plots_to_make=opts.chart_type for chart_type in plots_to_make: #make pie chart output path charts_path = os.path.join(dir_path,'charts') try: create_dir(charts_path) except OSError: #raised if dir exists pass make_all_charts(data,dir_path,filename,num_categories, \ colorby,args,color_data, color_prefs,background_color,label_color,\ chart_type,generate_image_type,plot_width,plot_height,bar_width,dpi,\ resize_nth_label,label_type,include_html_legend,include_html_counts)
import signal import os from os import remove, rmdir from shutil import rmtree from subprocess import Popen, PIPE, STDOUT from cogent.util.unit_test import TestCase, main from cogent.parse.fasta import MinimalFastaParser from qiime.util import get_qiime_project_dir, get_qiime_scripts_dir from qiime.denoiser.utils import check_flowgram_ali_exe from qiime.denoiser.preprocess import make_tmp_name PROJECT_HOME = get_qiime_project_dir() # timeout handling taken from test_workflow.py class TimeExceededError(Exception): pass allowed_seconds_per_test = 240 def timeout(signum, frame): raise TimeExceededError, "Test failed to run in allowed time (%d seconds)." % allowed_seconds_per_test class DenoiserTests(TestCase): def setUp(self):
def main(): option_parser, opts, args = parse_command_line_parameters(**script_info) data = {} # Open and get coord data otu_table = get_otu_counts(opts.otu_table_fp) # determine whether fractional values are present in OTU table num_otu_hits = opts.num_otu_hits if opts.log_transform: otu_table = get_log_transform(otu_table) num_otu_hits = 0 fractional_values = False max_val = -1 for val in otu_table.iter_data(axis='observation'): max_val = maximum(max_val, val.max()) # the data cannot be of mixed types: if one is float, all are float fractional_values = ( max_val.dtype.name == 'float32' or max_val.dtype.name == 'float64') if fractional_values and max_val <= 1: if num_otu_hits > 0: print ("Warning: OTU table appears to be using relative " "abundances and num_otu_hits was set to %d. Setting " "num_otu_hits to 0." % num_otu_hits) num_otu_hits = 0 filepath = opts.otu_table_fp filename = filepath.strip().split('/')[-1].split('.')[0] dir_path = opts.output_dir create_dir(dir_path) js_dir_path = os.path.join(dir_path, 'js') create_dir(js_dir_path) qiime_dir = get_qiime_project_dir() js_path = os.path.join(qiime_dir, 'qiime/support_files/js') shutil.copyfile(os.path.join(js_path, 'overlib.js'), os.path.join(js_dir_path, 'overlib.js')) shutil.copyfile( os.path.join(js_path, 'otu_count_display.js'), os.path.join(js_dir_path, 'otu_count_display.js')) shutil.copyfile(os.path.join(js_path, 'jquery.js'), os.path.join(js_dir_path, 'jquery.js')) shutil.copyfile( os.path.join(js_path, 'jquery.tablednd_0_5.js'), os.path.join(js_dir_path, 'jquery.tablednd_0_5.js')) # load tree for sorting OTUs ordered_otu_names = None if opts.tree is not None: try: f = open(opts.tree, 'U') except (TypeError, IOError): raise TreeMissingError( "Couldn't read tree file at path: %s" % opts.tree) tree = parse_newick(f, PhyloNode) f.close() ordered_otu_names = [tip.Name for tip in tree.iterTips()] ordered_sample_names = None # load tree for sorting Samples if opts.sample_tree is not None: try: f = open(opts.sample_tree, 'U') except (TypeError, IOError): raise TreeMissingError( "Couldn't read tree file at path: %s" % opts.sample_tree) tree = parse_newick(f, PhyloNode) f.close() ordered_sample_names = [tip.Name for tip in tree.iterTips()] # if there's no sample tree, load sample map for sorting samples elif opts.map_fname is not None: lines = open(opts.map_fname, 'U').readlines() map = parse_mapping_file(lines)[0] ordered_sample_names = [row[0] for row in map] try: action = generate_heatmap_plots except NameError: action = None # Place this outside try/except so we don't mask NameError in action if action: action( num_otu_hits, otu_table, ordered_otu_names, ordered_sample_names, dir_path, js_dir_path, filename, fractional_values)
import signal import os from os import remove, rmdir from shutil import rmtree from subprocess import Popen, PIPE, STDOUT from unittest import TestCase, main from skbio.parse.sequences import parse_fasta from qiime.util import get_qiime_project_dir from qiime.denoiser.utils import check_flowgram_ali_exe from qiime.denoiser.preprocess import make_tmp_name PROJECT_HOME = get_qiime_project_dir() # timeout handling taken from test_workflow.py class TimeExceededError(Exception): pass allowed_seconds_per_test = 240 def timeout(signum, frame): raise TimeExceededError( "Test failed to run in allowed time (%d seconds)." % allowed_seconds_per_test)
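# --- Illustrative sketch, not part of the original test module -------------
# How the timeout pieces above are typically wired together: the SIGALRM
# handler is installed once, the alarm is armed before the long-running call,
# and disarmed afterwards so later tests are not interrupted. The exact usage
# inside the denoiser tests may differ.
def run_with_timeout(func, *args, **kwargs):
    """Run func(), raising TimeExceededError if it exceeds the allowed time."""
    signal.signal(signal.SIGALRM, timeout)
    signal.alarm(allowed_seconds_per_test)  # arm the alarm
    try:
        return func(*args, **kwargs)
    finally:
        signal.alarm(0)  # disarm so subsequent code is unaffected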
def main(): option_parser, opts, args = parse_command_line_parameters(**script_info) prefs, data, background_color, label_color, ball_scale, arrow_colors = \ sample_color_prefs_and_map_data_from_options(opts) if len(opts.coord_fnames.split(',')) < 2 and opts.edges_file is None: option_parser.error('Please provide at least two ' +\ 'coordinate files or a custom edges file') #Open and get coord data (for multiple coords files) coord_files = process_coord_filenames(opts.coord_fnames) coord_files_valid = validate_coord_files(coord_files) if not coord_files_valid: option_parser.error('Every line of every coord file must ' +\ 'have the same number of columns.') num_coord_files = len(coord_files) data['edges'], data['coord'] = \ get_multiple_coords(coord_files, opts.edges_file, opts.serial) # if the edges file wasn't supplied, we appended _i to each file's samples # therefore we now add duplicated samples with _0, _1,... to mapping file if opts.edges_file is None: newmap = [data['map'][0]] for i in xrange(len(coord_files)): for sample in data['map'][1:]: newsample = ['%s_%d' %(sample[0],i)] newsample.extend(sample[1:]) newmap.append(newsample) data['map'] = newmap # remove any samples not present in mapping file remove_unmapped_samples(data['map'],data['coord'],data['edges']) if(len(data['coord'][1]) == 0): raise ValueError, '\n\nError: None of the sample IDs in the coordinates files were present in the mapping file.\n' # process custom axes, if present. custom_axes = None if opts.custom_axes: custom_axes = process_custom_axes(opts.custom_axes) get_custom_coords(custom_axes, data['map'], data['coord']) remove_nans(data['coord']) scale_custom_coords(custom_axes,data['coord']) # Generate random output file name and create directories if opts.output_dir: create_dir(opts.output_dir) dir_path = opts.output_dir else: dir_path='./' qiime_dir=get_qiime_project_dir() jar_path=os.path.join(qiime_dir,'qiime/support_files/jar/') data_dir_path = get_random_directory_name(output_dir=dir_path, return_absolute_path=False) try: os.mkdir(data_dir_path) except OSError: pass jar_dir_path = os.path.join(dir_path,'jar') try: os.mkdir(jar_dir_path) except OSError: pass shutil.copyfile(os.path.join(jar_path,'king.jar'), os.path.join(jar_dir_path,'king.jar')) filepath=coord_files[0] filename=filepath.strip().split('/')[-1] try: action = generate_3d_plots except NameError: action = None #Place this outside try/except so we don't mask NameError in action if action: generate_3d_plots(prefs, data, custom_axes, background_color, label_color, dir_path, data_dir_path, filename, ball_scale=ball_scale, arrow_colors=arrow_colors, user_supplied_edges=not(opts.edges_file is None))
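# --- Illustrative sketch, not part of the original script ------------------
# When several coord files are combined without an explicit edges file, each
# file's samples get an _0, _1, ... suffix, so the mapping rows have to be
# duplicated with the same suffixes; this mirrors the newmap loop above.
def duplicate_mapping_rows(mapping_rows, num_coord_files):
    """Return mapping rows duplicated once per coord file with _i suffixes."""
    header, body = mapping_rows[0], mapping_rows[1:]
    newmap = [header]
    for i in range(num_coord_files):
        for sample in body:
            newmap.append(['%s_%d' % (sample[0], i)] + list(sample[1:]))
    return newmap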
def main(): option_parser, opts, args = parse_command_line_parameters(**script_info) prefs, data, background_color, label_color, ball_scale, arrow_colors= \ sample_color_prefs_and_map_data_from_options(opts) if opts.output_format == 'invue': # validating the number of points for interpolation if (opts.interpolation_points<0): option_parser.error('The --interpolation_points should be ' +\ 'greater or equal to 0.') # make sure that coord file has internally consistent # of columns coord_files_valid = validate_coord_files(opts.coord_fname) if not coord_files_valid: option_parser.error('Every line of every coord file must ' +\ 'have the same number of columns.') coord_files_valid = validate_coord_files(opts.coord_fname) if not coord_files_valid: option_parser.error('Every line of every coord file must ' +\ 'have the same number of columns.') #Open and get coord data data['coord'] = get_coord(opts.coord_fname, opts.ellipsoid_method) # remove any samples not present in mapping file remove_unmapped_samples(data['map'],data['coord']) # if no samples overlapped between mapping file and otu table, exit if len(data['coord'][0]) == 0: print "\nError: OTU table and mapping file had no samples in common\n" exit(1) if opts.output_dir: create_dir(opts.output_dir,False) dir_path=opts.output_dir else: dir_path='./' filepath=opts.coord_fname if os.path.isdir(filepath): coord_files = [fname for fname in os.listdir(filepath) if not \ fname.startswith('.')] filename = os.path.split(coord_files[0])[-1] else: filename = os.path.split(filepath)[-1] generate_3d_plots_invue(prefs, data, dir_path, filename, \ opts.interpolation_points, opts.polyhedron_points, \ opts.polyhedron_offset) #finish script return # Potential conflicts if not opts.custom_axes is None and os.path.isdir(opts.coord_fname): # can't do averaged pcoa plots _and_ custom axes in the same plot option_parser.error("Please supply either custom axes or multiple coordinate \ files, but not both.") # check that smoothness is an integer between 0 and 3 try: ellipsoid_smoothness = int(opts.ellipsoid_smoothness) except: option_parser.error("Please supply an integer ellipsoid smoothness \ value.") if ellipsoid_smoothness < 0 or ellipsoid_smoothness > 3: option_parser.error("Please supply an ellipsoid smoothness value \ between 0 and 3.") # check that opacity is a float between 0 and 1 try: ellipsoid_alpha = float(opts.ellipsoid_opacity) except: option_parser.error("Please supply a number for ellipsoid opacity.") if ellipsoid_alpha < 0 or ellipsoid_alpha > 1: option_parser.error("Please supply an ellipsoid opacity value \ between 0 and 1.") # check that ellipsoid method is valid ellipsoid_methods = ['IQR','sdev'] if not opts.ellipsoid_method in ellipsoid_methods: option_parser.error("Please supply a valid ellipsoid method. \ Valid methods are: " + ', '.join(ellipsoid_methods) + ".") # gather ellipsoid drawing preferences ellipsoid_prefs = {} ellipsoid_prefs["smoothness"] = ellipsoid_smoothness ellipsoid_prefs["alpha"] = ellipsoid_alpha # make sure that coord file has internally consistent # of columns coord_files_valid = validate_coord_files(opts.coord_fname) if not coord_files_valid: option_parser.error('Every line of every coord file must ' +\ 'have the same number of columns.') #Open and get coord data data['coord'] = get_coord(opts.coord_fname, opts.ellipsoid_method) # remove any samples not present in mapping file remove_unmapped_samples(data['map'],data['coord']) # if no samples overlapped between mapping file and otu table, exit if len(data['coord'][0]) == 0: print "\nError: OTU table and mapping file had no samples in common\n" exit(1) if opts.taxa_fname != None: # get taxonomy counts # get list of sample_ids that haven't been removed sample_ids = data['coord'][0] # get taxa summaries for all sample_ids lineages, taxa_counts = get_taxa(opts.taxa_fname, sample_ids) data['taxa'] = {} data['taxa']['lineages'] = lineages data['taxa']['counts'] = taxa_counts # get average relative abundance of taxa data['taxa']['prevalence'] = get_taxa_prevalence(data['taxa']['counts']) remove_rare_taxa(data['taxa'],nkeep=opts.n_taxa_keep) # get coordinates of taxa (weighted mean of sample scores) data['taxa']['coord'] = get_taxa_coords(data['taxa']['counts'], data['coord'][1]) data['taxa']['coord'] # write taxa coords if requested if not opts.biplot_output_file is None: output = make_biplot_scores_output(data['taxa']) fout = open(opts.biplot_output_file,'w') fout.write('\n'.join(output)) fout.close() # process custom axes, if present. custom_axes = None if opts.custom_axes: custom_axes = process_custom_axes(opts.custom_axes) get_custom_coords(custom_axes, data['map'], data['coord']) remove_nans(data['coord']) scale_custom_coords(custom_axes,data['coord']) if opts.output_dir: create_dir(opts.output_dir,False) dir_path=opts.output_dir else: dir_path='./' qiime_dir=get_qiime_project_dir() jar_path=os.path.join(qiime_dir,'qiime/support_files/jar/') data_dir_path = get_random_directory_name(output_dir=dir_path, return_absolute_path=False) try: os.mkdir(data_dir_path) except OSError: pass data_file_path=data_dir_path jar_dir_path = os.path.join(dir_path,'jar') try: os.mkdir(jar_dir_path) except OSError: pass shutil.copyfile(os.path.join(jar_path,'king.jar'), os.path.join(jar_dir_path,'king.jar')) filepath=opts.coord_fname if os.path.isdir(filepath): coord_files = [fname for fname in os.listdir(filepath) if not \ fname.startswith('.')] filename = os.path.split(coord_files[0])[-1] else: filename = os.path.split(filepath)[-1] try: action = generate_3d_plots except NameError: action = None #Place this outside try/except so we don't mask NameError in action if action: action(prefs,data,custom_axes,background_color,label_color,dir_path, \ data_file_path,filename,ellipsoid_prefs=ellipsoid_prefs)