Code example #1
    def test_create_dir(self):
        """create_dir creates dir and fails meaningful."""

        tmp_dir_path = get_random_directory_name()
        tmp_dir_path2 = get_random_directory_name(suppress_mkdir=True)
        tmp_dir_path3 = get_random_directory_name(suppress_mkdir=True)

        self.dirs_to_remove.append(tmp_dir_path)
        self.dirs_to_remove.append(tmp_dir_path2)
        self.dirs_to_remove.append(tmp_dir_path3)

        # create on existing dir raises OSError if fail_on_exist=True
        self.assertRaises(OSError,
                          create_dir,
                          tmp_dir_path,
                          fail_on_exist=True)
        self.assertEqual(
            create_dir(tmp_dir_path,
                       fail_on_exist=True,
                       handle_errors_externally=True), 1)

        # return should be 1 if dir exists and fail_on_exist=False
        self.assertEqual(create_dir(tmp_dir_path, fail_on_exist=False), 1)

        # if dir not there make it and return always 0
        self.assertEqual(create_dir(tmp_dir_path2), 0)
        self.assertEqual(create_dir(tmp_dir_path3, fail_on_exist=True), 0)
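
Note: the assertions in this test pin down a return-code contract for create_dir. The snippet below is a minimal sketch of that contract as implied by the test, not QIIME's actual implementation; only the function name and keyword arguments are taken from the example above, the body is assumed.

import os

def create_dir_sketch(dir_path, fail_on_exist=False, handle_errors_externally=False):
    # Assumed behaviour, reconstructed from the assertions above (not the real QIIME code):
    # return 0 when the directory is created, 1 when it already exists, and raise
    # OSError only if fail_on_exist=True and errors are not handled externally.
    if os.path.exists(dir_path):
        if fail_on_exist and not handle_errors_externally:
            raise OSError('directory %s already exists' % dir_path)
        return 1
    os.mkdir(dir_path)
    return 0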
Code example #2
    def test_make_table_file(self):
        random_dir_name = get_random_directory_name(output_dir='/tmp')
        foldername = random_dir_name

        self._dir_to_clean_up = foldername

        try:
            os.mkdir(foldername)
        except OSError:
            pass

        obs = foldername

        try:
            os.mkdir(os.path.join(obs, "otu_network"))
        except OSError:
            pass

        obs = os.path.join(obs, "otu_network")
        make_table_file(self.edge_file_str, self.labels, obs,
                        "real_edge_table.txt")

        self.assertTrue(
            exists(foldername + "/otu_network/real_edge_table.txt"),
            'The file was not created in \
the appropriate location')
Code example #3
    def setUp(self):
        """define some top-level data"""

        qiime_dir = get_qiime_project_dir()

        self.key = 'qiime_test'
        self.project_id = 'qiime_test'
        self.sample_id = 'qiime_sample1'
        self.params=[('key', self.key), ('sample', self.sample_id), \
                     ('project', self.project_id)]
        test_dir = path.dirname(path.abspath(__file__))
        self.seq_file=path.join(test_dir,'test_support_files',\
                                'qiime_tutorial_split_lib_seqs_subset.fna')
        self.output_dir = get_random_directory_name(output_dir='/tmp/')
        self.sample_file = [('file', 'qiime_test.fna', fasta_example)]
        self._paths_to_clean_up = []
        self._dirs_to_clean_up = []

        #make the webfile directory
        try:
            mkdir(self.output_dir)
        except OSError:
            pass

        #define directory to clean up
        self._dirs_to_clean_up = [self.output_dir]
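
Note: this setUp only registers paths and directories for cleanup; the matching tearDown is not shown on this page. A hypothetical companion method (assuming plain os.remove and shutil.rmtree are acceptable in this codebase) could look like this sketch:

    def tearDown(self):
        # Hypothetical cleanup companion to the setUp above -- not taken from the
        # original test class. Assumes: from os import remove; from shutil import rmtree.
        for fp in self._paths_to_clean_up:
            remove(fp)
        for dp in self._dirs_to_clean_up:
            rmtree(dp, ignore_errors=True)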
Code example #4
    def test_make_table_file(self):
        random_dir_name = get_random_directory_name(output_dir='/tmp')
        foldername = random_dir_name

        self._dir_to_clean_up = foldername

        try:
            os.mkdir(foldername)
        except OSError:
            pass

        obs = foldername

        try:
            os.mkdir(os.path.join(obs, "otu_network"))
        except OSError:
            pass

        obs = os.path.join(obs, "otu_network")
        make_table_file(
            self.edge_file_str,
            self.labels,
            obs,
            "real_edge_table.txt")

        self.assertTrue(exists(foldername + "/otu_network/real_edge_table.txt"), 'The file was not created in \
the appropriate location')
Code example #5
    def setUp(self):
        # create the temporary input files that will be used

        self.iupac = {
            'A': 'A',
            'T': 'T',
            'G': 'G',
            'C': 'C',
            'R': '[AG]',
            'Y': '[CT]',
            'S': '[GC]',
            'W': '[AT]',
            'K': '[GT]',
            'M': '[AC]',
            'B': '[CGT]',
            'D': '[AGT]',
            'H': '[ACT]',
            'V': '[ACG]',
            'N': '[ACGT]'
        }

        self.output_dir = get_random_directory_name(prefix='/tmp/')
        self.output_dir += '/'

        create_dir(self.output_dir)
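
Note: the iupac dictionary above maps IUPAC degeneracy codes to regular-expression character classes. A minimal illustration of how such a table is typically used to match degenerate primers (a standalone sketch, not code from the module under test):

import re

iupac = {'A': 'A', 'T': 'T', 'G': 'G', 'C': 'C', 'R': '[AG]', 'Y': '[CT]',
         'S': '[GC]', 'W': '[AT]', 'K': '[GT]', 'M': '[AC]', 'B': '[CGT]',
         'D': '[AGT]', 'H': '[ACT]', 'V': '[ACG]', 'N': '[ACGT]'}

def primer_to_regex(primer):
    # Expand each degenerate base into its character class, e.g. 'GGRC' -> 'GG[AG]C'.
    return ''.join(iupac[base] for base in primer.upper())

# matches both 'GGAC' and 'GGGC'
pattern = re.compile(primer_to_regex('GGRC'))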
Code example #6
    def setUp(self):
        """define some top-level data"""
        
        qiime_dir = get_qiime_project_dir()
        
        self.key='qiime_test'
        self.project_id='qiime_test'
        self.sample_id='qiime_sample1'
        self.params=[('key', self.key), ('sample', self.sample_id), \
                     ('project', self.project_id)]
        test_dir = path.dirname(path.abspath(__file__))
        self.seq_file=path.join(test_dir,'test_support_files',\
                                'qiime_tutorial_split_lib_seqs_subset.fna')
        self.output_dir=get_random_directory_name(output_dir='/tmp/')
        self.sample_file=[('file','qiime_test.fna',fasta_example)]
        self._paths_to_clean_up = []
        self._dirs_to_clean_up = []

        #make the webfile directory
        try:
            mkdir(self.output_dir)
        except OSError:
            pass

        
        #define directory to clean up
        self._dirs_to_clean_up = [self.output_dir]
Code example #7
def main():
    option_parser, opts, args = parse_command_line_parameters(**script_info)
    if not opts.counts_fname:
        option_parser.error("An otu table file must be specified")

    if not opts.map_fname:
        option_parser.error("A map file must be specified")

    prefs,data,background_color,label_color, ball_scale, arrow_colors= \
             sample_color_prefs_and_map_data_from_options(opts)


    dir_path = opts.dir_path

    if dir_path==None or dir_path=='':
        dir_path = get_random_directory_name()

    create_dir(dir_path)
    create_dir(os.path.join(dir_path,"otu_network"))
    create_dir(os.path.join(dir_path,"otu_network/props"))
    create_dir(os.path.join(dir_path,"otu_network/stats"))

    map_lines = open(opts.map_fname,'U').readlines()
    otu_sample_lines = open(opts.counts_fname, 'U').readlines()
    create_network_and_stats(dir_path,map_lines,otu_sample_lines,prefs,data,background_color,label_color)
Code example #8
    def test_make_props_files(self):
        random_dir_name=get_random_directory_name(output_dir='/tmp')
        foldername = random_dir_name

        self._dir_to_clean_up = foldername

        try:
            os.mkdir(foldername)
        except OSError:
            pass

        obs=foldername

        try:
            os.mkdir(os.path.join(obs,"otu_network"))
        except OSError:
            pass

        try:
            os.mkdir(os.path.join(obs,"otu_network/props"))
        except OSError:
            pass

        obs = os.path.join(obs,"otu_network")
 
        self.assertTrue(exists(foldername+"/otu_network/props/"),'The file was not created in \
the appropriate location')
        self.assertTrue(exists(foldername+"/otu_network/props"),'The file was not created in \
the appropriate location')
Code example #9
    def test_mothur_supported_version(self):
        """mothur is in path and version is supported """
        acceptable_version = (1,6,0)
        self.assertTrue(app_path('mothur'),
                        "mothur not found. This may or may not be a problem depending on "+\
                        "which components of QIIME you plan to use.")
        # mothur creates a log file in cwd, so create a tmp and cd there first
        tmp_dir = get_random_directory_name(output_dir='/tmp/')
        command = "cd %s ; mothur -v | grep ^mothur" % tmp_dir
        proc = Popen(command,shell=True,universal_newlines=True,\
                     stdout=PIPE,stderr=STDOUT)
        stdout, stderr = proc.communicate()
        version_string = stdout.strip().split(' ')[1].strip('v.')
        try:
            version = tuple(map(int,version_string.split('.')))
            pass_test = version == acceptable_version
        except ValueError:
            pass_test = False
            version_string = stdout
        self.assertTrue(pass_test,\
                        "Unsupported mothur version. %s is required, but running %s." \
                        % ('.'.join(map(str,acceptable_version)), version_string))

        # remove the directory and the log file
        rmtree(tmp_dir)
Code example #10
    def test_make_props_files(self):
        random_dir_name = get_random_directory_name(output_dir='/tmp')
        foldername = random_dir_name

        self._dir_to_clean_up = foldername

        try:
            os.mkdir(foldername)
        except OSError:
            pass

        obs = foldername

        try:
            os.mkdir(os.path.join(obs, "otu_network"))
        except OSError:
            pass

        try:
            os.mkdir(os.path.join(obs, "otu_network/props"))
        except OSError:
            pass

        obs = os.path.join(obs, "otu_network")

        self.assertTrue(
            exists(foldername + "/otu_network/props/"),
            'The file was not created in \
the appropriate location')
        self.assertTrue(
            exists(foldername + "/otu_network/props"),
            'The file was not created in \
the appropriate location')
Code example #11
    def setUp(self):
        # create the temporary input files that will be used
        
        self.iupac = {'A':'A', 'T':'T', 'G':'G', 'C':'C', 'R':'[AG]',
            'Y':'[CT]', 'S':'[GC]', 'W':'[AT]', 'K':'[GT]', 'M':'[AC]',
            'B':'[CGT]','D':'[AGT]', 'H':'[ACT]', 'V':'[ACG]', 'N':'[ACGT]'}
 
        self.output_dir = get_random_directory_name(prefix = '/tmp/')
        self.output_dir += '/'
        
        create_dir(self.output_dir)
Code example #12
    def test_make_stats_files(self):
        random_dir_name = get_random_directory_name(output_dir='/tmp')
        foldername = random_dir_name
        self._dir_to_clean_up = foldername

        try:
            os.mkdir(foldername)
        except OSError:
            pass

        obs = foldername

        try:
            os.mkdir(os.path.join(obs, "otu_network"))
        except OSError:
            pass

        try:
            os.mkdir(os.path.join(obs, "otu_network/stats"))
        except OSError:
            pass

        obs = os.path.join(obs, "otu_network")
        make_stats_files(self.sample_dc, self.otu_dc, self.degree_counts,
                         self.num_con_cat, self.num_con, self.num_cat,
                         self.cat_by_sample, obs)

        self.assertTrue(
            exists(foldername + "/otu_network/stats/real_dc_otu_degree.txt"),
            'The file was not created in \
the appropriate location')
        self.assertTrue(
            exists(foldername +
                   "/otu_network/stats/real_dc_sample_degree.txt"),
            'The file was not created in \
the appropriate location')
        self.assertTrue(
            exists(foldername +
                   "/otu_network/stats/real_dc_sample_otu_degree.txt"),
            'The file was not created in \
the appropriate location')
        self.assertTrue(
            exists(foldername + "/otu_network/stats/real_cat_stats_Day.txt"),
            'The file was not created in \
the appropriate location')

        self.assertTrue(
            exists(foldername + "/otu_network/stats/real_cat_stats_time.txt"),
            'The file was not created in \
the appropriate location')
Code example #13
    def setUp(self):
        # create the temporary input files that will be used

        self._files_to_remove = []

        # Need an empty input directory to control fasta files present

        self.input_dir = get_random_directory_name(prefix = '/tmp/') + "/"
        # Input data
        self.sample_fasta1 = sample_fasta1
        self.sample_fasta2 = sample_fasta2
        self.sample_fasta3 = sample_fasta3

        self.fasta1_fp = join(self.input_dir, "fasta1.fasta")
        map_file = open(self.fasta1_fp, 'w')
        map_file.write(self.sample_fasta1)
        map_file.close()

        self.fasta2_fp = join(self.input_dir, "fasta2.fna")
        map_file = open(self.fasta2_fp, 'w')
        map_file.write(self.sample_fasta2)
        map_file.close()

        self.fasta3_fp = join(self.input_dir, "fasta3.fa")
        map_file = open(self.fasta3_fp, 'w')
        map_file.write(self.sample_fasta3)
        map_file.close()

        # Output data

        self.output_dir = get_random_directory_name(prefix = '/tmp/')
        self.output_dir += '/'

        create_dir(self.output_dir)

        self._files_to_remove =\
            [self.fasta1_fp, self.fasta2_fp, self.fasta3_fp]
Code example #14
    def setUp(self):
        # create the temporary input files that will be used

        self._files_to_remove = []

        # Need an empty input directory to control fasta files present

        self.input_dir = get_random_directory_name(prefix='/tmp/') + "/"
        # Input data
        self.sample_fasta1 = sample_fasta1
        self.sample_fasta2 = sample_fasta2
        self.sample_fasta3 = sample_fasta3

        self.fasta1_fp = join(self.input_dir, "fasta1.fasta")
        map_file = open(self.fasta1_fp, 'w')
        map_file.write(self.sample_fasta1)
        map_file.close()

        self.fasta2_fp = join(self.input_dir, "fasta2.fna")
        map_file = open(self.fasta2_fp, 'w')
        map_file.write(self.sample_fasta2)
        map_file.close()

        self.fasta3_fp = join(self.input_dir, "fasta3.fa")
        map_file = open(self.fasta3_fp, 'w')
        map_file.write(self.sample_fasta3)
        map_file.close()

        # Output data

        self.output_dir = get_random_directory_name(prefix='/tmp/')
        self.output_dir += '/'

        create_dir(self.output_dir)

        self._files_to_remove =\
         [self.fasta1_fp, self.fasta2_fp, self.fasta3_fp]
Code example #15
File: test_util.py  Project: carze/clovr-base
    def test_create_dir(self):
        """create_dir creates dir and fails meaningful."""

        tmp_dir_path = get_random_directory_name()
        tmp_dir_path2 = get_random_directory_name(suppress_mkdir=True)
        tmp_dir_path3 = get_random_directory_name(suppress_mkdir=True)

        self.dirs_to_remove.append(tmp_dir_path)
        self.dirs_to_remove.append(tmp_dir_path2)
        self.dirs_to_remove.append(tmp_dir_path3)

        # create on existing dir raises OSError if fail_on_exist=True
        self.assertRaises(OSError, create_dir, tmp_dir_path,
                          fail_on_exist=True)
        self.assertEqual(create_dir(tmp_dir_path,
                                    fail_on_exist=True,
                                    handle_errors_externally=True), 1)

        # return should be 1 if dir exists and fail_on_exist=False
        self.assertEqual(create_dir(tmp_dir_path, fail_on_exist=False), 1)

        # if dir not there make it and return always 0
        self.assertEqual(create_dir(tmp_dir_path2), 0)
        self.assertEqual(create_dir(tmp_dir_path3, fail_on_exist=True), 0)
Code example #16
    def test_make_stats_files(self):
        random_dir_name = get_random_directory_name(output_dir='/tmp')
        foldername = random_dir_name
        self._dir_to_clean_up = foldername

        try:
            os.mkdir(foldername)
        except OSError:
            pass

        obs = foldername

        try:
            os.mkdir(os.path.join(obs, "otu_network"))
        except OSError:
            pass

        try:
            os.mkdir(os.path.join(obs, "otu_network/stats"))
        except OSError:
            pass

        obs = os.path.join(obs, "otu_network")
        make_stats_files(
            self.sample_dc,
            self.otu_dc,
            self.degree_counts,
            self.num_con_cat,
            self.num_con,
            self.num_cat,
            self.cat_by_sample,
            obs)

        self.assertTrue(exists(foldername + "/otu_network/stats/real_dc_otu_degree.txt"), 'The file was not created in \
the appropriate location')
        self.assertTrue(exists(foldername + "/otu_network/stats/real_dc_sample_degree.txt"), 'The file was not created in \
the appropriate location')
        self.assertTrue(exists(foldername + "/otu_network/stats/real_dc_sample_otu_degree.txt"), 'The file was not created in \
the appropriate location')
        self.assertTrue(exists(foldername + "/otu_network/stats/real_cat_stats_Day.txt"), 'The file was not created in \
the appropriate location')

        self.assertTrue(exists(foldername + "/otu_network/stats/real_cat_stats_time.txt"), 'The file was not created in \
the appropriate location')
Code example #17
    def setUp(self):
        # create the temporary input files that will be used

        self._files_to_remove = []

        self.qual_fp = get_tmp_filename(
            prefix='qual_scores_',
            suffix='.qual')
        seq_file = open(self.qual_fp, 'w')
        seq_file.write(qual_scores)
        seq_file.close()

        self.output_dir = get_random_directory_name(prefix='/tmp/')
        self.output_dir += '/'

        create_dir(self.output_dir)

        self.expected_output_text_file = expected_output_text_file

        self._files_to_remove =\
            [self.qual_fp]
Code example #18
File: test_biplots.py  Project: jb2263/qiime
    def test_get_taxa(self):
        rand_fname = get_random_directory_name(suppress_mkdir=True)
        rand_fname += '_tmp.txt'
        fout = open(rand_fname,'w')
        lines = ['#Full OTU Counts', \
                     'Taxon\tA\tB\tC', \
                     'Root;Bacteria;Acidobacteria\t0.1\t0.2\t0.3', \
                     'Root;Bacteria;TM7\t0.05\t0.0\t0.3' \
                ]
        fout.write('\n'.join(lines))
        fout.close()

        otu_ids = ['Root;Bacteria;Acidobacteria','Root;Bacteria;TM7']
        otu_table = np.array([[0.1,0.3],[0.05,0.3]])
        res = bp.get_taxa(rand_fname,sample_ids_kept=['A','C'])
        self.assertEqual(res[0],otu_ids)
        self.assertEqual(res[1],otu_table)

        # remove temporary file
        system('rm %s' %(rand_fname))
        pass
Code example #19
    def setUp(self):
        # create the temporary input files that will be used

        self._files_to_remove = []


        self.qual_fp = get_tmp_filename(\
         prefix = 'qual_scores_',
         suffix = '.qual')
        seq_file = open(self.qual_fp, 'w')
        seq_file.write(qual_scores)
        seq_file.close()

        self.output_dir = get_random_directory_name(prefix='/tmp/')
        self.output_dir += '/'

        create_dir(self.output_dir)

        self.expected_output_text_file = expected_output_text_file


        self._files_to_remove =\
         [self.qual_fp]
Code example #20
    def setUp(self):
        """define some top-level data"""

        qiime_dir = get_qiime_project_dir()

        self.key = "qiime_test"
        self.project_id = "qiime_test"
        self.sample_id = "qiime_sample1"
        self.params = [("key", self.key), ("sample", self.sample_id), ("project", self.project_id)]
        test_dir = path.dirname(path.abspath(__file__))
        self.seq_file = path.join(test_dir, "test_support_files", "qiime_tutorial_split_lib_seqs_subset.fna")
        self.output_dir = get_random_directory_name(output_dir="/tmp/")
        self.sample_file = [("file", "qiime_test.fna", fasta_example)]
        self._paths_to_clean_up = []
        self._dirs_to_clean_up = []

        # make the webfile directory
        try:
            mkdir(self.output_dir)
        except OSError:
            pass

        # define directory to clean up
        self._dirs_to_clean_up = [self.output_dir]
Code example #21
def main():
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    # Some code for error checking of input args:

    # Check if distance_matrix_file is valid:
    try:
        d_header, d_mat = parse_distmat(open(opts.distance_matrix_file, "U"))
    except:
        option_parser.error(
            "This does not look like a valid distance matrix file.  Please supply a valid distance matrix file using the -d option."
        )

    if not is_symmetric_and_hollow(d_mat):
        option_parser.error("The distance matrix must be symmetric and " "hollow.")

    # Check if map_fname is valid:
    try:
        mapping, m_header, m_comments = parse_mapping_file(open(opts.map_fname, "U"))
    except QiimeParseError:
        option_parser.error(
            "This does not look like a valid metadata mapping file.  Please supply a valid mapping file using the -m option."
        )

    # make sure background_color is valid
    if opts.background_color not in ["black", "white"]:
        option_parser.error(
            "'%s' is not a valid background color.  Please pass in either 'black' or 'white' using the -k option."
            % (opts.background_color)
        )

    # make sure prefs file is valid if it exists
    if opts.prefs_path is not None:
        try:
            prefs_file = open(opts.prefs_path, "U").read()
        except IOError:
            option_parser.error(
                "Provided prefs file, '%s', does not exist.  Please pass in a valid prefs file with the -p option."
                % (opts.prefs_path)
            )

    if opts.prefs_path is not None:
        prefs = parse_prefs_file(prefs_file)
    else:
        prefs = None

    color_prefs, color_data, background_color, label_color, ball_scale, arrow_colors = sample_color_prefs_and_map_data_from_options(
        opts
    )

    # list of labelname, groups, colors, data_colors, data_color_order
    groups_and_colors = list(iter_color_groups(mapping=color_data["map"], prefs=color_prefs))

    # dict mapping labelname to list of: [groups, colors, data_colors,
    # data_color_order]
    field_to_colors = {}
    for color_info in groups_and_colors:
        field_to_colors[color_info[0]] = color_info[1:]

    qiime_dir = get_qiime_project_dir() + "/qiime/support_files/"

    fields = opts.fields
    if fields is not None:
        fields = map(strip, fields.split(","))
        fields = [i.strip('"').strip("'") for i in fields]
    elif prefs is not None:
        fields = prefs.get("FIELDS", None)
    else:
        fields = get_interesting_mapping_fields(mapping, m_header)

    # Check that all provided fields are valid:
    if fields is not None:
        for f in fields:
            if f not in m_header:
                option_parser.error(
                    "The field, %s, is not in the provided mapping file.  Please supply correct fields (using the -f option or providing a 'FIELDS' list in the prefs file) corresponding to fields in mapping file."
                    % (f)
                )

    within_distances, between_distances, dmat = group_distances(
        mapping_file=opts.map_fname,
        dmatrix_file=opts.distance_matrix_file,
        fields=fields,
        dir_prefix=get_random_directory_name(output_dir=opts.dir_path, prefix="distances"),
    )

    if not opts.suppress_html_output:
        # histograms output path
        histograms_path = path.join(opts.dir_path, "histograms")
        try:
            mkdir(histograms_path)
        except OSError:  # raised if dir exists
            pass

        # draw all histograms
        distances_dict, label_to_histogram_filename = draw_all_histograms(
            single_field=within_distances,
            paired_field=between_distances,
            dmat=dmat,
            histogram_dir=histograms_path,
            field_to_color_prefs=field_to_colors,
            background_color=background_color,
        )

        # Get relative path to histogram files.
        label_to_histogram_filename_relative = _make_relative_paths(label_to_histogram_filename, opts.dir_path)

        dm_fname = path.split(opts.distance_matrix_file)[-1]
        basename = path.splitext(dm_fname)[0]
        outfile_name = basename + "_distance_histograms.html"
        make_main_html(
            distances_dict=distances_dict,
            label_to_histogram_filename=label_to_histogram_filename_relative,
            root_outdir=opts.dir_path,
            outfile_name=outfile_name,
            title="Distance Histograms",
        )

        # Handle saving web resources locally.
        # javascript file
        javascript_path = path.join(opts.dir_path, "js")
        try:
            mkdir(javascript_path)
        except OSError:  # raised if dir exists
            pass
        js_out = open(javascript_path + "/histograms.js", "w")
        js_out.write(open(qiime_dir + "js/histograms.js").read())
        js_out.close()

    monte_carlo_iters = opts.monte_carlo_iters
    if monte_carlo_iters > 0:
        # Do Monte Carlo for all fields
        monte_carlo_group_distances(
            mapping_file=opts.map_fname,
            dmatrix_file=opts.distance_matrix_file,
            prefs=prefs,
            dir_prefix=opts.dir_path,
            fields=fields,
            default_iters=monte_carlo_iters,
        )

        # Do Monte Carlo for within and between fields
        monte_carlo_group_distances_within_between(
            single_field=within_distances,
            paired_field=between_distances,
            dmat=dmat,
            dir_prefix=opts.dir_path,
            num_iters=monte_carlo_iters,
        )
Code example #22
def generate_2d_plots(prefs, data, html_dir_path, data_dir_path, filename,
                      background_color, label_color, generate_scree):
    """Generate interactive 2D scatterplots"""
    coord_tups = [("1", "2"), ("3", "2"), ("1", "3")]
    mapping = data['map']
    out_table = ''
    #Iterate through prefs and generate html files for each colorby option
    #Sort by the column name first
    sample_location = {}

    groups_and_colors = iter_color_groups(mapping, prefs)
    groups_and_colors = list(groups_and_colors)

    for i in range(len(groups_and_colors)):
        labelname = groups_and_colors[i][0]
        groups = groups_and_colors[i][1]
        colors = groups_and_colors[i][2]
        data_colors = groups_and_colors[i][3]
        data_color_order = groups_and_colors[i][4]

        data_file_dir_path = get_random_directory_name(
            output_dir=data_dir_path)

        new_link = os.path.split(data_file_dir_path)
        data_file_link=os.path.join('.', os.path.split(new_link[-2])[-1], \
                                    new_link[-1])

        new_col_name = labelname
        img_data = {}
        plot_label = labelname

        if data.has_key('support_pcoas'):
            matrix_average, matrix_low, matrix_high, eigval_average, m_names = \
                summarize_pcoas(data['coord'], data['support_pcoas'],
                method=data['ellipsoid_method'])
            data['coord'] = \
                (m_names,matrix_average,data['coord'][2],data['coord'][3])
            for i in range(len(m_names)):
                sample_location[m_names[i]] = i
        else:
            matrix_average = None
            matrix_low = None
            matrix_high = None
            eigval_average = None
            m_names = None
        iterator = 0

        for coord_tup in coord_tups:
            if isarray(matrix_low) and isarray(matrix_high) and \
                                                isarray(matrix_average):
                coord_1r = asarray(matrix_low)
                coord_2r = asarray(matrix_high)
                mat_ave = asarray(matrix_average)
            else:
                coord_1r = None
                coord_2r = None
                mat_ave = None
                sample_location = None

            coord_1, coord_2 = coord_tup
            img_data[coord_tup] = draw_pcoa_graph(plot_label,data_file_dir_path,
                                                 data_file_link,coord_1,coord_2,
                                                 coord_1r, coord_2r, mat_ave,\
                                                 sample_location,
                                                 data,prefs,groups,colors,
                                                 background_color,label_color,
                                                 data_colors,data_color_order,
                                                 generate_eps=True)

        out_table += TABLE_HTML % (labelname, "<br>".join(
            img_data[("1", "2")]), "<br>".join(
                img_data[("3", "2")]), "<br>".join(img_data[("1", "3")]))

    if generate_scree:
        data_file_dir_path = get_random_directory_name(
            output_dir=data_dir_path)
        new_link = os.path.split(data_file_dir_path)
        data_file_link = os.path.join('.',
                                      os.path.split(new_link[-2])[-1],
                                      new_link[-1])

        img_src, download_link = draw_scree_graph(data_file_dir_path,
                                                  data_file_link,
                                                  background_color,
                                                  label_color,
                                                  generate_eps=True,
                                                  data=data)

        out_table += SCREE_TABLE_HTML % ("<br>".join((img_src, download_link)))

    outfile = create_html_filename(filename, '.html')
    outfile = os.path.join(html_dir_path, outfile)

    write_html_file(out_table, outfile)
Code example #23
def main():
    print "\nWarning: make_3d_plots.py is being deprecated in favor of make_emperor.py, and will no longer be available in QIIME 1.8.0-dev.\n"

    option_parser, opts, args = parse_command_line_parameters(**script_info)

    prefs, data, background_color, label_color, ball_scale, arrow_colors= \
                            sample_color_prefs_and_map_data_from_options(opts)
    
    
    plot_scaled= 'scaled' in opts.scaling_method
    plot_unscaled= 'unscaled' in opts.scaling_method
    
    if opts.output_format == 'invue':
        # validating the number of points for interpolation
        if (opts.interpolation_points<0):
            option_parser.error('The --interpolation_points should be ' +\
                            'greater or equal to 0.')
                            
        # make sure that coord file has internally consistent # of columns
        coord_files_valid = validate_coord_files(opts.coord_fname)
        if not coord_files_valid:
            option_parser.error('Every line of every coord file must ' +\
                            'have the same number of columns.')
                            
        #Open and get coord data
        data['coord'] = get_coord(opts.coord_fname, opts.ellipsoid_method)
    
        # remove any samples not present in mapping file
        remove_unmapped_samples(data['map'],data['coord'])

        # if no samples overlapped between mapping file and otu table, exit
        if len(data['coord'][0]) == 0:
            print "\nError: OTU table and mapping file had no samples in common\n"
            exit(1)

        if opts.output_dir:
            create_dir(opts.output_dir,False)
            dir_path=opts.output_dir
        else:
            dir_path='./'
        
        filepath=opts.coord_fname
        if os.path.isdir(filepath):
            coord_files = [fname for fname in os.listdir(filepath) if not \
                           fname.startswith('.')]
            filename = os.path.split(coord_files[0])[-1]
        else:
            filename = os.path.split(filepath)[-1]

        generate_3d_plots_invue(prefs, data, dir_path, filename, \
            opts.interpolation_points, opts.polyhedron_points, \
            opts.polyhedron_offset)
        
        #finish script
        return

    # Potential conflicts
    if not opts.custom_axes is None and os.path.isdir(opts.coord_fname):
        # can't do averaged pcoa plots _and_ custom axes in the same plot
        option_parser.error("Please supply either custom axes or multiple coordinate \
files, but not both.")
    # check that smoothness is an integer between 0 and 3
    try:
        ellipsoid_smoothness = int(opts.ellipsoid_smoothness)
    except:
        option_parser.error("Please supply an integer ellipsoid smoothness \
value.")
    if ellipsoid_smoothness < 0 or ellipsoid_smoothness > 3:
        option_parser.error("Please supply an ellipsoid smoothness value \
between 0 and 3.")
    # check that opacity is a float between 0 and 1
    try:
        ellipsoid_alpha = float(opts.ellipsoid_opacity)
    except:
        option_parser.error("Please supply a number for ellipsoid opacity.")
    if ellipsoid_alpha < 0 or ellipsoid_alpha > 1:
        option_parser.error("Please supply an ellipsoid opacity value \
between 0 and 1.")
    # check that ellipsoid method is valid
    ellipsoid_methods = ['IQR','sdev']
    if not opts.ellipsoid_method in ellipsoid_methods:
        option_parser.error("Please supply a valid ellipsoid method. \
Valid methods are: " + ', '.join(ellipsoid_methods) + ".")
  
    # gather ellipsoid drawing preferences
    ellipsoid_prefs = {}
    ellipsoid_prefs["smoothness"] = ellipsoid_smoothness
    ellipsoid_prefs["alpha"] = ellipsoid_alpha

    # make sure that coord file has internally consistent # of columns
    coord_files_valid = validate_coord_files(opts.coord_fname)
    if not coord_files_valid:
        option_parser.error('Every line of every coord file must ' +\
                            'have the same number of columns.')

    #Open and get coord data
    data['coord'] = get_coord(opts.coord_fname, opts.ellipsoid_method)
    
    # remove any samples not present in mapping file
    remove_unmapped_samples(data['map'],data['coord'])
    
    # if no samples overlapped between mapping file and otu table, exit
    if len(data['coord'][0]) == 0:
        print "\nError: OTU table and mapping file had no samples in common\n"
        exit(1)

    # process custom axes, if present.
    custom_axes = None
    if opts.custom_axes:
        custom_axes = process_custom_axes(opts.custom_axes)

        get_custom_coords(custom_axes, data['map'], data['coord'])
        remove_nans(data['coord'])
        scale_custom_coords(custom_axes,data['coord'])

    # process vectors if requested
    if opts.add_vectors:
        add_vectors={}
        add_vectors['vectors'] = opts.add_vectors.split(',')
        add_vectors['weight_by_vector'] = opts.weight_by_vector
        if len(add_vectors)>3:
            raise ValueError, 'You must add maximum 3 columns but %s' % opts.add_vectors
        
        # Validating Vectors values
        if opts.vectors_algorithm:
            axes_number = len(data['coord'][1][1])
            if opts.vectors_axes<0 or opts.vectors_axes>axes_number:
                raise ValueError, 'vectors_algorithm should be between 0 and the max number' +\
                      'of samples/pcoa-axes: %d' % len(data['coord'][1][1])
            if opts.vectors_axes == 0: 
                opts.vectors_axes = axes_number
            add_vectors['vectors_axes'] = opts.vectors_axes
            valid_chars = '_.abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'
            for c in opts.vectors_path:
                if c not in valid_chars:
                    raise ValueError, 'vectors_path (%s) has invalid chars' % opts.vectors_path
            add_vectors['vectors_output'] = {}
            add_vectors['vectors_algorithm']=opts.vectors_algorithm
            add_vectors['eigvals'] = data['coord'][3]
            add_vectors['window_size'] = None

            # checks specific for the modified first difference algorithm
            if add_vectors['vectors_algorithm'] == 'wdiff':
                try:
                    add_vectors['window_size'] = int(opts.window_size)
                except TypeError:
                    raise TypeError, 'Specify --window_size as an integer'

                # sanity check as the value can only be greater or equal to one
                if add_vectors['window_size'] < 1:
                    raise ValueError, 'The value of window_size is invalid, '+\
                        'the value must be greater than zero, not %d' % add_vectors['window_size']

        else:
            add_vectors['vectors_algorithm'] = None
        add_vectors['vectors_path'] = opts.vectors_path
    else:
        add_vectors = None

    if opts.taxa_fname != None:
        # get taxonomy counts
        # get list of sample_ids that haven't been removed
        sample_ids = data['coord'][0]
        # get taxa summaries for all sample_ids
        lineages, taxa_counts = get_taxa(opts.taxa_fname, sample_ids)
        data['taxa'] = {}
        data['taxa']['lineages'] = lineages
        data['taxa']['counts'] = taxa_counts

        # get average relative abundance of taxa
        data['taxa']['prevalence'] = get_taxa_prevalence(data['taxa']['counts'])
        # get coordinates of taxa (weighted mean of sample scores)
        data['taxa']['coord'] = get_taxa_coords(data['taxa']['counts'],
            data['coord'][1])
        
        # trim results, do NOT change order
        # check: https://github.com/qiime/qiime/issues/677
        remove_rare_taxa(data['taxa'],nkeep=opts.n_taxa_keep)
        
        # write taxa coords if requested
        if not opts.biplot_output_file is None:
            output = make_biplot_scores_output(data['taxa'])            
            fout = open(opts.biplot_output_file,'w')
            fout.write('\n'.join(output))
            fout.close()

    if opts.output_dir:
        create_dir(opts.output_dir,False)
        dir_path=opts.output_dir
    else:
        dir_path='./'
    
    qiime_dir=get_qiime_project_dir()

    jar_path=os.path.join(qiime_dir,'qiime/support_files/jar/')

    data_dir_path = get_random_directory_name(output_dir=dir_path,
                                              return_absolute_path=False)    
    
    try:
        os.mkdir(data_dir_path)
    except OSError:
        pass

    data_file_path=data_dir_path

    jar_dir_path = os.path.join(dir_path,'jar')
    
    try:
        os.mkdir(jar_dir_path)
    except OSError:
        pass
    
    shutil.copyfile(os.path.join(jar_path,'king.jar'), os.path.join(jar_dir_path,'king.jar'))

    filepath=opts.coord_fname
    if os.path.isdir(filepath):
        coord_files = [fname for fname in os.listdir(filepath) if not \
                           fname.startswith('.')]
        filename = os.path.split(coord_files[0])[-1]
    else:
        filename = os.path.split(filepath)[-1]

    try:
        action = generate_3d_plots
    except NameError:
        action = None

    #Place this outside try/except so we don't mask NameError in action
    if action:
        action(prefs,data,custom_axes,background_color,label_color,dir_path, \
                data_file_path,filename,ellipsoid_prefs=ellipsoid_prefs, \
                add_vectors=add_vectors, plot_scaled=plot_scaled, \
                plot_unscaled=plot_unscaled)
Code example #24
File: make_2d_plots.py  Project: anwarMZ/metabeta
def generate_2d_plots(prefs,data,html_dir_path,data_dir_path,filename,
                        background_color,label_color,generate_scree):
    """Generate interactive 2D scatterplots"""
    coord_tups = [("1", "2"), ("3", "2"), ("1", "3")]
    mapping=data['map']
    out_table=''
    #Iterate through prefs and generate html files for each colorby option
    #Sort by the column name first
    sample_location={}

    groups_and_colors=iter_color_groups(mapping,prefs)
    groups_and_colors=list(groups_and_colors)
    radiobuttons = []
    for i in range(len(groups_and_colors)):
        labelname=groups_and_colors[i][0] #'EnvoID'
        groups=groups_and_colors[i][1]    #defaultdict(<type 'list'>, {'mangrove biome/marine habitat/ocean water': ['BBA.number1.filt..660397', 'BBA.number2.filt..660380', ...}
        colors=groups_and_colors[i][2]    #{'mangrove biome/marine habitat/ocean water': 'red5', 'Small lake biome/marine habitat/saline lake sediment': 'cyan1', 
        data_colors=groups_and_colors[i][3]#{'orange1': <qiime.colors.Color object at 0x25f1210>, 'orange3': 
        data_color_order=groups_and_colors[i][4]#['red1', 'blue1', 'orange1', 'green1', 'purple1', 'yellow1', 'cyan1', 'pink1', 'teal1', ...]
        
        data_file_dir_path = get_random_directory_name(output_dir=data_dir_path)
        
        new_link=os.path.split(data_file_dir_path)
        data_file_link=os.path.join('.', os.path.split(new_link[-2])[-1], \
                                    new_link[-1])

        new_col_name=labelname
        img_data = {}
        plot_label=labelname
        
        if data.has_key('support_pcoas'):
            matrix_average, matrix_low, matrix_high, eigval_average, m_names = \
                summarize_pcoas(data['coord'], data['support_pcoas'], 
                method=data['ellipsoid_method'])
            data['coord'] = \
                (m_names,matrix_average,data['coord'][2],data['coord'][3])
            for i in range(len(m_names)):
                sample_location[m_names[i]]=i
        else: 
            matrix_average = None
            matrix_low =  None
            matrix_high =  None
            eigval_average =  None
            m_names =  None
        iterator=0

        for coord_tup in coord_tups: # change, if you want more thatn one PCoA plot! (i.e involving PC3)
            if isarray(matrix_low) and isarray(matrix_high) and \
                                                isarray(matrix_average):
                coord_1r=asarray(matrix_low)
                coord_2r=asarray(matrix_high)
                mat_ave=asarray(matrix_average)
            else:
                coord_1r=None
                coord_2r=None
                mat_ave=None
                sample_location=None
            
            coord_1, coord_2 = coord_tup
    
            img_data[coord_tup] = draw_pcoa_graph(plot_label,data_file_dir_path,
                                                 data_file_link,coord_1,coord_2,
                                                 coord_1r, coord_2r, mat_ave,\
                                                 sample_location,
                                                 data,prefs,groups,colors,
                                                 background_color,label_color,
                                                 data_colors,data_color_order,
                                                 generate_eps=True) 
        radiobuttons.append(RADIO % (data_file_link, labelname))

        if i == 0: ## only create first table!
            out_table += TABLE_HTML % (labelname, 
                                       "<br>".join(img_data[("1", "2")]),
                                       "<br>".join(img_data[("3", "2")]),
                                       "<br>".join(img_data[("1", "3")]))


    if generate_scree:
        data_file_dir_path = get_random_directory_name(output_dir = data_dir_path)
        new_link = os.path.split(data_file_dir_path)
        data_file_link = os.path.join('.', os.path.split(new_link[-2])[-1], new_link[-1])

        img_src, download_link = draw_scree_graph(data_file_dir_path, data_file_link, background_color,
                            label_color, generate_eps = True, data = data)

        out_table += SCREE_TABLE_HTML % ("<br>".join((img_src, download_link)))

    out_table = "\n".join(radiobuttons) + out_table
    outfile = create_html_filename(filename,'.html')
    outfile = os.path.join(html_dir_path,outfile)
        
    write_html_file(out_table,outfile)
Code example #25
File: make_2d_plots.py  Project: rob-knight/qiime
def main():
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    data = {}

    prefs,data,background_color,label_color,ball_scale, arrow_colors= \
                            sample_color_prefs_and_map_data_from_options(opts)

    data['ellipsoid_method'] = opts.ellipsoid_method

    if 0.00 <= opts.ellipsoid_opacity <= 1.00:
        data['alpha'] = opts.ellipsoid_opacity
    else:
        raise ValueError, 'The opacity must be a value between 0 and 1!'

    #Open and get coord data
    if os.path.isdir(opts.coord_fname) and opts.master_pcoa:
        data['coord'], data['support_pcoas'] = load_pcoa_files(
            opts.coord_fname)
        data['coord'] = get_coord(opts.master_pcoa)
    elif os.path.isdir(opts.coord_fname):
        data['coord'], data['support_pcoas'] = load_pcoa_files(
            opts.coord_fname)
    else:
        data['coord'] = get_coord(opts.coord_fname)

    filepath = opts.coord_fname
    basename, extension = os.path.splitext(filepath)
    filename = '%s_2D_PCoA_plots' % (basename)

    qiime_dir = get_qiime_project_dir()

    js_path = os.path.join(qiime_dir, 'qiime', 'support_files', 'js')

    if opts.output_dir:
        if os.path.exists(opts.output_dir):
            dir_path = opts.output_dir
        else:
            try:
                os.mkdir(opts.output_dir)
                dir_path = opts.output_dir
            except OSError:
                pass
    else:
        dir_path = './'

    html_dir_path = dir_path
    data_dir_path = get_random_directory_name(output_dir=dir_path)
    try:
        os.mkdir(data_dir_path)
    except OSError:
        pass

    js_dir_path = os.path.join(html_dir_path, 'js')
    try:
        os.mkdir(js_dir_path)
    except OSError:
        pass

    shutil.copyfile(os.path.join(js_path,'overlib.js'), \
                                    os.path.join(js_dir_path,'overlib.js'))

    try:
        action = generate_2d_plots
    except NameError:
        action = None
    #Place this outside try/except so we don't mask NameError in action
    if action:
        action(prefs, data, html_dir_path, data_dir_path, filename,
               background_color, label_color, opts.scree)
Code example #26
def main():
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    #Some code for error checking of input args:

    #Check if distance_matrix_file is valid:
    try:
        d_header, d_mat = parse_distmat(open(opts.distance_matrix_file, 'U'))
    except:
        option_parser.error(
            "This does not look like a valid distance matrix file.  Please supply a valid distance matrix file using the -d option."
        )

    if not is_symmetric_and_hollow(d_mat):
        option_parser.error("The distance matrix must be symmetric and "
                            "hollow.")

    #Check if map_fname is valid:
    try:
        mapping, m_header, m_comments = \
            parse_mapping_file(open(opts.map_fname,'U'))
    except QiimeParseError:
        option_parser.error(
            "This does not look like a valid metadata mapping file.  Please supply a valid mapping file using the -m option."
        )

    #make sure background_color is valid
    if opts.background_color not in ['black', 'white']:
        option_parser.error(
            "'%s' is not a valid background color.  Please pass in either 'black' or 'white' using the -k option."
            % (opts.background_color))

    #make sure prefs file is valid if it exists
    if opts.prefs_path is not None:
        try:
            prefs_file = open(opts.prefs_path, 'U').read()
        except IOError:
            option_parser.error(
                "Provided prefs file, '%s', does not exist.  Please pass in a valid prefs file with the -p option."
                % (opts.prefs_path))

    if opts.prefs_path is not None:
        prefs = parse_prefs_file(prefs_file)
    else:
        prefs = None


    color_prefs, color_data, background_color, label_color, ball_scale,\
     arrow_colors=sample_color_prefs_and_map_data_from_options(opts)

    #list of labelname, groups, colors, data_colors, data_color_order
    groups_and_colors=list(iter_color_groups(mapping=color_data['map'],\
        prefs=color_prefs))

    #dict mapping labelname to list of: [groups, colors, data_colors,
    # data_color_order]
    field_to_colors = {}
    for color_info in groups_and_colors:
        field_to_colors[color_info[0]] = color_info[1:]

    qiime_dir = get_qiime_project_dir() + '/qiime/support_files/'

    fields = opts.fields
    if fields is not None:
        fields = map(strip, fields.split(','))
        fields = [i.strip('"').strip("'") for i in fields]
    elif prefs is not None:
        fields = prefs.get('FIELDS', None)
    else:
        fields = get_interesting_mapping_fields(mapping, m_header)

    #Check that all provided fields are valid:
    if fields is not None:
        for f in fields:
            if f not in m_header:
                option_parser.error(
                    "The field, %s, is not in the provided mapping file.  Please supply correct fields (using the -f option or providing a 'FIELDS' list in the prefs file) corresponding to fields in mapping file."
                    % (f))

    within_distances, between_distances, dmat = \
        group_distances(mapping_file=opts.map_fname,\
        dmatrix_file=opts.distance_matrix_file,\
        fields=fields,\
        dir_prefix=get_random_directory_name(output_dir=opts.dir_path,\
            prefix='distances'))

    if not opts.suppress_html_output:
        #histograms output path
        histograms_path = path.join(opts.dir_path, 'histograms')
        try:
            mkdir(histograms_path)
        except OSError:  #raised if dir exists
            pass

        #draw all histograms
        distances_dict, label_to_histogram_filename = \
            draw_all_histograms(single_field=within_distances, \
                paired_field=between_distances, \
                dmat=dmat,\
                histogram_dir=histograms_path,\
                field_to_color_prefs=field_to_colors,\
                background_color=background_color)

        #Get relative path to histogram files.
        label_to_histogram_filename_relative = \
            _make_relative_paths(label_to_histogram_filename, opts.dir_path)

        dm_fname = path.split(opts.distance_matrix_file)[-1]
        basename = path.splitext(dm_fname)[0]
        outfile_name = basename + '_distance_histograms.html'
        make_main_html(distances_dict=distances_dict,\
            label_to_histogram_filename=label_to_histogram_filename_relative,\
            root_outdir=opts.dir_path, \
            outfile_name = outfile_name, \
            title='Distance Histograms')

        #Handle saving web resources locally.
        #javascript file
        javascript_path = path.join(opts.dir_path, 'js')
        try:
            mkdir(javascript_path)
        except OSError:  #raised if dir exists
            pass
        js_out = open(javascript_path + '/histograms.js', 'w')
        js_out.write(open(qiime_dir + 'js/histograms.js').read())
        js_out.close()

    monte_carlo_iters = opts.monte_carlo_iters
    if monte_carlo_iters > 0:
        #Do Monte Carlo for all fields
        monte_carlo_group_distances(mapping_file=opts.map_fname,\
            dmatrix_file=opts.distance_matrix_file,\
            prefs=prefs, \
            dir_prefix = opts.dir_path,\
            fields=fields,\
            default_iters=monte_carlo_iters)

        #Do Monte Carlo for within and between fields
        monte_carlo_group_distances_within_between(\
            single_field=within_distances,\
            paired_field=between_distances, dmat=dmat, \
            dir_prefix = opts.dir_path,\
            num_iters=monte_carlo_iters)
Code example #27
def main():
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    input_dir = opts.input_dir
    imagetype = opts.imagetype
    resolution = opts.resolution
    output_dir = opts.output_dir
    ymax = opts.ymax
    std_type = opts.std_type
    suppress_webpage = opts.suppress_html_output
    output_type = opts.output_type
    generate_per_sample_plots = opts.generate_per_sample_plots
    generate_average_tables = opts.generate_average_tables

    # Get the command-line options.
    prefs, data, background_color, label_color, ball_scale, arrow_colors = \
        sample_color_prefs_and_map_data_from_options(opts)

    rares = {}
    if isdir(input_dir):
        rarenames = listdir(input_dir)
        rarenames = [r for r in rarenames if not r.startswith('.')]
        for r in rarenames:
            try:
                rarefl = open(path.join(input_dir, r), 'U').readlines()
                rares[r] = parse_rarefaction(rarefl)
            except(IOError):
                option_parser.error('Problem with rarefaction file. %s' %
                                    exc_info()[1])
                exit(0)
    else:
        try:
            input_file = input_dir.split(',')
            for i in range(len(input_file)):
                input_path = split(input_file[i])[-1]
                rarefl = open(input_file[i], 'U').readlines()
                rares[input_path] = parse_rarefaction(rarefl)
        except(IOError):
            option_parser.error('Problem with rarefaction file. %s' %
                                exc_info()[1])
            exit(0)
    if imagetype not in ['png', 'svg', 'pdf']:
        option_parser.error('Supplied extension not supported.')
        exit(0)

    try:
        resolution = int(resolution)
    except(ValueError):
        option_parser.error('Invalid resolution.')
        exit(0)

    # output directory check
    if isinstance(output_dir, str) and output_dir != '.':
        if exists(output_dir):
            output_dir = output_dir
        else:
            try:
                create_dir(output_dir, False)
                output_dir = output_dir
            except(ValueError):
                option_parser.error('Could not create output directory.')
                exit(0)
    else:
        output_dir = get_random_directory_name()

    # Generate the plots and html text
    html_output = make_averages(prefs, data, background_color, label_color,
                                rares, output_dir, resolution, imagetype, ymax,
                                suppress_webpage, std_type, output_type,
                                generate_per_sample_plots=generate_per_sample_plots,
                                generate_average_tables=generate_average_tables)

    if html_output:
        # Write the html file.
        outfile = open(path.join(output_dir, 'rarefaction_plots.html'), 'w')
        outfile.write(html_output)
        outfile.close()
Code example #28
File: make_3d_plots.py  Project: qinjunjie/qiime
def main():
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    prefs, data, background_color, label_color, ball_scale, arrow_colors = sample_color_prefs_and_map_data_from_options(
        opts
    )

    scaling_methods = opts.scaling_method.split(",")
    plot_scaled = False
    plot_unscaled = False
    for i in scaling_methods:
        if i.lower() == "scaled":
            plot_scaled = True
        elif i.lower() == "unscaled":
            plot_unscaled = True

    if not (plot_scaled or plot_unscaled):
        raise ValueError, "You must choose a valid scaling method (scaled or unscaled)"

    if opts.output_format == "invue":
        # validating the number of points for interpolation
        if opts.interpolation_points < 0:
            option_parser.error("The --interpolation_points should be " + "greater or equal to 0.")

        # make sure that coord file has internally consistent # of columns
        coord_files_valid = validate_coord_files(opts.coord_fname)
        if not coord_files_valid:
            option_parser.error("Every line of every coord file must " + "have the same number of columns.")

        # Open and get coord data
        data["coord"] = get_coord(opts.coord_fname, opts.ellipsoid_method)

        # remove any samples not present in mapping file
        remove_unmapped_samples(data["map"], data["coord"])

        # if no samples overlapped between mapping file and otu table, exit
        if len(data["coord"][0]) == 0:
            print "\nError: OTU table and mapping file had no samples in common\n"
            exit(1)

        if opts.output_dir:
            create_dir(opts.output_dir, False)
            dir_path = opts.output_dir
        else:
            dir_path = "./"

        filepath = opts.coord_fname
        if os.path.isdir(filepath):
            coord_files = [fname for fname in os.listdir(filepath) if not fname.startswith(".")]
            filename = os.path.split(coord_files[0])[-1]
        else:
            filename = os.path.split(filepath)[-1]

        generate_3d_plots_invue(
            prefs, data, dir_path, filename, opts.interpolation_points, opts.polyhedron_points, opts.polyhedron_offset
        )

        # finish script
        return

    # Potential conflicts
    if not opts.custom_axes is None and os.path.isdir(opts.coord_fname):
        # can't do averaged pcoa plots _and_ custom axes in the same plot
        option_parser.error(
            "Please supply either custom axes or multiple coordinate \
files, but not both."
        )
    # check that smoothness is an integer between 0 and 3
    try:
        ellipsoid_smoothness = int(opts.ellipsoid_smoothness)
    except:
        option_parser.error(
            "Please supply an integer ellipsoid smoothness \
value."
        )
    if ellipsoid_smoothness < 0 or ellipsoid_smoothness > 3:
        option_parser.error(
            "Please supply an ellipsoid smoothness value \
between 0 and 3."
        )
    # check that opacity is a float between 0 and 1
    try:
        ellipsoid_alpha = float(opts.ellipsoid_opacity)
    except:
        option_parser.error("Please supply a number for ellipsoid opacity.")
    if ellipsoid_alpha < 0 or ellipsoid_alpha > 1:
        option_parser.error(
            "Please supply an ellipsoid opacity value \
between 0 and 1."
        )
    # check that ellipsoid method is valid
    ellipsoid_methods = ["IQR", "sdev"]
    if not opts.ellipsoid_method in ellipsoid_methods:
        option_parser.error(
            "Please supply a valid ellipsoid method. \
Valid methods are: "
            + ", ".join(ellipsoid_methods)
            + "."
        )

    # gather ellipsoid drawing preferences
    ellipsoid_prefs = {}
    ellipsoid_prefs["smoothness"] = ellipsoid_smoothness
    ellipsoid_prefs["alpha"] = ellipsoid_alpha

    # make sure that coord file has internally consistent # of columns
    coord_files_valid = validate_coord_files(opts.coord_fname)
    if not coord_files_valid:
        option_parser.error("Every line of every coord file must " + "have the same number of columns.")

    # Open and get coord data
    data["coord"] = get_coord(opts.coord_fname, opts.ellipsoid_method)

    # remove any samples not present in mapping file
    remove_unmapped_samples(data["map"], data["coord"])

    # if no samples overlapped between mapping file and otu table, exit
    if len(data["coord"][0]) == 0:
        print "\nError: OTU table and mapping file had no samples in common\n"
        exit(1)

    # process custom axes, if present.
    custom_axes = None
    if opts.custom_axes:
        custom_axes = process_custom_axes(opts.custom_axes)

        get_custom_coords(custom_axes, data["map"], data["coord"])
        remove_nans(data["coord"])
        scale_custom_coords(custom_axes, data["coord"])

    # process vectors if requested
    if opts.add_vectors:
        add_vectors = {}
        add_vectors["vectors"] = opts.add_vectors.split(",")
        add_vectors["weight_by_vector"] = opts.weight_by_vector
        # check the number of requested vector columns, not the size of the dict
        if len(add_vectors["vectors"]) > 3:
            raise ValueError, "A maximum of 3 vector columns can be added, but got: %s" % opts.add_vectors

        # Validating Vectors values
        if opts.vectors_algorithm:
            axes_number = len(data["coord"][1][1])
            if opts.vectors_axes < 0 or opts.vectors_axes > axes_number:
                raise ValueError, "--vectors_axes should be between 0 and the maximum number of samples/pcoa-axes: %d" % axes_number
            if opts.vectors_axes == 0:
                opts.vectors_axes = axes_number
            add_vectors["vectors_axes"] = opts.vectors_axes
            valid_chars = "_.abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
            for c in opts.vectors_path:
                if c not in valid_chars:
                    raise ValueError, "vectors_path (%s) has invalid chars" % opts.vectors_path
            add_vectors["vectors_output"] = {}
            add_vectors["vectors_algorithm"] = opts.vectors_algorithm
            add_vectors["eigvals"] = data["coord"][3]
            add_vectors["window_size"] = None

            # checks specific for the modified first difference algorithm
            if add_vectors["vectors_algorithm"] == "wdiff":
                try:
                    add_vectors["window_size"] = int(opts.window_size)
                except (TypeError, ValueError):
                    raise TypeError, "Specify --window_size as an integer"

                # sanity check as the value can only be greater or equal to one
                if add_vectors["window_size"] < 1:
                    raise ValueError, "The value of window_size is invalid, " + "the value must be greater than zero, not %d" % add_vectors[
                        "window_size"
                    ]

        else:
            add_vectors["vectors_algorithm"] = None
        add_vectors["vectors_path"] = opts.vectors_path
    else:
        add_vectors = None

    if opts.taxa_fname != None:
        # get taxonomy counts
        # get list of sample_ids that haven't been removed
        sample_ids = data["coord"][0]
        # get taxa summaries for all sample_ids
        lineages, taxa_counts = get_taxa(opts.taxa_fname, sample_ids)
        data["taxa"] = {}
        data["taxa"]["lineages"] = lineages
        data["taxa"]["counts"] = taxa_counts

        # get average relative abundance of taxa
        data["taxa"]["prevalence"] = get_taxa_prevalence(data["taxa"]["counts"])
        remove_rare_taxa(data["taxa"], nkeep=opts.n_taxa_keep)
        # get coordinates of taxa (weighted mean of sample scores)
        data["taxa"]["coord"] = get_taxa_coords(data["taxa"]["counts"], data["coord"][1])
        data["taxa"]["coord"]

        # write taxa coords if requested
        if not opts.biplot_output_file is None:
            output = make_biplot_scores_output(data["taxa"])
            fout = open(opts.biplot_output_file, "w")
            fout.write("\n".join(output))
            fout.close()

    if opts.output_dir:
        create_dir(opts.output_dir, False)
        dir_path = opts.output_dir
    else:
        dir_path = "./"

    qiime_dir = get_qiime_project_dir()

    jar_path = os.path.join(qiime_dir, "qiime/support_files/jar/")

    data_dir_path = get_random_directory_name(output_dir=dir_path, return_absolute_path=False)

    try:
        os.mkdir(data_dir_path)
    except OSError:
        pass

    data_file_path = data_dir_path

    jar_dir_path = os.path.join(dir_path, "jar")

    try:
        os.mkdir(jar_dir_path)
    except OSError:
        pass

    shutil.copyfile(os.path.join(jar_path, "king.jar"), os.path.join(jar_dir_path, "king.jar"))

    filepath = opts.coord_fname
    if os.path.isdir(filepath):
        coord_files = [fname for fname in os.listdir(filepath) if not fname.startswith(".")]
        filename = os.path.split(coord_files[0])[-1]
    else:
        filename = os.path.split(filepath)[-1]

    try:
        action = generate_3d_plots
    except NameError:
        action = None

    # Place this outside try/except so we don't mask NameError in action
    if action:
        action(
            prefs,
            data,
            custom_axes,
            background_color,
            label_color,
            dir_path,
            data_file_path,
            filename,
            ellipsoid_prefs=ellipsoid_prefs,
            add_vectors=add_vectors,
            plot_scaled=plot_scaled,
            plot_unscaled=plot_unscaled,
        )
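
The middle of this example sets up the output scaffolding: a randomly named data directory plus a jar/ directory holding king.jar. A condensed sketch of that step, using only the standard library, is given below; prepare_plot_dirs is a hypothetical name and tempfile.mkdtemp stands in for get_random_directory_name.

import os
import shutil
import tempfile

def prepare_plot_dirs(dir_path, jar_source):
    # Per-run data directory with a unique name inside the output directory.
    data_dir = tempfile.mkdtemp(dir=dir_path)
    # jar/ directory next to it, created only if it does not exist yet.
    jar_dir = os.path.join(dir_path, "jar")
    if not os.path.isdir(jar_dir):
        os.mkdir(jar_dir)
    # Copy the viewer jar (e.g. king.jar) into the jar/ directory.
    shutil.copyfile(jar_source, os.path.join(jar_dir, os.path.basename(jar_source)))
    return data_dir, jar_dir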
Code example #29
0
File: compare_3d_plots.py Project: Jorge-C/qiime
def main():
    print "\nWarning: compare_3d_plots.py is being deprecated in favor of make_emperor.py, and will no longer be available in QIIME 1.8.0-dev.\n"

    option_parser, opts, args = parse_command_line_parameters(**script_info)

    prefs, data, background_color, label_color, ball_scale, arrow_colors = sample_color_prefs_and_map_data_from_options(
        opts
    )

    if len(opts.coord_fnames) < 2 and opts.edges_file is None:
        option_parser.error("Please provide at least two " + "coordinate files or a custom edges file")

    # Open and get coord data (for multiple coords files)
    coord_files = opts.coord_fnames
    coord_files_valid = validate_coord_files(coord_files)
    if not coord_files_valid:
        option_parser.error("Every line of every coord file must " + "have the same number of columns.")
    num_coord_files = len(coord_files)
    data["edges"], data["coord"] = get_multiple_coords(coord_files, opts.edges_file, opts.serial)

    # if the edges file wasn't supplied, we appended _i to each file's samples
    # therefore we now add duplicated samples with _0, _1,... to mapping file
    if opts.edges_file is None:
        newmap = [data["map"][0]]
        for i in xrange(len(coord_files)):
            for sample in data["map"][1:]:
                newsample = ["%s_%d" % (sample[0], i)]
                newsample.extend(sample[1:])
                newmap.append(newsample)
        data["map"] = newmap

    # remove any samples not present in mapping file
    remove_unmapped_samples(data["map"], data["coord"], data["edges"])

    if len(data["coord"][1]) == 0:
        raise ValueError, "\n\nError: None of the sample IDs in the coordinates files were present in the mapping file.\n"

    # process custom axes, if present.
    custom_axes = None
    if opts.custom_axes:
        custom_axes = process_custom_axes(opts.custom_axes)
        get_custom_coords(custom_axes, data["map"], data["coord"])
        remove_nans(data["coord"])
        scale_custom_coords(custom_axes, data["coord"])

    # Generate random output file name and create directories
    if opts.output_dir:
        create_dir(opts.output_dir)
        dir_path = opts.output_dir
    else:
        dir_path = "./"

    qiime_dir = get_qiime_project_dir()

    jar_path = os.path.join(qiime_dir, "qiime/support_files/jar/")

    data_dir_path = get_random_directory_name(output_dir=dir_path, return_absolute_path=False)

    try:
        os.mkdir(data_dir_path)
    except OSError:
        pass

    jar_dir_path = os.path.join(dir_path, "jar")

    try:
        os.mkdir(jar_dir_path)
    except OSError:
        pass

    shutil.copyfile(os.path.join(jar_path, "king.jar"), os.path.join(jar_dir_path, "king.jar"))

    filepath = coord_files[0]
    filename = filepath.strip().split("/")[-1]

    try:
        action = generate_3d_plots
    except NameError:
        action = None

    # Place this outside try/except so we don't mask NameError in action
    if action:
        generate_3d_plots(
            prefs,
            data,
            custom_axes,
            background_color,
            label_color,
            dir_path,
            data_dir_path,
            filename,
            ball_scale=ball_scale,
            arrow_colors=arrow_colors,
            user_supplied_edges=not (opts.edges_file is None),
        )
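
When no edges file is supplied, the example duplicates every mapping row once per coordinate file and appends _0, _1, ... to the sample IDs so they match the suffixed coordinates. The sketch below isolates that transformation; suffix_mapping_samples is a hypothetical helper name.

def suffix_mapping_samples(mapping, n_coord_files):
    # mapping is a list of rows; the first row is the header and is kept as-is.
    newmap = [mapping[0]]
    for i in range(n_coord_files):
        for sample in mapping[1:]:
            # prepend the suffixed sample ID, keep the remaining columns
            newmap.append(["%s_%d" % (sample[0], i)] + list(sample[1:]))
    return newmap

# Example with made-up data:
# suffix_mapping_samples([["SampleID", "Treatment"], ["S1", "Control"]], 2)
# -> [["SampleID", "Treatment"], ["S1_0", "Control"], ["S1_1", "Control"]]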
Code example #30
0
File: make_2d_plots.py Project: cmhill/qiime
def main():
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    matplotlib_version = re.split("[^\d]", matplotlib.__version__)
    matplotlib_version_info = tuple([int(i) for i in matplotlib_version if \
                            i.isdigit()])

    if matplotlib_version_info != (1,1,0):
        print "This code was only tested with Matplotlib-1.1.0"
        
    data = {}

    prefs,data,background_color,label_color,ball_scale, arrow_colors= \
                            sample_color_prefs_and_map_data_from_options(opts)

    
    data['ellipsoid_method']=opts.ellipsoid_method
   
    if 0.00 <= opts.ellipsoid_opacity <= 1.00:
        data['alpha']=opts.ellipsoid_opacity
    else:
        raise ValueError, 'The opacity must be a value between 0 and 1!'
    
    #Open and get coord data
    if os.path.isdir(opts.coord_fname) and opts.master_pcoa:
        data['coord'],data['support_pcoas'] = load_pcoa_files(opts.coord_fname)
        data['coord']=get_coord(opts.master_pcoa)
    elif os.path.isdir(opts.coord_fname):
        data['coord'],data['support_pcoas'] = load_pcoa_files(opts.coord_fname)
    else:
        data['coord'] = get_coord(opts.coord_fname)

    filepath=opts.coord_fname
    basename,extension=os.path.splitext(filepath)
    filename='%s_2D_PCoA_plots' % (basename)

    qiime_dir=get_qiime_project_dir()

    js_path=os.path.join(qiime_dir,'qiime','support_files','js')

    if opts.output_dir:
        if os.path.exists(opts.output_dir):
            dir_path=opts.output_dir
        else:
            try:
                os.mkdir(opts.output_dir)
                dir_path=opts.output_dir
            except OSError:
                # mkdir failed; fall back to the current directory unless the
                # directory exists after all (e.g. created concurrently)
                dir_path=opts.output_dir if os.path.exists(opts.output_dir) else './'
    else:
        dir_path='./'
        
    html_dir_path=dir_path
    data_dir_path = get_random_directory_name(output_dir=dir_path)
    try:
        os.mkdir(data_dir_path)
    except OSError:
        pass

    js_dir_path = os.path.join(html_dir_path,'js')
    try:
        os.mkdir(js_dir_path)
    except OSError:
        pass

    shutil.copyfile(os.path.join(js_path,'overlib.js'), \
                                    os.path.join(js_dir_path,'overlib.js'))

    try:
        action = generate_2d_plots
    except NameError:
        action = None
    #Place this outside try/except so we don't mask NameError in action
    if action:
        action(prefs,data,html_dir_path,data_dir_path,filename,background_color,
                label_color,opts.scree)
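
The version check at the top of this example splits matplotlib.__version__ on non-digit characters and compares the result to (1, 1, 0). The same logic, restated as a small standalone helper (matplotlib_version_tuple is a hypothetical name):

import re
import matplotlib

def matplotlib_version_tuple():
    # '1.1.0' -> (1, 1, 0): split on non-digits and keep the numeric pieces
    parts = re.split(r"[^\d]", matplotlib.__version__)
    return tuple(int(p) for p in parts if p.isdigit())

# The script above only warns when this is anything other than (1, 1, 0).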
Code example #31
0
    def setUp(self):
        # create the temporary input files that will be used

        self._files_to_remove = []

        self.sample_fasta_file1_data = sample_fasta_file1
        self.sample_fasta_file_bad_labels_data =\
         sample_fasta_file_bad_labels

        self.sample_mapping_file1_data = sample_mapping_file1
        self.sample_mapping_file_no_revprimer_header =\
         sample_mapping_file_no_revprimer_header
        self.sample_mapping_file_bad_revprimer =\
         sample_mapping_file_bad_revprimer
        self.expected_truncation_default_settings =\
         expected_truncation_default_settings
        self.expected_truncation_zero_mismatches =\
         expected_truncation_zero_mismatches
        self.expected_truncation_zero_mismatches_truncate_remove =\
         expected_truncation_zero_mismatches_truncate_remove

        self.fasta_fp = get_tmp_filename(\
         prefix = 'fasta_seqs_',
         suffix = '.fna')
        seq_file = open(self.fasta_fp, 'w')
        seq_file.write(self.sample_fasta_file1_data)
        seq_file.close()

        self.fasta_badlabels_fp = get_tmp_filename(\
         prefix = "fasta_seqs_badlabels_",
         suffix = ".fna")
        seq_file = open(self.fasta_badlabels_fp, "w")
        seq_file.write(self.sample_fasta_file_bad_labels_data)
        seq_file.close()

        self.mapping_fp = get_tmp_filename(\
         prefix = 'sample_mapping_',
         suffix = '.txt')
        mapping_file = open(self.mapping_fp, "w")
        mapping_file.write(self.sample_mapping_file1_data)
        mapping_file.close()

        self.mapping_bad_header_fp = get_tmp_filename(\
         prefix = 'sample_mapping_badheader_',
         suffix = ".txt")
        mapping_file = open(self.mapping_bad_header_fp, "w")
        mapping_file.write(self.sample_mapping_file_no_revprimer_header)
        mapping_file.close()

        self.mapping_bad_primer_fp = get_tmp_filename(\
         prefix = 'sample_mapping_badprimer_',
         suffix = ".txt")
        mapping_file = open(self.mapping_bad_primer_fp, "w")
        mapping_file.write(self.sample_mapping_file_bad_revprimer)
        mapping_file.close()

        self.output_dir = get_random_directory_name(prefix='/tmp/')
        self.output_dir += '/'

        create_dir(self.output_dir)

        self._files_to_remove =\
         [self.fasta_fp, self.mapping_fp, self.mapping_bad_header_fp,
         self.mapping_bad_primer_fp, self.fasta_badlabels_fp]
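
The snippet only shows setUp; the files and the output directory it registers are presumably removed again in a tearDown that is not included here. A minimal sketch of what such a cleanup could look like, using only the standard library:

import os
import shutil

def cleanup(files_to_remove, dirs_to_remove):
    # remove the temporary input files created in setUp
    for fp in files_to_remove:
        if os.path.exists(fp):
            os.remove(fp)
    # remove temporary directories (e.g. the random output_dir) recursively
    for dp in dirs_to_remove:
        if os.path.isdir(dp):
            shutil.rmtree(dp)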
Code example #32
0
File: make_2d_plots.py Project: DDomogala3/qiime
def generate_2d_plots(prefs,data,html_dir_path,data_dir_path,filename,
                        background_color,label_color,generate_scree):
    """Generate interactive 2D scatterplots"""
    coord_tups = [("1", "2"), ("3", "2"), ("1", "3")]
    mapping=data['map']
    out_table=''
    #Iterate through prefs and generate html files for each colorby option
    #Sort by the column name first
    sample_location={}

    groups_and_colors=iter_color_groups(mapping,prefs)
    groups_and_colors=list(groups_and_colors)

    for i in range(len(groups_and_colors)):
        labelname=groups_and_colors[i][0]
        groups=groups_and_colors[i][1]
        colors=groups_and_colors[i][2]
        data_colors=groups_and_colors[i][3]
        data_color_order=groups_and_colors[i][4]
        
        data_file_dir_path = get_random_directory_name(output_dir=data_dir_path)
        
        new_link=os.path.split(data_file_dir_path)
        data_file_link=os.path.join('.', os.path.split(new_link[-2])[-1], \
                                    new_link[-1])

        new_col_name=labelname
        img_data = {}
        plot_label=labelname
        
        if data.has_key('support_pcoas'):
            matrix_average, matrix_low, matrix_high, eigval_average, m_names = \
                summarize_pcoas(data['coord'], data['support_pcoas'], 
                method=data['ellipsoid_method'])
            data['coord'] = \
                (m_names,matrix_average,data['coord'][2],data['coord'][3])
            for i in range(len(m_names)):
                sample_location[m_names[i]]=i
        else: 
            matrix_average = None
            matrix_low =  None
            matrix_high =  None
            eigval_average =  None
            m_names =  None
        iterator=0

        for coord_tup in coord_tups:
            if isarray(matrix_low) and isarray(matrix_high) and \
                                                isarray(matrix_average):
                coord_1r=asarray(matrix_low)
                coord_2r=asarray(matrix_high)
                mat_ave=asarray(matrix_average)
            else:
                coord_1r=None
                coord_2r=None
                mat_ave=None
                sample_location=None
            
            coord_1, coord_2 = coord_tup
            img_data[coord_tup] = draw_pcoa_graph(plot_label,data_file_dir_path,
                                                 data_file_link,coord_1,coord_2,
                                                 coord_1r, coord_2r, mat_ave,\
                                                 sample_location,
                                                 data,prefs,groups,colors,
                                                 background_color,label_color,
                                                 data_colors,data_color_order,
                                                 generate_eps=True)    

        out_table += TABLE_HTML % (labelname, 
                                    "<br>".join(img_data[("1", "2")]),
                                    "<br>".join(img_data[("3", "2")]),
                                    "<br>".join(img_data[("1", "3")]))


    if generate_scree:
        data_file_dir_path = get_random_directory_name(output_dir = data_dir_path)
        new_link = os.path.split(data_file_dir_path)
        data_file_link = os.path.join('.', os.path.split(new_link[-2])[-1], new_link[-1])

        img_src, download_link = draw_scree_graph(data_file_dir_path, data_file_link, background_color,
                            label_color, generate_eps = True, data = data)

        out_table += SCREE_TABLE_HTML % ("<br>".join((img_src, download_link)))
                                   
    outfile = create_html_filename(filename,'.html')
    outfile = os.path.join(html_dir_path,outfile)
        
    write_html_file(out_table,outfile)
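
Each color-by category gets its own randomly named data directory, and the HTML refers to it through a relative './<parent>/<leaf>' link built from the last two path components. The sketch below reproduces just that link construction; relative_data_link is a hypothetical helper name.

import os

def relative_data_link(data_file_dir_path):
    # keep only the last two path components and prefix them with '.'
    head, leaf = os.path.split(data_file_dir_path)
    parent = os.path.split(head)[-1]
    return os.path.join(".", parent, leaf)

# Example: relative_data_link("/tmp/out/abc123/def456") -> "./abc123/def456"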
Code example #33
0
File: make_2d_plots.py Project: tracykteal/metabeta
def generate_2d_plots(prefs,data,html_dir_path,data_dir_path,filename,
                        background_color,label_color,generate_scree):
    """Generate interactive 2D scatterplots"""
    coord_tups = [("1", "2"), ("3", "2"), ("1", "3")]
    mapping=data['map']
    out_table=''
    #Iterate through prefs and generate html files for each colorby option
    #Sort by the column name first
    sample_location={}

    groups_and_colors=iter_color_groups(mapping,prefs)
    groups_and_colors=list(groups_and_colors)
    radiobuttons = []
    for i in range(len(groups_and_colors)):
        labelname=groups_and_colors[i][0]        # e.g. 'EnvoID'
        groups=groups_and_colors[i][1]           # group name -> list of sample IDs
        colors=groups_and_colors[i][2]           # group name -> color name (e.g. 'red5')
        data_colors=groups_and_colors[i][3]      # color name -> qiime.colors.Color object
        data_color_order=groups_and_colors[i][4] # ordered list of color names
        
        data_file_dir_path = get_random_directory_name(output_dir=data_dir_path)
        
        new_link=os.path.split(data_file_dir_path)
        data_file_link=os.path.join('.', os.path.split(new_link[-2])[-1], \
                                    new_link[-1])

        new_col_name=labelname
        img_data = {}
        plot_label=labelname
        
        if data.has_key('support_pcoas'):
            matrix_average, matrix_low, matrix_high, eigval_average, m_names = \
                summarize_pcoas(data['coord'], data['support_pcoas'], 
                method=data['ellipsoid_method'])
            data['coord'] = \
                (m_names,matrix_average,data['coord'][2],data['coord'][3])
            # use a separate loop variable so the group index i is not clobbered
            for idx in range(len(m_names)):
                sample_location[m_names[idx]]=idx
        else: 
            matrix_average = None
            matrix_low =  None
            matrix_high =  None
            eigval_average =  None
            m_names =  None
        iterator=0

        for coord_tup in coord_tups: # change this if you want more than one PCoA plot (i.e. involving PC3)
            if isarray(matrix_low) and isarray(matrix_high) and \
                                                isarray(matrix_average):
                coord_1r=asarray(matrix_low)
                coord_2r=asarray(matrix_high)
                mat_ave=asarray(matrix_average)
            else:
                coord_1r=None
                coord_2r=None
                mat_ave=None
                sample_location=None
            
            coord_1, coord_2 = coord_tup
    
            img_data[coord_tup] = draw_pcoa_graph(plot_label,data_file_dir_path,
                                                 data_file_link,coord_1,coord_2,
                                                 coord_1r, coord_2r, mat_ave,\
                                                 sample_location,
                                                 data,prefs,groups,colors,
                                                 background_color,label_color,
                                                 data_colors,data_color_order,
                                                 generate_eps=True) 
        radiobuttons.append(RADIO % (data_file_link, labelname))

        if i == 0: ## only create first table!
            out_table += TABLE_HTML % (labelname,
                                       "<br>".join(img_data[("1", "2")]),
                                       "<br>".join(img_data[("3", "2")]),
                                       "<br>".join(img_data[("1", "3")]))


    if generate_scree:
        data_file_dir_path = get_random_directory_name(output_dir = data_dir_path)
        new_link = os.path.split(data_file_dir_path)
        data_file_link = os.path.join('.', os.path.split(new_link[-2])[-1], new_link[-1])

        img_src, download_link = draw_scree_graph(data_file_dir_path, data_file_link, background_color,
                            label_color, generate_eps = True, data = data)

        out_table += SCREE_TABLE_HTML % ("<br>".join((img_src, download_link)))

    out_table = "\n".join(radiobuttons) + out_table
    outfile = create_html_filename(filename,'.html')
    outfile = os.path.join(html_dir_path,outfile)
        
    write_html_file(out_table,outfile)
Code example #34
0
def main():
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    prefs, data, background_color, label_color, ball_scale, arrow_colors = \
                            sample_color_prefs_and_map_data_from_options(opts)
    
    if len(opts.coord_fnames) < 2 and opts.edges_file is None:
        option_parser.error('Please provide at least two ' +\
                     'coordinate files or a custom edges file')

    #Open and get coord data (for multiple coords files)
    coord_files = opts.coord_fnames
    coord_files_valid = validate_coord_files(coord_files)
    if not coord_files_valid:
        option_parser.error('Every line of every coord file must ' +\
                            'have the same number of columns.')
    num_coord_files = len(coord_files)
    data['edges'], data['coord'] = \
        get_multiple_coords(coord_files, opts.edges_file, opts.serial)
    
    # if the edges file wasn't supplied, we appended _i to each file's samples
    # therefore we now add duplicated samples with _0, _1,... to mapping file
    if opts.edges_file is None:
        newmap = [data['map'][0]]
        for i in xrange(len(coord_files)):
            for sample in data['map'][1:]:
                newsample = ['%s_%d' %(sample[0],i)]
                newsample.extend(sample[1:])
                newmap.append(newsample)
        data['map'] = newmap

    # remove any samples not present in mapping file
    remove_unmapped_samples(data['map'],data['coord'],data['edges'])

    if(len(data['coord'][1]) == 0):
        raise ValueError, '\n\nError: None of the sample IDs in the coordinates files were present in the mapping file.\n'
    
    # process custom axes, if present.
    custom_axes = None
    if opts.custom_axes:
        custom_axes = process_custom_axes(opts.custom_axes)
        get_custom_coords(custom_axes, data['map'], data['coord'])
        remove_nans(data['coord'])
        scale_custom_coords(custom_axes,data['coord'])

    

    # Generate random output file name and create directories
    if opts.output_dir:
        create_dir(opts.output_dir)
        dir_path = opts.output_dir
    else:
        dir_path='./'
    
    qiime_dir=get_qiime_project_dir()

    jar_path=os.path.join(qiime_dir,'qiime/support_files/jar/')

    data_dir_path = get_random_directory_name(output_dir=dir_path,
                                              return_absolute_path=False)    

    try:
        os.mkdir(data_dir_path)
    except OSError:
        pass

    jar_dir_path = os.path.join(dir_path,'jar')
    
    try:
        os.mkdir(jar_dir_path)
    except OSError:
        pass
    
    shutil.copyfile(os.path.join(jar_path,'king.jar'), os.path.join(jar_dir_path,'king.jar'))

    filepath=coord_files[0]
    filename=filepath.strip().split('/')[-1]
    
    try:
        action = generate_3d_plots
    except NameError:
        action = None

    #Place this outside try/except so we don't mask NameError in action
    if action:
        generate_3d_plots(prefs, data, custom_axes,
               background_color, label_color,
               dir_path, data_dir_path, filename,
               ball_scale=ball_scale, arrow_colors=arrow_colors,
               user_supplied_edges=not(opts.edges_file is None))
Code example #35
0
File: compare_3d_plots.py Project: clozupone/qiime
def main():
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    prefs, data, background_color, label_color, ball_scale, arrow_colors = \
                            sample_color_prefs_and_map_data_from_options(opts)
    
    if len(opts.coord_fnames.split(',')) < 2 and opts.edges_file is None:
        option_parser.error('Please provide at least two ' +\
                     'coordinate files or a custom edges file')

    #Open and get coord data (for multiple coords files)
    coord_files = process_coord_filenames(opts.coord_fnames)
    coord_files_valid = validate_coord_files(coord_files)
    if not coord_files_valid:
        option_parser.error('Every line of every coord file must ' +\
                            'have the same number of columns.')
    num_coord_files = len(coord_files)
    data['edges'], data['coord'] = \
        get_multiple_coords(coord_files, opts.edges_file, opts.serial)
    
    # if the edges file wasn't supplied, we appended _i to each file's samples
    # therefore we now add duplicated samples with _0, _1,... to mapping file
    if opts.edges_file is None:
        newmap = [data['map'][0]]
        for i in xrange(len(coord_files)):
            for sample in data['map'][1:]:
                newsample = ['%s_%d' %(sample[0],i)]
                newsample.extend(sample[1:])
                newmap.append(newsample)
        data['map'] = newmap

    # remove any samples not present in mapping file
    remove_unmapped_samples(data['map'],data['coord'],data['edges'])

    if(len(data['coord'][1]) == 0):
        raise ValueError, '\n\nError: None of the sample IDs in the coordinates files were present in the mapping file.\n'
    
    # process custom axes, if present.
    custom_axes = None
    if opts.custom_axes:
        custom_axes = process_custom_axes(opts.custom_axes)
        get_custom_coords(custom_axes, data['map'], data['coord'])
        remove_nans(data['coord'])
        scale_custom_coords(custom_axes,data['coord'])

    

    # Generate random output file name and create directories
    if opts.output_dir:
        create_dir(opts.output_dir)
        dir_path = opts.output_dir
    else:
        dir_path='./'
    
    qiime_dir=get_qiime_project_dir()

    jar_path=os.path.join(qiime_dir,'qiime/support_files/jar/')

    data_dir_path = get_random_directory_name(output_dir=dir_path,
                                              return_absolute_path=False)    

    try:
        os.mkdir(data_dir_path)
    except OSError:
        pass

    jar_dir_path = os.path.join(dir_path,'jar')
    
    try:
        os.mkdir(jar_dir_path)
    except OSError:
        pass
    
    shutil.copyfile(os.path.join(jar_path,'king.jar'), os.path.join(jar_dir_path,'king.jar'))

    filepath=coord_files[0]
    filename=filepath.strip().split('/')[-1]
    
    try:
        action = generate_3d_plots
    except NameError:
        action = None

    #Place this outside try/except so we don't mask NameError in action
    if action:
        generate_3d_plots(prefs, data, custom_axes,
               background_color, label_color,
               dir_path, data_dir_path, filename,
               ball_scale=ball_scale, arrow_colors=arrow_colors,
               user_supplied_edges=not(opts.edges_file is None))
Code example #36
0
def main():
    option_parser, options, args = parse_command_line_parameters(**script_info)

    ops = {}
    input_dir = options.input_dir

    rares = {}
    if isdir(input_dir):
        rarenames = listdir(input_dir)
        rarenames = [r for r in rarenames if not r.startswith(".")]
        for r in rarenames:
            try:
                rarefl = open(path.join(input_dir, r), "U").readlines()
                rares[r] = parse_rarefaction(rarefl)
            except (IOError):
                option_parser.error("Problem with rarefaction file. %s" % exc_info()[1])
                exit(0)
    else:
        try:
            input_file = input_dir.split(",")
            for i in range(len(input_file)):
                input_path = split(input_file[i])[-1]
                rarefl = open(input_file[i], "U").readlines()
                rares[input_path] = parse_rarefaction(rarefl)
        except (IOError):
            option_parser.error("Problem with rarefaction file. %s" % exc_info()[1])
            exit(0)
    if options.imagetype not in ["png", "svg", "pdf"]:
        option_parser.error("Supplied extension not supported.")
        exit(0)
    else:
        imagetype = options.imagetype

    try:
        resolution = int(options.resolution)
    except (ValueError):
        option_parser.error("Inavlid resolution.")
        exit(0)

    # Get the command-line options.
    prefs, data, background_color, label_color, ball_scale, arrow_colors = sample_color_prefs_and_map_data_from_options(
        options
    )

    # output directory check
    if isinstance(options.output_dir, str) and options.output_dir != ".":
        if exists(options.output_dir):
            output_dir = options.output_dir
        else:
            try:
                create_dir(options.output_dir, False)
                output_dir = options.output_dir
            except (ValueError):
                option_parser.error("Could not create output directory.")
                exit(0)
    else:
        output_dir = get_random_directory_name()

    # Generate the plots and html text
    ymax = options.ymax
    suppress_webpage = options.suppress_html_output
    html_output = make_averages(
        prefs, data, background_color, label_color, rares, output_dir, resolution, imagetype, ymax, suppress_webpage
    )

    if html_output:
        # Write the html file.
        outfile = open(path.join(output_dir, "rarefaction_plots.html"), "w")
        outfile.write(html_output)
        outfile.close()
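
The input handling above accepts either a directory of rarefaction files or a comma-separated list of file paths. A simplified sketch of that branch is shown below; collect_rarefaction_files is a hypothetical name, and the real script passes each file's lines on to parse_rarefaction.

import os

def collect_rarefaction_files(input_spec):
    # returns {file name: list of lines}, skipping hidden files in directories
    contents = {}
    if os.path.isdir(input_spec):
        names = [n for n in os.listdir(input_spec) if not n.startswith(".")]
        paths = [os.path.join(input_spec, n) for n in names]
    else:
        paths = input_spec.split(",")
    for p in paths:
        handle = open(p)
        contents[os.path.split(p)[-1]] = handle.readlines()
        handle.close()
    return contents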
Code example #37
0
def main():
    option_parser, opts, args = parse_command_line_parameters(**script_info)

    prefs, data, background_color, label_color, ball_scale, arrow_colors= \
                            sample_color_prefs_and_map_data_from_options(opts)
    
    if opts.output_format == 'invue':
        # validating the number of points for interpolation
        if (opts.interpolation_points<0):
            option_parser.error('The --interpolation_points value should be ' +\
                            'greater than or equal to 0.')
                            
        # make sure that coord file has internally consistent # of columns
        coord_files_valid = validate_coord_files(opts.coord_fname)
        if not coord_files_valid:
            option_parser.error('Every line of every coord file must ' +\
                            'have the same number of columns.')
       
        #Open and get coord data
        data['coord'] = get_coord(opts.coord_fname, opts.ellipsoid_method)
    
        # remove any samples not present in mapping file
        remove_unmapped_samples(data['map'],data['coord'])

        # if no samples overlapped between mapping file and otu table, exit
        if len(data['coord'][0]) == 0:
            print "\nError: OTU table and mapping file had no samples in common\n"
            exit(1)
        

        if opts.output_dir:
            create_dir(opts.output_dir,False)
            dir_path=opts.output_dir
        else:
            dir_path='./'
        
        filepath=opts.coord_fname
        if os.path.isdir(filepath):
            coord_files = [fname for fname in os.listdir(filepath) if not \
                           fname.startswith('.')]
            filename = os.path.split(coord_files[0])[-1]
        else:
            filename = os.path.split(filepath)[-1]
	
        generate_3d_plots_invue(prefs, data, dir_path, filename, \
            opts.interpolation_points, opts.polyhedron_points, \
            opts.polyhedron_offset)
        
        #finish script
        return

    # Potential conflicts
    if not opts.custom_axes is None and os.path.isdir(opts.coord_fname):
        # can't do averaged pcoa plots _and_ custom axes in the same plot
        option_parser.error("Please supply either custom axes or multiple coordinate \
files, but not both.")
    # check that smoothness is an integer between 0 and 3
    try:
        ellipsoid_smoothness = int(opts.ellipsoid_smoothness)
    except:
        option_parser.error("Please supply an integer ellipsoid smoothness \
value.")
    if ellipsoid_smoothness < 0 or ellipsoid_smoothness > 3:
        option_parser.error("Please supply an ellipsoid smoothness value \
between 0 and 3.")
    # check that opacity is a float between 0 and 1
    try:
        ellipsoid_alpha = float(opts.ellipsoid_opacity)
    except:
        option_parser.error("Please supply a number for ellipsoid opacity.")
    if ellipsoid_alpha < 0 or ellipsoid_alpha > 1:
        option_parser.error("Please supply an ellipsoid opacity value \
between 0 and 1.")
    # check that ellipsoid method is valid
    ellipsoid_methods = ['IQR','sdev']
    if not opts.ellipsoid_method in ellipsoid_methods:
        option_parser.error("Please supply a valid ellipsoid method. \
Valid methods are: " + ', '.join(ellipsoid_methods) + ".")
  
    # gather ellipsoid drawing preferences
    ellipsoid_prefs = {}
    ellipsoid_prefs["smoothness"] = ellipsoid_smoothness
    ellipsoid_prefs["alpha"] = ellipsoid_alpha

    # make sure that coord file has internally consistent # of columns
    coord_files_valid = validate_coord_files(opts.coord_fname)
    if not coord_files_valid:
        option_parser.error('Every line of every coord file must ' +\
                            'have the same number of columns.')

    #Open and get coord data
    data['coord'] = get_coord(opts.coord_fname, opts.ellipsoid_method)
    
    # remove any samples not present in mapping file
    remove_unmapped_samples(data['map'],data['coord'])
    
    # if no samples overlapped between mapping file and otu table, exit
    if len(data['coord'][0]) == 0:
        print "\nError: OTU table and mapping file had no samples in common\n"
        exit(1)

    if opts.taxa_fname != None:
        # get taxonomy counts
        # get list of sample_ids that haven't been removed
        sample_ids = data['coord'][0]
        # get taxa summaries for all sample_ids
        lineages, taxa_counts = get_taxa(opts.taxa_fname, sample_ids)
        data['taxa'] = {}
        data['taxa']['lineages'] = lineages
        data['taxa']['counts'] = taxa_counts

        # get average relative abundance of taxa
        data['taxa']['prevalence'] = get_taxa_prevalence(data['taxa']['counts'])
        remove_rare_taxa(data['taxa'],nkeep=opts.n_taxa_keep)
        # get coordinates of taxa (weighted mean of sample scores)
        data['taxa']['coord'] = get_taxa_coords(data['taxa']['counts'],
            data['coord'][1])

        # write taxa coords if requested
        if not opts.biplot_output_file is None:
            output = make_biplot_scores_output(data['taxa'])            
            fout = open(opts.biplot_output_file,'w')
            fout.write('\n'.join(output))
            fout.close()

    # process custom axes, if present.
    custom_axes = None
    if opts.custom_axes:
        custom_axes = process_custom_axes(opts.custom_axes)
        get_custom_coords(custom_axes, data['map'], data['coord'])
        remove_nans(data['coord'])
        scale_custom_coords(custom_axes,data['coord'])

    if opts.output_dir:
        create_dir(opts.output_dir,False)
        dir_path=opts.output_dir
    else:
        dir_path='./'
    
    qiime_dir=get_qiime_project_dir()

    jar_path=os.path.join(qiime_dir,'qiime/support_files/jar/')

    data_dir_path = get_random_directory_name(output_dir=dir_path,
                                              return_absolute_path=False)    
    
    try:
        os.mkdir(data_dir_path)
    except OSError:
        pass

    data_file_path=data_dir_path

    jar_dir_path = os.path.join(dir_path,'jar')
    
    try:
        os.mkdir(jar_dir_path)
    except OSError:
        pass
    
    shutil.copyfile(os.path.join(jar_path,'king.jar'), os.path.join(jar_dir_path,'king.jar'))

    filepath=opts.coord_fname
    if os.path.isdir(filepath):
        coord_files = [fname for fname in os.listdir(filepath) if not \
                           fname.startswith('.')]
        filename = os.path.split(coord_files[0])[-1]
    else:
        filename = os.path.split(filepath)[-1]

    try:
        action = generate_3d_plots
    except NameError:
        action = None

    #Place this outside try/except so we don't mask NameError in action
    if action:
        action(prefs,data,custom_axes,background_color,label_color,dir_path, \
                data_file_path,filename,ellipsoid_prefs=ellipsoid_prefs)
Code example #38
0
    def setUp(self):
        # create the temporary input files that will be used

        self._files_to_remove = []

        self.sample_fasta_file1_data = sample_fasta_file1
        self.sample_fasta_file_bad_labels_data =\
         sample_fasta_file_bad_labels

        self.sample_mapping_file1_data = sample_mapping_file1
        self.sample_mapping_file_no_revprimer_header =\
         sample_mapping_file_no_revprimer_header
        self.sample_mapping_file_bad_revprimer =\
         sample_mapping_file_bad_revprimer
        self.expected_truncation_default_settings =\
         expected_truncation_default_settings
        self.expected_truncation_zero_mismatches =\
         expected_truncation_zero_mismatches
        self.expected_truncation_zero_mismatches_truncate_remove =\
         expected_truncation_zero_mismatches_truncate_remove

        self.fasta_fp = get_tmp_filename(\
         prefix = 'fasta_seqs_',
         suffix = '.fna')
        seq_file = open(self.fasta_fp, 'w')
        seq_file.write(self.sample_fasta_file1_data)
        seq_file.close()

        self.fasta_badlabels_fp = get_tmp_filename(\
         prefix = "fasta_seqs_badlabels_",
         suffix = ".fna")
        seq_file = open(self.fasta_badlabels_fp, "w")
        seq_file.write(self.sample_fasta_file_bad_labels_data)
        seq_file.close()

        self.mapping_fp = get_tmp_filename(\
         prefix = 'sample_mapping_',
         suffix = '.txt')
        mapping_file = open(self.mapping_fp, "w")
        mapping_file.write(self.sample_mapping_file1_data)
        mapping_file.close()

        self.mapping_bad_header_fp = get_tmp_filename(\
         prefix = 'sample_mapping_badheader_',
         suffix = ".txt")
        mapping_file = open(self.mapping_bad_header_fp, "w")
        mapping_file.write(self.sample_mapping_file_no_revprimer_header)
        mapping_file.close()

        self.mapping_bad_primer_fp = get_tmp_filename(\
         prefix = 'sample_mapping_badprimer_',
         suffix = ".txt")
        mapping_file = open(self.mapping_bad_primer_fp, "w")
        mapping_file.write(self.sample_mapping_file_bad_revprimer)
        mapping_file.close()

        self.output_dir = get_random_directory_name(prefix = '/tmp/')
        self.output_dir += '/'

        create_dir(self.output_dir)

        self._files_to_remove =\
         [self.fasta_fp, self.mapping_fp, self.mapping_bad_header_fp,
         self.mapping_bad_primer_fp, self.fasta_badlabels_fp]