Example #1
    def test_get_torque(self):
        obs = ConfigurationManager()

        conf_setter = partial(self.conf.set, 'torque')
        conf_setter('TORQUE_JOB_OWNER', '')
        obs._get_torque(self.conf)
        self.assertIsNone(obs.trq_owner)
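The test methods in this listing appear to come from Qiita's ConfigurationManager test suite and assume a shared fixture: self.conf is a ConfigParser loaded from the configuration file and self.conf_fp holds its path. That fixture is not part of the listing, so the following is only a minimal sketch of what it could look like; the environment variable name and the class name are assumptions.

# Minimal sketch of the unittest scaffolding these snippets assume: a
# ConfigParser loaded from the same file ConfigurationManager reads.
# The QIITA_CONFIG_FP variable and the class name are assumptions.
import warnings                  # used by the test_get_main snippets below
from functools import partial    # used by the conf_setter helpers below
from os import environ
from configparser import ConfigParser
from unittest import TestCase, main

from qiita_core.configuration_manager import ConfigurationManager


class ConfigurationManagerTests(TestCase):
    def setUp(self):
        # assumed: the active config file path is exported as QIITA_CONFIG_FP
        self.conf_fp = environ['QIITA_CONFIG_FP']
        self.conf = ConfigParser()
        with open(self.conf_fp) as f:
            self.conf.read_file(f)


if __name__ == '__main__':
    main()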
Example #3
    def test_get_postgres(self):
        obs = ConfigurationManager()

        conf_setter = partial(self.conf.set, 'postgres')
        conf_setter('PASSWORD', '')
        conf_setter('ADMIN_PASSWORD', '')
        obs._get_postgres(self.conf)
        self.assertIsNone(obs.password)
        self.assertIsNone(obs.admin_password)
Example #5
def _test_wrapper_remote(func):
    """Execute a function on a remote ipengine"""
    from IPython.parallel import Client
    from qiita_core.configuration_manager import ConfigurationManager
    config = ConfigurationManager()
    c = Client(profile=config.ipython_default)
    bv = c.load_balanced_view()
    return _ipy_wait(bv.apply_async(func))
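This wrapper hands the result off to an _ipy_wait helper that is not shown in the listing. A minimal sketch of what it could look like, assuming it simply blocks on the IPython.parallel AsyncResult and hands back either the value or the exception raised on the engine:

# Hypothetical sketch of the _ipy_wait helper used above; the real
# implementation is not included in these examples.
def _ipy_wait(async_result):
    # Block until the remote call finishes, then return its value, or the
    # exception it raised so the caller can inspect the failure.
    async_result.wait()
    try:
        return async_result.get()
    except Exception as e:
        return e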
Example #6
    def test_get_portal(self):
        obs = ConfigurationManager()
        conf_setter = partial(self.conf.set, 'portal')
        # Default portal_dir
        conf_setter('PORTAL_DIR', '')
        obs._get_portal(self.conf)
        self.assertEqual(obs.portal_dir, "")
        # Portal dir does not start with /
        conf_setter('PORTAL_DIR', 'gold_portal')
        obs._get_portal(self.conf)
        self.assertEqual(obs.portal_dir, "/gold_portal")
        # Portal dir endswith /
        conf_setter('PORTAL_DIR', '/gold_portal/')
        obs._get_portal(self.conf)
        self.assertEqual(obs.portal_dir, "/gold_portal")
Example #7
    def redis_test(**kwargs):
        """Put and get a key from redis"""
        from uuid import uuid4
        from redis import Redis
        from qiita_core.configuration_manager import ConfigurationManager
        config = ConfigurationManager()

        r_client = Redis(host=config.redis_host,
                         port=config.redis_port,
                         password=config.redis_password,
                         db=config.redis_db)
        key = str(uuid4())
        r_client.set(key, 42, ex=1)
        return int(r_client.get(key))
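Given its indentation, redis_test is presumably defined inside a larger test method and dispatched to a remote engine with a wrapper such as _test_wrapper_remote from Example #5. A hedged usage sketch, assuming both names are in scope:

# Assumed usage: run the redis round-trip on a remote ipengine and verify
# the value stored under the random key comes back intact.
assert _test_wrapper_remote(redis_test) == 42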
Example #9
def generate_plugin_releases():
    """Generate releases for plugins
    """
    ARCHIVE = qdb.archive.Archive
    qiita_config = ConfigurationManager()
    working_dir = qiita_config.working_dir

    commands = [
        c for s in qdb.software.Software.iter(active=True) for c in s.commands
        if c.post_processing_cmd is not None
    ]

    tnow = datetime.now()
    ts = tnow.strftime('%m%d%y-%H%M%S')
    tgz_dir = join(working_dir, 'releases', 'archive')
    create_nested_path(tgz_dir)
    tgz_dir_release = join(tgz_dir, ts)
    create_nested_path(tgz_dir_release)
    for cmd in commands:
        cmd_name = cmd.name
        mschemes = [
            v for _, v in ARCHIVE.merging_schemes().items() if cmd_name in v
        ]
        for ms in mschemes:
            ms_name = sub('[^0-9a-zA-Z]+', '', ms)
            ms_fp = join(tgz_dir_release, ms_name)
            create_nested_path(ms_fp)

            pfp = join(ms_fp, 'archive.json')
            archives = {
                k: loads(v)
                for k, v in ARCHIVE.retrieve_feature_values(
                    archive_merging_scheme=ms).items() if v != ''
            }
            with open(pfp, 'w') as f:
                dump(archives, f)

            # now let's run the post_processing_cmd
            ppc = cmd.post_processing_cmd

            # concatenate any other parameters into a string
            params = ' '.join(
                ["%s=%s" % (k, v) for k, v in ppc['script_params'].items()])
            # append archives file and output dir parameters
            params = ("%s --fp_archive=%s --output_dir=%s" %
                      (params, pfp, ms_fp))

            ppc_cmd = "%s %s %s" % (ppc['script_env'], ppc['script_path'],
                                    params)
            p_out, p_err, rv = qdb.processing_job._system_call(ppc_cmd)
            p_out = p_out.rstrip()
            if rv != 0:
                raise ValueError('Error %d: %s' % (rv, p_out))
            p_out = loads(p_out)

    # tgz-ing all files
    tgz_name = join(tgz_dir, 'archive-%s-building.tgz' % ts)
    tgz_name_final = join(tgz_dir, 'archive.tgz')
    with topen(tgz_name, "w|gz") as tgz:
        tgz.add(tgz_dir_release, arcname=basename(tgz_dir_release))
    # getting the release md5
    with open(tgz_name, "rb") as f:
        md5sum = md5()
        for c in iter(lambda: f.read(4096), b""):
            md5sum.update(c)
    rename(tgz_name, tgz_name_final)
    vals = [('filepath', tgz_name_final[len(working_dir):], r_client.set),
            ('md5sum', md5sum.hexdigest(), r_client.set),
            ('time', tnow.strftime('%m-%d-%y %H:%M:%S'), r_client.set)]
    for k, v, f in vals:
        redis_key = 'release-archive:%s' % k
        # important to "flush" variables to avoid errors
        r_client.delete(redis_key)
        f(redis_key, v)
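generate_plugin_releases (and the release function in Example #10) relies on a create_nested_path helper defined elsewhere in Qiita. A minimal stand-in, assuming all it needs to do is create a directory tree and tolerate one that already exists:

# Hypothetical stand-in for the create_nested_path helper used above; the
# real implementation lives elsewhere in Qiita.
from os import makedirs


def create_nested_path(path):
    # exist_ok keeps repeated release runs from failing on existing dirs
    makedirs(path, exist_ok=True)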
Example #10
def generate_biom_and_metadata_release(study_status='public'):
    """Generate a list of biom/meatadata filepaths and a tgz of those files

    Parameters
    ----------
    study_status : str, optional
        The study status to search for. Note that this should always be set
        to 'public' but having this exposed helps with testing. The other
        options are 'private' and 'sandbox'
    """
    studies = qdb.study.Study.get_by_status(study_status)
    qiita_config = ConfigurationManager()
    working_dir = qiita_config.working_dir
    portal = qiita_config.portal
    bdir = qdb.util.get_db_files_base_dir()
    time = datetime.now().strftime('%m-%d-%y %H:%M:%S')

    data = []
    for s in studies:
        # [0] latest is first, [1] only getting the filepath
        sample_fp = relpath(s.sample_template.get_filepaths()[0][1], bdir)

        for a in s.artifacts(artifact_type='BIOM'):
            if a.processing_parameters is None or a.visibility != study_status:
                continue

            merging_schemes, parent_softwares = a.merging_scheme
            software = a.processing_parameters.command.software
            software = '%s v%s' % (software.name, software.version)

            for x in a.filepaths:
                if x['fp_type'] != 'biom' or 'only-16s' in x['fp']:
                    continue
                fp = relpath(x['fp'], bdir)
                for pt in a.prep_templates:
                    categories = pt.categories()
                    platform = ''
                    target_gene = ''
                    if 'platform' in categories:
                        platform = ', '.join(
                            set(pt.get_category('platform').values()))
                    if 'target_gene' in categories:
                        target_gene = ', '.join(
                            set(pt.get_category('target_gene').values()))
                    for _, prep_fp in pt.get_filepaths():
                        if 'qiime' not in prep_fp:
                            break
                    prep_fp = relpath(prep_fp, bdir)
                    # format: (biom_fp, sample_fp, prep_fp, qiita_artifact_id,
                    #          platform, target gene, merging schemes,
                    #          artifact software/version,
                    #          parent software/version)
                    data.append(
                        (fp, sample_fp, prep_fp, a.id, platform, target_gene,
                         merging_schemes, software, parent_softwares))

    # writing text and tgz file
    ts = datetime.now().strftime('%m%d%y-%H%M%S')
    tgz_dir = join(working_dir, 'releases')
    create_nested_path(tgz_dir)
    tgz_name = join(tgz_dir, '%s-%s-building.tgz' % (portal, study_status))
    tgz_name_final = join(tgz_dir, '%s-%s.tgz' % (portal, study_status))
    txt_lines = [
        "biom fp\tsample fp\tprep fp\tqiita artifact id\tplatform\t"
        "target gene\tmerging scheme\tartifact software\tparent software"
    ]
    with topen(tgz_name, "w|gz") as tgz:
        for biom_fp, sample_fp, prep_fp, aid, pform, tg, ms, asv, psv in data:
            txt_lines.append(
                "%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s" %
                (biom_fp, sample_fp, prep_fp, aid, pform, tg, ms, asv, psv))
            tgz.add(join(bdir, biom_fp), arcname=biom_fp, recursive=False)
            tgz.add(join(bdir, sample_fp), arcname=sample_fp, recursive=False)
            tgz.add(join(bdir, prep_fp), arcname=prep_fp, recursive=False)
        info = TarInfo(name='%s-%s-%s.txt' % (portal, study_status, ts))
        txt_hd = BytesIO()
        txt_hd.write(bytes('\n'.join(txt_lines), 'ascii'))
        txt_hd.seek(0)
        info.size = len(txt_hd.read())
        txt_hd.seek(0)
        tgz.addfile(tarinfo=info, fileobj=txt_hd)

    with open(tgz_name, "rb") as f:
        md5sum = md5()
        for c in iter(lambda: f.read(4096), b""):
            md5sum.update(c)

    rename(tgz_name, tgz_name_final)

    vals = [('filepath', tgz_name_final[len(working_dir):], r_client.set),
            ('md5sum', md5sum.hexdigest(), r_client.set),
            ('time', time, r_client.set)]
    for k, v, f in vals:
        redis_key = '%s:release:%s:%s' % (portal, study_status, k)
        # important to "flush" variables to avoid errors
        r_client.delete(redis_key)
        f(redis_key, v)
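The release metadata ends up under portal-scoped redis keys following the '%s:release:%s:%s' pattern. A small sketch of reading it back, assuming the module-level r_client set up in Example #14 below:

# Assumed read-back of the keys written above; field and key names mirror
# the vals loop in generate_biom_and_metadata_release.
def get_release_info(portal, study_status='public'):
    info = {}
    for field in ('filepath', 'md5sum', 'time'):
        raw = r_client.get('%s:release:%s:%s' % (portal, study_status, field))
        info[field] = raw.decode('ascii') if raw is not None else None
    return info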
Example #11
    def test_init(self):
        obs = ConfigurationManager()
        # Main section
        self.assertEqual(obs.conf_fp, self.conf_fp)
        self.assertTrue(obs.test_environment)
        self.assertEqual(obs.base_data_dir, "/tmp/")
        self.assertEqual(obs.log_dir, "/tmp/")
        self.assertEqual(obs.base_url, "https://localhost")
        self.assertEqual(obs.max_upload_size, 100)
        self.assertTrue(obs.require_approval)
        self.assertEqual(obs.qiita_env, "source activate qiita")
        self.assertEqual(obs.private_launcher, 'qiita-private-launcher')
        self.assertEqual(obs.plugin_launcher, "qiita-plugin-launcher")
        self.assertEqual(obs.plugin_dir, "/tmp/")
        self.assertEqual(
            obs.valid_upload_extension,
            ["fastq", "fastq.gz", "txt", "tsv", "sff", "fna", "qual"])
        self.assertEqual(obs.certificate_file, "/tmp/server.cert")
        self.assertEqual(obs.cookie_secret, "SECRET")
        self.assertEqual(obs.key_file, "/tmp/server.key")

        # Torque section
        self.assertEqual(obs.trq_owner, "*****@*****.**")
        self.assertEqual(obs.trq_poll_val, 15)
        self.assertEqual(obs.trq_dependency_q_cnt, 2)

        # Postgres section
        self.assertEqual(obs.user, "postgres")
        self.assertEqual(obs.admin_user, "postgres")
        self.assertEqual(obs.password, "andanotherpwd")
        self.assertEqual(obs.admin_password, "thishastobesecure")
        self.assertEqual(obs.database, "qiita_test")
        self.assertEqual(obs.host, "localhost")
        self.assertEqual(obs.port, 5432)

        # Redis section
        self.assertEqual(obs.redis_host, "localhost")
        self.assertEqual(obs.redis_password, "anotherpassword")
        self.assertEqual(obs.redis_db, 13)
        self.assertEqual(obs.redis_port, 6379)

        # SMTP section
        self.assertEqual(obs.smtp_host, "localhost")
        self.assertEqual(obs.smtp_port, 25)
        self.assertEqual(obs.smtp_user, "qiita")
        self.assertEqual(obs.smtp_password, "supersecurepassword")
        self.assertFalse(obs.smtp_ssl)
        self.assertEqual(obs.smtp_email, "*****@*****.**")

        # EBI section
        self.assertEqual(obs.ebi_seq_xfer_user, "Webin-41528")
        self.assertEqual(obs.ebi_seq_xfer_pass, "passwordforebi")
        self.assertEqual(obs.ebi_seq_xfer_url, "webin.ebi.ac.uk")
        self.assertEqual(
            obs.ebi_dropbox_url,
            "https://www-test.ebi.ac.uk/ena/submit/drop-box/submit/")
        self.assertEqual(obs.ebi_center_name, "qiita-test")
        self.assertEqual(obs.ebi_organization_prefix, "example_organization")

        # VAMPS section
        self.assertEqual(obs.vamps_user, "user")
        self.assertEqual(obs.vamps_pass, "password")
        self.assertEqual(obs.vamps_url,
                         "https://vamps.mbl.edu/mobe_workshop/getfile.php")

        # Portal section
        self.assertEqual(obs.portal_fp, "/tmp/portal.cfg")
        self.assertEqual(obs.portal, "QIITA")
        self.assertEqual(obs.portal_dir, "/portal")

        # iframe section
        self.assertEqual(obs.iframe_qiimp, "https://localhost:8898/")
Example #12
    def test_get_main(self):
        obs = ConfigurationManager()

        conf_setter = partial(self.conf.set, 'main')
        conf_setter('COOKIE_SECRET', '')
        conf_setter('JWT_SECRET', '')
        conf_setter('BASE_DATA_DIR', '')
        conf_setter('PLUGIN_DIR', '')
        conf_setter('CERTIFICATE_FILE', '')
        conf_setter('KEY_FILE', '')
        conf_setter('QIITA_ENV', '')

        # Warning raised if no files will be allowed to be uploaded
        # Warning raised if no cookie_secret
        with warnings.catch_warnings(record=True) as warns:
            obs._get_main(self.conf)

            obs_warns = [str(w.message) for w in warns]
            exp_warns = [
                'Random cookie secret generated.',
                'Random JWT secret generated.  Non Public Artifact '
                'Download Links will expire upon system restart.'
            ]
            self.assertCountEqual(obs_warns, exp_warns)

        self.assertNotEqual(obs.cookie_secret, "SECRET")
        # Test default base_data_dir
        self.assertTrue(
            obs.base_data_dir.endswith("/qiita_db/support_files/test_data"))
        # Test default plugin dir
        self.assertTrue(obs.plugin_dir.endswith("/.qiita_plugins"))
        # Default certificate_file
        self.assertTrue(
            obs.certificate_file.endswith(
                "/qiita_core/support_files/server.crt"))
        # Default key_file
        self.assertTrue(
            obs.key_file.endswith("/qiita_core/support_files/server.key"))

        # BASE_DATA_DIR does not exist
        conf_setter('BASE_DATA_DIR', '/surprised/if/this/dir/exists')
        with self.assertRaises(ValueError):
            obs._get_main(self.conf)

        # WORKING_DIR does not exist
        conf_setter('BASE_DATA_DIR', '/tmp')
        conf_setter('WORKING_DIR', '/surprised/if/this/dir/exists')
        with self.assertRaises(ValueError):
            obs._get_main(self.conf)

        # PLUGIN_DIR does not exist
        conf_setter('WORKING_DIR', '/tmp')
        conf_setter('PLUGIN_DIR', '/surprised/if/this/dir/exists')
        with self.assertRaises(ValueError):
            obs._get_main(self.conf)

        # No files can be uploaded
        conf_setter('PLUGIN_DIR', '/tmp')
        conf_setter('VALID_UPLOAD_EXTENSION', '')
        with self.assertRaises(ValueError):
            obs._get_main(self.conf)

        self.assertEqual(obs.qiita_env, "")
Example #13
    def test_init_error(self):
        with open(self.conf_fp, 'w') as f:
            f.write("\n")

        with self.assertRaises(MissingConfigSection):
            ConfigurationManager()
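The test expects ConfigurationManager to raise MissingConfigSection when the config file has no sections; the exception class itself is imported from Qiita's exception module and is not part of this listing. A hypothetical stand-in, assuming it is a plain custom exception:

# Hypothetical stand-in for MissingConfigSection; the real class is defined
# in qiita_core's exceptions module, not in these examples.
class MissingConfigSection(Exception):
    """Raised when a required section is missing from the config file."""
    pass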
Example #14
#!/usr/bin/env python
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from __future__ import division

from redis import Redis

from qiita_core.configuration_manager import ConfigurationManager

qiita_config = ConfigurationManager()

r_client = Redis(host=qiita_config.redis_host,
                 port=qiita_config.redis_port,
                 password=qiita_config.redis_password,
                 db=qiita_config.redis_db)
Example #15
    def test_get_main(self):
        obs = ConfigurationManager()

        conf_setter = partial(self.conf.set, 'main')
        conf_setter('COOKIE_SECRET', '')
        conf_setter('BASE_DATA_DIR', '')
        conf_setter('PLUGIN_DIR', '')
        conf_setter('CERTIFICATE_FILE', '')
        conf_setter('KEY_FILE', '')
        conf_setter('QIITA_ENV', '')

        # Warning raised if no files will be allowed to be uploaded
        # Warning raised if no cookie_secret
        with warnings.catch_warnings(record=True) as warns:
            obs._get_main(self.conf)

            obs_warns = [str(w.message) for w in warns]
            exp_warns = ['Random cookie secret generated.']
            self.assertItemsEqual(obs_warns, exp_warns)

        self.assertNotEqual(obs.cookie_secret, "SECRET")
        # Test default base_data_dir
        self.assertTrue(
            obs.base_data_dir.endswith("/qiita_db/support_files/test_data"))
        # Test default plugin dir
        self.assertTrue(obs.plugin_dir.endswith("/.qiita_plugins"))
        # Default certificate_file
        self.assertTrue(
            obs.certificate_file.endswith(
                "/qiita_core/support_files/server.crt"))
        # Default key_file
        self.assertTrue(
            obs.key_file.endswith("/qiita_core/support_files/server.key"))

        # BASE_DATA_DIR does not exist
        conf_setter('BASE_DATA_DIR', '/surprised/if/this/dir/exists')
        with self.assertRaises(ValueError):
            obs._get_main(self.conf)

        # WORKING_DIR does not exist
        conf_setter('BASE_DATA_DIR', '/tmp')
        conf_setter('WORKING_DIR', '/surprised/if/this/dir/exists')
        with self.assertRaises(ValueError):
            obs._get_main(self.conf)

        # PLUGIN_DIR does not exist
        conf_setter('WORKING_DIR', '/tmp')
        conf_setter('PLUGIN_DIR', '/surprised/if/this/dir/exists')
        with self.assertRaises(ValueError):
            obs._get_main(self.conf)

        # No files can be uploaded
        conf_setter('PLUGIN_DIR', '/tmp')
        conf_setter('VALID_UPLOAD_EXTENSION', '')
        with self.assertRaises(ValueError):
            obs._get_main(self.conf)

        self.assertEqual(obs.qiita_env, "")
Example #16
def generate_biom_and_metadata_release(study_status='public'):
    """Generate a list of biom/meatadata filepaths and a tgz of those files

    Parameters
    ----------
    study_status : str, optional
        The study status to search for. Note that this should always be set
        to 'public' but having this exposed helps with testing. The other
        options are 'private' and 'sandbox'
    """
    studies = qdb.study.Study.get_by_status(study_status)
    qiita_config = ConfigurationManager()
    working_dir = qiita_config.working_dir
    portal = qiita_config.portal
    bdir = qdb.util.get_db_files_base_dir()
    time = datetime.now().strftime('%m-%d-%y %H:%M:%S')

    data = []
    for s in studies:
        # [0] latest is first, [1] only getting the filepath
        sample_fp = relpath(s.sample_template.get_filepaths()[0][1], bdir)

        for a in s.artifacts(artifact_type='BIOM'):
            if a.processing_parameters is None:
                continue

            cmd_name = a.processing_parameters.command.name

            # this loop is necessary as in theory an artifact can be
            # generated from multiple prep info files
            human_cmd = []
            for p in a.parents:
                pp = p.processing_parameters
                pp_cmd_name = pp.command.name
                if pp_cmd_name == 'Trimming':
                    human_cmd.append('%s @ %s' % (
                        cmd_name, str(pp.values['length'])))
                else:
                    human_cmd.append('%s, %s' % (cmd_name, pp_cmd_name))
            human_cmd = ', '.join(human_cmd)

            for _, fp, fp_type in a.filepaths:
                if fp_type != 'biom' or 'only-16s' in fp:
                    continue
                fp = relpath(fp, bdir)
                # format: (biom_fp, sample_fp, prep_fp, qiita_artifact_id,
                #          human readable name)
                for pt in a.prep_templates:
                    for _, prep_fp in pt.get_filepaths():
                        if 'qiime' not in prep_fp:
                            break
                    prep_fp = relpath(prep_fp, bdir)
                    data.append((fp, sample_fp, prep_fp, a.id, human_cmd))

    # writing text and tgz file
    ts = datetime.now().strftime('%m%d%y-%H%M%S')
    tgz_dir = join(working_dir, 'releases')
    if not exists(tgz_dir):
        makedirs(tgz_dir)
    tgz_name = join(tgz_dir, '%s-%s-building.tgz' % (portal, study_status))
    tgz_name_final = join(tgz_dir, '%s-%s.tgz' % (portal, study_status))
    txt_hd = StringIO()
    with topen(tgz_name, "w|gz") as tgz:
        # writing header for txt
        txt_hd.write(
            "biom_fp\tsample_fp\tprep_fp\tqiita_artifact_id\tcommand\n")
        for biom_fp, sample_fp, prep_fp, artifact_id, human_cmd in data:
            txt_hd.write("%s\t%s\t%s\t%s\t%s\n" % (
                biom_fp, sample_fp, prep_fp, artifact_id, human_cmd))
            tgz.add(join(bdir, biom_fp), arcname=biom_fp, recursive=False)
            tgz.add(join(bdir, sample_fp), arcname=sample_fp, recursive=False)
            tgz.add(join(bdir, prep_fp), arcname=prep_fp, recursive=False)

        txt_hd.seek(0)
        info = TarInfo(name='%s-%s-%s.txt' % (portal, study_status, ts))
        info.size = len(txt_hd.buf)
        tgz.addfile(tarinfo=info, fileobj=txt_hd)

    with open(tgz_name, "rb") as f:
        md5sum = md5()
        for c in iter(lambda: f.read(4096), b""):
            md5sum.update(c)

    rename(tgz_name, tgz_name_final)

    vals = [
        ('filepath', tgz_name_final[len(working_dir):], r_client.set),
        ('md5sum', md5sum.hexdigest(), r_client.set),
        ('time', time, r_client.set)]
    for k, v, f in vals:
        redis_key = '%s:release:%s:%s' % (portal, study_status, k)
        # important to "flush" variables to avoid errors
        r_client.delete(redis_key)
        f(redis_key, v)