Example #1
def dex_intro_openssl():
    '''Compile and run external example `downloaded-examples/intro-openssl/`
    written by Kenneth Ballard.

    https://www.ibm.com/developerworks/linux/library/l-openssl/index.html
    '''
    basedir = dirname(__file__)
    example_dir = flo('{basedir}/downloaded-examples/intro-openssl')

    print(cyan('## Compile `nossl.c`\n'))
    local(
        flo('cd {example_dir}  &&  '
            'gcc -Wall nossl.c -o nossl -lssl -lcrypto'))

    print(cyan('\n## Run `nossl`\n'))
    local(
        flo('cd {example_dir}  &&  '
            './nossl > nossl_output  &&  head nossl_output'))

    print(cyan('\n## Compile `withssl.c`\n'))
    local(
        flo('cd {example_dir}  &&  '
            'gcc -Wall withssl.c -o withssl -lssl -lcrypto'))

    # raises a segmentation fault
    print(cyan('\n## Run `withssl` -- raises segmentation fault\n'))
    with warn_only():
        local(
            flo('cd {example_dir}  &&  '
                './withssl > withssl_output  &&  head withssl_output'))
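
Note: several of these examples (#1, #4, #29, #31 and #39) call a helper flo() that the snippets never define. In the source projects it formats a string with the caller's local variables ("format local", an f-string substitute for pre-3.6 Python). A minimal sketch of such a helper, written here as an assumption about what the originals import:

import inspect

def flo(string):
    '''Return `string` formatted with the caller's local variables (sketch).

    E.g. flo('{basedir}/dist') == '{}/dist'.format(basedir)
    '''
    # look one frame up the stack and use those locals as format arguments
    callers_locals = inspect.stack()[1][0].f_locals
    return string.format(**callers_locals)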
Example #2
def get_variables_from(filename):
    file_buffer = BytesIO()
    with warn_only():
        get(filename, file_buffer)

    variables = dict()

    for line in file_buffer.getvalue().splitlines():
        line = line.decode('utf-8').strip()
        if not line or line.startswith('#'):
            continue

        key, value = line.split('=', 1)
        variables[key] = value

    if 'EMAIL_URL' in variables:
        email_url = urlparse(variables['EMAIL_URL'])

        email = {
            'email_scheme': email_url.scheme,
            'email_host': email_url.hostname,
            'email_port': email_url.port,
            'email_user': email_url.username,
            'email_password': email_url.password,
            'email_path': email_url.path
        }

        variables.update(email)
        admins = json.loads(variables['ADMINS'])
        variables['MONIT_ADMINS'] = [adm[1] for adm in admins]

    return variables
Example #3
def test_search_job_imzml_example_es_export_fails(get_compute_img_metrics_mock, filter_sf_metrics_mock,
                                                  post_images_to_annot_service_mock,
                                                  MolDBServiceWrapperMock, MolDBServiceWrapperMock2,
                                                  sm_config, create_fill_sm_database, es_dsl_search,
                                                  clean_isotope_storage):
    init_mol_db_service_wrapper_mock(MolDBServiceWrapperMock)
    init_mol_db_service_wrapper_mock(MolDBServiceWrapperMock2)

    get_compute_img_metrics_mock.return_value = lambda *args: (0.9, 0.9, 0.9, [100.], [0], [10.])
    filter_sf_metrics_mock.side_effect = lambda x: x

    url_dict = {
        'iso_image_ids': ['iso_image_1', None, None, None]
    }
    post_images_to_annot_service_mock.return_value = {
        35: url_dict,
        44: url_dict
    }

    db = DB(sm_config['db'])

    def throw_exception_function(*args):
        raise Exception('Test')

    try:
        ds_id = '2000-01-01_00h00m'
        upload_dt = datetime.now()
        ds_config_str = open(ds_config_path).read()
        db.insert(Dataset.DS_INSERT, [{
            'id': ds_id,
            'name': test_ds_name,
            'input_path': input_dir_path,
            'upload_dt': upload_dt,
            'metadata': '{}',
            'config': ds_config_str,
            'status': DatasetStatus.QUEUED,
            'is_public': True,
            'mol_dbs': ['HMDB-v4'],
            'adducts': ['+H', '+Na', '+K'],
            'ion_img_storage': 'fs'
        }])

        with patch('sm.engine.search_job.ESExporter.index_ds') as index_ds_mock:
            index_ds_mock.side_effect = throw_exception_function

            img_store = ImageStoreServiceWrapper(sm_config['services']['img_service_url'])
            job = SearchJob(img_store=img_store)
            ds = Dataset.load(db, ds_id)
            job.run(ds)
    except ESExportFailedError as e:
        assert e
        # dataset table asserts
        row = db.select_one('SELECT status from dataset')
        assert row[0] == 'FAILED'
    else:
        raise AssertionError('ESExportFailedError should be raised')
    finally:
        db.close()
        with warn_only():
            local('rm -rf {}'.format(data_dir_path))
Example #4
def test(args='', py=None):
    '''Run unit tests.

    Keyword-Args:
        args: Optional arguments passed to pytest
        py: python version to run the tests against

    Example:

        fab test:args=-s,py=py27
    '''
    basedir = dirname(__file__)

    if py is None:
        # e.g. envlist: 'envlist = py26,py27,py33,py34,py35,py36'
        envlist = local(flo('cd {basedir}  &&  grep envlist tox.ini'),
                        capture=True)
        _, py = envlist.rsplit(',', 1)

    with warn_only():
        res = local(
            flo('cd {basedir}  &&  '
                "PYTHONPATH='.' .tox/{py}/bin/python -m pytest {args}"))
        print(res)
        if res.return_code == 127:
            print(
                cyan('missing tox virtualenv, '
                     'run fabric task `tox`:\n\n    '
                     'fab tox\n'))
            sys.exit(1)
Example #5
def stop():
    service_name = 'raftis'

    with warn_only():
        res = sudo('service {} status | grep running'.format(service_name))
        if res.return_code == 0:
            sudo('stop {}'.format(service_name))
Example #6
def create_database(config_dir):
    config_instance = Config(config_dir)

    database_url = urlparse(config_instance.DATABASE_URL)

    if database_url.scheme not in ['postgres', 'postgresql', 'pgsql', 'psql']:
        return

    host = database_url.hostname
    port = database_url.port

    user = database_url.username
    password = database_url.password

    database = database_url.path[1:]
    database = database.split('?', 2)[0]

    command = 'createdb'

    if host:
        command += ' -h %s' % host
    if port:
        command += ' -p %s' % port
    if user:
        command += ' -U%s' % user
    if password:
        command = ('export PGPASSWORD="%s" && ' % password) + command

    command += ' %s' % database

    with warn_only():
        result = run(command)

    if 'already exists' in result:
        print('Database %s already exists' % database)
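
For illustration, with a hypothetical DATABASE_URL of 'postgres://app:secret@localhost:5432/appdb', the code above assembles and runs:

    export PGPASSWORD="secret" && createdb -h localhost -p 5432 -Uapp appdb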
Example #7
def setup_db():
    """Set the main databases and users
    """
    require.postgres.server()
    require.deb.package("postgis")
    with warn_only():
        status_line = run(r'dpkg -s postgis | grep "Suggests\|Recommends"')
    for package in status_line.split():
        if "postgis" in package and "postgres" in package and not "doc" in package:
            require.deb.package(package.replace(',', ''))

    # tyr db creation
    create_postgresql_user(env.tyr_postgresql_user,
                           env.tyr_postgresql_password)
    create_postgresql_database(env.tyr_postgresql_database,
                               env.tyr_postgresql_user)
    postgis_initdb(env.tyr_postgresql_database)

    # cities db creation
    if env.use_cities:
        create_postgresql_user(env.cities_db_user, env.cities_db_password)
        create_postgresql_database(env.cities_db_name, env.cities_db_user)
        postgis_initdb(env.cities_db_name)

    # read_only user db creation
    create_postgresql_user(env.postgres_read_only_user,
                           env.postgres_read_only_password)
Example #8
def setup_db():
    require.postgres.server()
    require.deb.package("postgis")
    with warn_only():
        status_line = run(r'dpkg -s postgis | grep "Suggests\|Recommends"')
    for package in status_line.split():
        if "postgis" in package and "postgres" in package and not "doc" in package:
            require.deb.package(package.replace(',', ''))

    # tyr db creation
    require.postgres.user(env.tyr_postgresql_user, env.tyr_postgresql_password)
    require.postgres.database(env.tyr_postgresql_database,
                              owner=env.tyr_postgresql_user,
                              locale='en_US.UTF-8')
    postgis_initdb(env.tyr_postgresql_database)

    #cities db creation
    if env.use_cities:
        require.postgres.user(env.cities_db_user, env.cities_db_password)
        require.postgres.database(env.cities_db_name,
                                  owner=env.cities_db_user,
                                  locale='en_US.UTF-8')
        postgis_initdb(env.cities_db_name)

    # read_only user db creation
    require.postgres.user(env.postgres_read_only_user,
                          env.postgres_read_only_password)
Example #9
 def _run_chef_on_hosts(self, hosts):
     with hide(*self.hidden_outputs):
         execute(self.chef_manager.push_deployment_data, hosts=hosts)
     with warn_only():
         results = execute(self.chef_manager.run_chef_client, hosts=hosts)
     execute(self.chef_manager.pull_node_info, hosts=hosts)
     return results
Example #10
def lint():
    with warn_only(), hide('warnings'):
        res = local('pycodestyle .')
    print('lint ', end='')
    if res.failed:
        print(red('failed'))
    else:
        print(green('passed'))
Example #11
def test_sm_daemons_annot_fails(get_compute_img_metrics_mock, filter_sf_metrics_mock,
                                post_images_to_annot_service_mock,
                                MolDBServiceWrapperMock,
                                sm_config, test_db, es_dsl_search,
                                clean_isotope_storage):
    init_mol_db_service_wrapper_mock(MolDBServiceWrapperMock)

    def throw_exception_function(*args):
        raise Exception('Test')
    get_compute_img_metrics_mock.return_value = throw_exception_function
    filter_sf_metrics_mock.side_effect = lambda x: x

    url_dict = {
        'iso_image_ids': ['iso_image_1', None, None, None]
    }
    post_images_to_annot_service_mock.return_value = {
        35: url_dict,
        44: url_dict
    }

    db = DB(sm_config['db'])
    es = ESExporter(db)
    annotate_daemon = None

    try:
        ds_id = '2000-01-01_00h00m'
        upload_dt = datetime.now()
        ds_config_str = open(ds_config_path).read()
        db.insert(Dataset.DS_INSERT, [{
            'id': ds_id,
            'name': test_ds_name,
            'input_path': input_dir_path,
            'upload_dt': upload_dt,
            'metadata': '{}',
            'config': ds_config_str,
            'status': DatasetStatus.QUEUED,
            'is_public': True,
            'mol_dbs': ['HMDB-v4'],
            'adducts': ['+H', '+Na', '+K'],
            'ion_img_storage': 'fs'
        }])

        ds = Dataset.load(db, ds_id)
        queue_pub.publish({'ds_id': ds.id, 'ds_name': ds.name, 'action': 'annotate'})

        run_daemons(db, es)

        # dataset and job tables asserts
        row = db.select_one('SELECT status from dataset')
        assert row[0] == 'FAILED'
        row = db.select_one('SELECT status from job')
        assert row[0] == 'FAILED'
    finally:
        db.close()
        if annotate_daemon:
            annotate_daemon.stop()
        with warn_only():
            local('rm -rf {}'.format(data_dir_path))
Example #12
def test_search_job_imzml_example(get_compute_img_measures_mock, create_fill_sm_database, sm_config):
    get_compute_img_measures_mock.return_value = lambda *args: (0.9, 0.9, 0.9)

    SMConfig._config_dict = sm_config

    db = DB(sm_config['db'])
    try:
        job = SearchJob('imzml_example_ds')
        job.run(input_dir_path, clean=True)

        # dataset meta asserts
        rows = db.select("SELECT name, file_path, img_bounds from dataset")
        img_bounds = {u'y': {u'max': 3, u'min': 1}, u'x': {u'max': 3, u'min': 1}}
        file_path = join(data_dir_path, 'Example_Continuous.imzML')
        assert len(rows) == 1
        assert rows[0] == (test_ds_name, file_path, img_bounds)

        # theoretical patterns asserts
        rows = db.select('SELECT db_id, sf_id, adduct, centr_mzs, centr_ints, prof_mzs, prof_ints '
                         'FROM theor_peaks '
                         'ORDER BY adduct')

        assert len(rows) == 3
        assert rows[0][:3] == (0, 10007, '+H')
        assert rows[1][:3] == (0, 10007, '+K')
        assert rows[2][:3] == (0, 10007, '+Na')
        for r in rows:
            assert r[3] and r[4] and r[5] and r[6]

        # image metrics asserts
        rows = db.select(('SELECT db_id, sf_id, adduct, peaks_n, stats FROM iso_image_metrics '
                          'ORDER BY sf_id, adduct'))

        assert rows
        assert rows[0]
        assert tuple(rows[0][:2]) == (0, 10007)
        assert set(rows[0][4].keys()) == {'chaos', 'img_corr', 'pat_match'}

        # image asserts
        rows = db.select(('SELECT db_id, sf_id, adduct, peak, intensities, min_int, max_int '
                          'FROM iso_image '
                          'ORDER BY sf_id, adduct'))
        assert rows

        max_int = 0.0
        for r in rows:
            max_int = max(max_int, r[-1])
            assert tuple(r[:2]) == (0, 10007)
        assert max_int

    finally:
        db.close()
        with warn_only():
            local('rm -rf {}'.format(data_dir_path))
            if not sm_config['fs']['local']:
                local(hdfs_prefix() + '-rm -r {}'.format(data_dir_path))
Example #13
def prepare_virtualenv(config_dir):
    config_instance = Config(config_dir)

    sudo('pip3 install virtualenv')
    sudo('pip3 install virtualenvwrapper')
    # Additional steps to install virtualenvwrapper

    user_home = os.path.join('/', 'home', config_instance.user)

    user_profile_file = os.path.join(user_home, '.profile')
    if not exists(user_profile_file):
        run('touch %s' % user_profile_file)

    python_path = "VIRTUALENVWRAPPER_PYTHON='/usr/bin/python3'"
    virtualenv_source = 'source /usr/local/bin/virtualenvwrapper.sh'
    workon_home_export = 'export WORKON_HOME=\'{}\''.format(config_instance.workon_home)

    if not contains(user_profile_file, python_path):
        append(user_profile_file, '\n' + python_path)
    if not contains(user_profile_file, virtualenv_source):
        append(user_profile_file, '\n' + virtualenv_source)
    if not contains(user_profile_file, 'export WORKON_HOME='):
        append(user_profile_file, '\n' + workon_home_export)

    run('source %s' % user_profile_file)

    # Creating new virtualenv if it doesn't exist
    with warn_only():
        res = run('workon')

    if config_instance.project_name not in res:
        run('mkvirtualenv %s' % config_instance.project_name)
    else:
        print('Virtual environment exists!')

    postactivate = os.path.join('$WORKON_HOME', config_instance.project_name, 'bin', 'postactivate')
    settings_module_config = 'export DJANGO_SETTINGS_MODULE="%s"' % config_instance.settings_module

    with warn_only():
        res = run('grep \'%s\' "%s"' % (settings_module_config, postactivate))

    if res.failed:
        run('echo \'%s\' >> "%s"' % (settings_module_config, postactivate))
Example #14
def _local_needs_pythons(*args, **kwargs):
    with warn_only():
        res = local(*args, **kwargs)
        print(res)
        if res.return_code == 127:
            print(
                cyan('missing python version(s), '
                     'run fabric task `pythons`:\n\n    '
                     'fab pythons\n'))
            sys.exit(1)
Example #15
def check_heroku_authenticated():
    """
    Call before running any methods that capture output of the heroku command, such as get_heroku_variable.

    This ensures that the user is prompted to log in if they are not currently authenticated.
    """
    with warn_only():
        result = local('heroku auth:whoami', capture=True)

    if result.return_code != 0:
        local('heroku auth:login')
Example #16
def install_system_python_protobuf():
    """
    force uninstall python protobuf to allow using system protobuf
    """
    apt_get_update()
    sudo("apt-get --yes remove python-protobuf")
    sudo("apt-get --yes autoremove")
    with warn_only():
        sudo("pip uninstall --yes protobuf")
    sudo("! (pip freeze | grep -q protobuf)")
    sudo("apt-get --yes install python-protobuf")
Example #17
def create_db(username):
    with lcd(fabfile_dir):
        with warn_only():
            # heroku sometimes has memory issues doing migrate
            # it seems to work fine if we just migrate opal first
            # it will later fail because content types haven't
            # been migrated, but that's fine we'll do that later
            local("python manage.py migrate opal")

        for db_command in db_commands(username):
            local(db_command)
Example #18
def install_system_python_protobuf():
    """
    force uninstall python protobuf to allow using system protobuf
    """
    apt_get_update()
    sudo("apt-get --yes remove python-protobuf")
    sudo("apt-get --yes autoremove")
    with warn_only():
        sudo("pip uninstall --yes protobuf")
    sudo("! (pip freeze | grep -q protobuf)")
    sudo("apt-get --yes install python-protobuf")
Example #19
def restore_saved_db():
    local(f'sudo -u postgres dropdb --if-exists {DB_NAME}')
    local(f'sudo -u postgres dropuser --if-exists {DB_USER}')
    local(f'sudo -u postgres createuser --login -g clients {DB_USER}')
    local(f'sudo -u postgres createdb --owner {DB_USER} {DB_NAME}')
    with warn_only():
        for parent in Path(LOCAL_BACKUP).resolve().parents:
            local(f'chmod o+x {parent}')
    local(f'chmod o+r {LOCAL_BACKUP}')
    local(f'sudo -u postgres pg_restore -e -d {DB_NAME} '
          f'-j 5 "{Path(LOCAL_BACKUP).resolve()}"')
    invalidate_cachalot()
Example #20
def prepare(config_dir):
    config_instance = Config(config_dir)

    prepare_journal(config_dir)

    install_packages(config_dir, config_instance.packages)
    prepare_virtualenv(config_dir)
    install_rabbitmq(config_dir)

    # Removing default site for nginx
    with warn_only():
        sudo('rm /etc/nginx/sites-enabled/default')
Example #21
def test_search_job_imzml_example(get_compute_img_measures_mock, filter_sf_metrics_mock, create_fill_sm_database, sm_config):
    get_compute_img_measures_mock.return_value = lambda *args: (0.9, 0.9, 0.9)
    filter_sf_metrics_mock.side_effect = lambda x: x

    SMConfig._config_dict = sm_config

    db = DB(sm_config['db'])
    try:
        job = SearchJob(None, 'imzml_example_ds')
        job.run(input_dir_path, ds_config_path, clean=True)

        # dataset meta asserts
        rows = db.select("SELECT name, file_path, img_bounds from dataset")
        img_bounds = {u'y': {u'max': 3, u'min': 1}, u'x': {u'max': 3, u'min': 1}}
        file_path = join(dirname(__file__), 'data', 'imzml_example_ds')
        assert len(rows) == 1
        assert rows[0] == (test_ds_name, file_path, img_bounds)

        # theoretical patterns asserts
        rows = db.select('SELECT db_id, sf_id, adduct, centr_mzs, centr_ints, prof_mzs, prof_ints '
                         'FROM theor_peaks '
                         'ORDER BY adduct')

        assert len(rows) == 3 + len(DECOY_ADDUCTS)
        for r in rows:
            assert r[3] and r[4]

        # image metrics asserts
        rows = db.select(('SELECT db_id, sf_id, adduct, peaks_n, stats FROM iso_image_metrics '
                          'ORDER BY sf_id, adduct'))

        assert rows
        assert rows[0]
        assert tuple(rows[0][:2]) == (0, 10007)
        assert set(rows[0][4].keys()) == {'chaos', 'spatial', 'spectral'}

        # image asserts
        rows = db.select(('SELECT db_id, sf_id, adduct, peak, intensities, min_int, max_int '
                          'FROM iso_image '
                          'ORDER BY sf_id, adduct'))
        assert rows

        max_int = 0.0
        for r in rows:
            max_int = max(max_int, r[-1])
            assert tuple(r[:2]) == (0, 10007)
        assert max_int

    finally:
        db.close()
        with warn_only():
            local('rm -rf {}'.format(data_dir_path))
Example #22
def setup_db():
    require.postgres.server()
    require.deb.package("postgis")
    with warn_only():
        status_line = run(r'dpkg -s postgis | grep "Suggests\|Recommends"')
    for package in status_line.split():
        if "postgis" in package and "postgres" in package and not "doc" in package:
            require.deb.package(package.replace(',', ''))

    # db creation
    require.postgres.user(env.tyr_postgresql_user, env.tyr_postgresql_password)
    require.postgres.database(env.tyr_postgresql_database, owner=env.tyr_postgresql_user, locale='en_US.UTF-8')
    postgis_initdb(env.tyr_postgresql_database)
Example #23
def test_search_job_imzml_example(get_compute_img_measures_mock, create_fill_sm_database, sm_config):
    get_compute_img_measures_mock.return_value = lambda *args: (0.9, 0.9, 0.9)

    SMConfig._config_dict = sm_config

    db = DB(sm_config['db'])
    try:
        job = SearchJob(None, 'imzml_example_ds')
        job.run(input_dir_path, ds_config_path, clean=True)

        # dataset meta asserts
        rows = db.select("SELECT name, file_path, img_bounds from dataset")
        img_bounds = {u'y': {u'max': 3, u'min': 1}, u'x': {u'max': 3, u'min': 1}}
        file_path = 'file://' + join(data_dir_path, 'ds.txt')
        assert len(rows) == 1
        assert rows[0] == (test_ds_name, file_path, img_bounds)

        # theoretical patterns asserts
        rows = db.select('SELECT db_id, sf_id, adduct, centr_mzs, centr_ints, prof_mzs, prof_ints '
                         'FROM theor_peaks '
                         'ORDER BY adduct')

        assert len(rows) == 3 + len(DECOY_ADDUCTS)
        for r in rows:
            assert r[3] and r[4]

        # image metrics asserts
        rows = db.select(('SELECT db_id, sf_id, adduct, peaks_n, stats FROM iso_image_metrics '
                          'ORDER BY sf_id, adduct'))

        assert rows
        assert rows[0]
        assert tuple(rows[0][:2]) == (0, 10007)
        assert set(rows[0][4].keys()) == {'chaos', 'spatial', 'spectral'}

        # image asserts
        rows = db.select(('SELECT db_id, sf_id, adduct, peak, intensities, min_int, max_int '
                          'FROM iso_image '
                          'ORDER BY sf_id, adduct'))
        assert rows

        max_int = 0.0
        for r in rows:
            max_int = max(max_int, r[-1])
            assert tuple(r[:2]) == (0, 10007)
        assert max_int

    finally:
        db.close()
        with warn_only():
            local('rm -rf {}'.format(data_dir_path))
Example #24
def reload():
    """
    Reload supervisor config
    Restart supervisor website
    Reload nginx config (without restart)
    Start nginx
    """
    _dynamic_env()
    env.sudo('supervisorctl reread', shell=False)
    env.sudo('supervisorctl add %(server_name)s' % env, shell=False)
    env.sudo('supervisorctl restart %(server_name)s' % env, shell=False)
    with warn_only():
        env.sudo('nginx', shell=False)
    env.sudo('nginx -s reload', shell=False)
Example #25
def remove():
    """
    Blow away the current project.
    """
    with warn_only():
        if exists(env.venv_path):
            sudo("rm -rf %s" % env.venv_path)
        for template in get_templates().values():
            remote_path = template["remote_path"]
            if exists(remote_path):
                sudo("rm %s" % remote_path)
        psql("DROP DATABASE %s;" % env.proj_name)
        psql("DROP DATABASE test_%s;" % env.proj_name)
        psql("DROP USER %s;" % env.proj_name)
Example #26
def _list_upgrades():
    with warn_only():
        res = run('apt-get --simulate --verbose-versions dist-upgrade \
                     | grep "  "')
        host = red(env.host_string, bold=True)
        print('[{}]'.format(host))
        if res:
            # clean up return values, prettier arrows!
            res = [x.lstrip().replace('=>', '→') for x in res.split("\n")]
            for package in res:
                package = white(package, bold=True)
                print('Update for {}'.format(package))
        else:
            print('No updates.')
Example #27
def restore_saved_db():
    local(f'sudo -u postgres dropdb --if-exists {DB_NAME}')
    local(f'sudo -u postgres dropuser --if-exists {DB_USER}')
    local(f'sudo -u postgres createuser --login -g clients {DB_USER}')
    local(f'sudo -u postgres createdb --owner {DB_USER} {DB_NAME}')
    with warn_only():
        for parent in Path(LOCAL_BACKUP).resolve().parents:
            local(f'chmod o+x {parent}')
    local(f'chmod o+r {LOCAL_BACKUP}')
    local(f'sudo -u postgres psql dezede -c "DROP SCHEMA public;"')
    local(f'sudo -u postgres pg_restore -e -d {DB_NAME} '
          f'-j 5 "{Path(LOCAL_BACKUP).resolve()}"')
    local(f'sudo -u postgres psql dezede '
          f'-c "ALTER SCHEMA public OWNER TO dezede;"')
    invalidate_cachalot()
Example #28
 def update_shell(self, *args, **kwargs):
     """
     fab update:*                            # copy the whole directory, and copy out all files under script as well
     fab update:bi,gitbranch=develop         # copy the bi directory, using the develop branch
     fab update:bi/unit                      # copy the unit directory under bi; the bi directory must already exist on the remote server
     fab update:manage.py,main.py            # copy the two files manage.py and main.py
     fab update:bi/unit/month_consume.py     # copy month_consume.py under bi/unit; again, bi/unit must already exist
     """
     print self.update_shell.__doc__
     question = "Confirm updating the server? ({self.host}) ".format(self=self)
     if not confirm(question, default=False):
         return
     with warn_only():
         with cd('%s/analysis' % self.shell_dir):
             run('sudo chattr -i -R ../analysis')
             with lcd('%s/analysis' % repository):
                 # copy source files from the local machine to the server
                 # if a branch is specified, pull first and then copy
                 # note: this step discards local changes in the repository, so commit and save them first
                 gitbranch = kwargs.get('gitbranch')
                 if gitbranch:
                     local('git checkout -q master')
                     local('git pull')
                     local('git reset --hard origin/master')
                     local('git branch -D %s 2>/dev/null' % gitbranch)
                     local('git checkout -q -b %s origin/%s' %
                           (gitbranch, gitbranch))
                 # remove local .pyc files so they are not copied to the remote server
                 local('find ./ -type f -name "*.pyc" | xargs sudo rm -f')
                 for file in args:
                     # derive the target directory from the file path
                     info = file.rsplit('/', 1)
                     fdir = '.'
                     if len(info) == 2:
                         fdir = info[0]
                     # copy the file to the server
                     put(file, fdir, use_sudo=True)
                     # files under the script directory also need to be copied out
                     if file == 'script' or fdir == 'script' or file == '*':
                         run('cp script/* ./')
             # clean up .pyc files on the remote server
             run('find ./ -type f -name "*.pyc" | xargs sudo rm -f')
             # the chattr_i parameter decides whether to set the immutable attribute on the directory
             chattr_i = kwargs.get('chattr_i', 'true')
             if chattr_i == 'true':
                 run('sudo chattr +i -R ../analysis')
Example #29
def clean(deltox=None):
    '''Remove temporary files and compiled binaries not under version control.

    Args:
        deltox: Also delete virtual environments used by tox
    '''
    basedir = dirname(__file__)

    print(cyan('delete files not under version control'))
    ignore_filter = r"grep -v downloaded-examples | grep -v '\.c$' |"
    local("bash -c '" +
          flo('cd {basedir}  &&  '
              'git check-ignore **/* | {ignore_filter} xargs rm -rvf') + "'")

    # temporary python files

    print(cyan('\ndelete temp files and dirs for packaging'))
    local(
        flo('rm -rf  '
            '{basedir}/pyopenssl_examples.egg-info/  '
            '{basedir}/dist  '
            '{basedir}/README  '
            '{basedir}/build/  '))

    print(cyan('\ndelete temp files and dirs for editing'))
    local(flo('rm -rf  ' '{basedir}/.cache  ' '{basedir}/.ropeproject  '))

    print(cyan('\ndelete bytecode compiled versions of the python src'))
    # cf. http://stackoverflow.com/a/30659970
    with warn_only():
        local(
            flo('find  {basedir}/pyopenssl_examples  '
                '{basedir}/pyopenssl_examples/tests  ') +
            '\( -name \*pyc -o -name \*.pyo -o -name __pycache__ '
            '-o -name \*.c -o -name \*.o -o -name \*.so \) '
            '-prune '
            '-exec rm -rf {} +')

    print(cyan('\ndelete automatically created and compiled c code'))
    local(flo('rm -f {basedir}/pyopenssl_examples/cffi/example_02_real.c'))
    local(flo('rm -f {basedir}/pyopenssl_examples/cffi/example_*.o'))
    local(flo('rm -f {basedir}/pyopenssl_examples/cffi/example_*.so'))

    if deltox:
        print(cyan('\ndelete tox virtual environments'))
        local(flo('cd {basedir}  &&  rm -rf .tox/'))
Example #30
def put_variables_to(filename, variables):
    file_buffer = BytesIO()
    with warn_only():
        get(filename, file_buffer)

    values = file_buffer.getvalue()
    file_buffer = BytesIO()
    for line in values.splitlines():
        line = line.decode('utf-8')
        if not line.lstrip().startswith('#'):
            variable = line.lstrip().split('=')[0].strip()
            if variable in variables:
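                # note (assumption): replace('=', '=' + value) prepends the
                # new value right after '=', so this seems to expect a remote
                # template with empty values ('KEY=' lines)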
                line = line.replace('=', '=' + str(variables[variable]))

        file_buffer.write(line.encode('utf-8'))
        file_buffer.write(b'\n')

    put(file_buffer, filename)
Example #31
def uplogs():
    '''Download latest version of `all_logs_list.json`, `log_list.json`
    into dir `tests/data/test_ctlog`.
    '''
    basedir = dirname(__file__)
    test_data_dir = flo('{basedir}/tests/data/test_ctlog')

    tmp_dir = tempfile.mkdtemp(prefix='ctutlz_')

    file_items = [
        ('known-logs.html',
         'http://www.certificate-transparency.org/known-logs'),
        ('all_logs_list.json',
         'https://www.gstatic.com/ct/log_list/all_logs_list.json'),
        ('log_list.json', 'https://www.gstatic.com/ct/log_list/log_list.json'),
    ]
    for filename, url in file_items:
        print_msg(flo('\n## {filename}\n'))

        basename, ending = filename.split('.')
        latest = local(flo('cd {test_data_dir}  &&  '
                           'ls {basename}_*.{ending} | sort | tail -n1'),
                       capture=True)
        print(latest)

        tmp_file = flo('{tmp_dir}/{filename}')

        local(flo('wget {url} -O {tmp_file}'))

        with warn_only():
            res = local(flo('diff -u {test_data_dir}/{latest}  {tmp_file}'),
                        capture=True)
        print(res)
        files_differ = bool(res.return_code != 0)
        if files_differ:
            today = datetime.date.today().strftime('%F')
            local(
                flo('cp {tmp_file} {test_data_dir}/{basename}_{today}.{ending}'
                    ))
        else:
            print('no changes')

    print('')
    local(flo('rm -rf {tmp_dir}'))
Example #32
 def _run_chef_on_hosts(self, hosts):
     with hide(*self.hidden_outputs):
         execute(self.chef_manager.push_deployment_data, hosts=hosts)
     with warn_only():
         results = execute(self.chef_manager.run_chef_client, hosts=hosts)
     failed = False
     for machine, result in results.iteritems():
         if result.succeeded:
             print green('Success on host: ' + machine)
         if result.failed:
             failed = True
             fail_log_name = 'calyptos-failure-' + machine + '.log'
             print red('Chef Client failed on ' + machine + ' log available at ' +  fail_log_name)
             with open(fail_log_name, 'w') as file:
                 file.write(result.stdout)
     if failed:
         exit(1)
     execute(self.chef_manager.pull_node_info, hosts=hosts)
     return results
Example #33
def create_heroku_instance(name, username):
    """
    creates and populates a heroku instance
    TODO make sure that we're fully committed git wise before pushing
    """
    with lcd(fabfile_dir):
        local("heroku apps:create {}".format(name))
        git_url = "https://git.heroku.com/{}.git".format(name)
        local("git remote add {0} {1}".format(name, git_url))
        push_to_heroku(name)
        with warn_only():
            # heroku sometimes has memory issues doing migrate
            # it seems to work fine if we just migrate opal first
            # it will later fail because content types haven't
            # been migrated, but that's fine we'll do that later
            local("heroku run --app {0} python manage.py migrate opal".format(
                name))
        for db_command in db_commands(username):
            local("heroku run --app {0} {1}".format(name, db_command))
Example #34
 def _run_chef_on_hosts(self, hosts):
     with hide(*self.hidden_outputs):
         execute(self.chef_manager.push_deployment_data, hosts=hosts)
     with warn_only():
         results = execute(self.chef_manager.run_chef_client, hosts=hosts)
     failed = False
     for machine, result in results.iteritems():
         if result.succeeded:
             print green('Success on host: ' + machine)
         if result.failed:
             failed = True
             fail_log_name = 'calyptus-failure-' + machine + '.log'
             print red('Chef Client failed on ' + machine + ' log available at ' +  fail_log_name)
             with open(fail_log_name, 'w') as file:
                 file.write(result.stdout)
     if failed:
         exit(1)
     execute(self.chef_manager.pull_node_info, hosts=hosts)
     return results
Example #35
def upgrade(instance):
    init(instance)

    print(u'Updating {instance}'.format(instance=instance)) 

    local("git pull --rebase")
    local("git push")

    with virtualenv():
        run('git pull --rebase')
        install_requirements()

        if env.application == 'django':
            with warn_only():
                manage('syncdb --noinput')
                manage('migrate --noinput')
                manage('collectstatic --noinput')

        restart()
Example #36
def create_heroku_instance(name, username):
    """
    creates and populates a heroku instance
    TODO make sure that we're fully committed git wise before pushing
    """
    with lcd(fabfile_dir):
        local("heroku apps:create {}".format(name))
        git_url = "https://git.heroku.com/{}.git".format(name)
        local("git remote add {0} {1}".format(name, git_url))
        push_to_heroku(name)
        with warn_only():
            # heroku sometimes has memory issues doing migrate
            # it seems to work fine if we just migrate opal first
            # it will later fail because content types haven't
            # been migrated, but that's fine we'll do that later
            local("heroku run --app {0} python manage.py migrate opal".format(
                name
            ))
        for db_command in db_commands(username):
            local("heroku run --app {0} {1}".format(name, db_command))
Example #37
def remove_tyr_instance(instance, purge_logs=False):
    """Remove a tyr instance entirely
        * Remove ini file
        * Restart tyr worker
        * Remove tyr log
    """
    # ex.: /etc/tyr.d/fr-bou.ini
    run("rm --force %s/%s.ini" % (env.tyr_base_instances_dir, instance))
    execute(restart_tyr_worker)
    restart_tyr_beat()
    if purge_logs:
        # ex.: /var/log/tyr/northwest.log
        run("rm --force %s/%s.log" % (env.tyr_base_logdir, instance))

    # purge instance in jormungandr database
    execute(db.remove_instance_from_jormun_database, instance)

    # purge the instance database and user
    with warn_only():
        execute(db.remove_postgresql_database, db.instance2postgresql_name(instance))
        execute(db.remove_postgresql_user, db.instance2postgresql_name(instance))
Example #38
    def push_to_remote( self, since ):
        server = self.srv_ctx.server
        local_path = self.srv_ctx.get_abs_local_folder( self.cluster_name,
                                                        self.app_name )
        remote_path = '%s/%s' % ( self.srv_ctx.get_remote_host_ssh_str(),
                                  self.srv_ctx.get_abs_remote_folder( self.cluster_name,
                                                                      self.app_name ) )
        with settings( host_string=server.get_user_host_string(),
                       password=server.password ):
            with lcd( local_path ), warn_only():
                local( 'pwd' )
                result = local( 'LANGUAGE=C hg push %s' % remote_path )
                errors = result.stderr
                if errors:
                    print( red( errors ) )
                    return (result, errors)
                failed = False
                for line in result.splitlines():
                    if line.startswith( 'remote: abort' ) \
                    or line.startswith( 'abort:' ):
                        failed = True
                if failed:
                    term.printDebug( 'output: %s' % result )
                    raise Exception( 'Failed' )

#         (result, errors) = self.repo_list.local.exec_single( 'hg push',
#                                                              prefix='LANGUAGE=C ' )
#         if errors:
#             print( red( errors ) )
#             return (result, errors)
#         failed = False
#         for line in result.splitlines():
#             if line.startswith( 'remote: abort' ) \
#             or line.startswith( 'abort:' ):
#                 failed = True
#         if failed:
#             term.printDebug( 'output: %s' % result )
#             raise Exception( 'Failed' )
#         term.printDebug( 'pushed to remote' )
        return (result, errors)
Example #39
def run():
    '''Run all pyopenssl_examples.'''
    basedir = dirname(__file__)
    cmds = [
        'pyopenssl_examples/bio/bio_connect.py  editorconfig.org',
        'pyopenssl_examples/cffi/example_01_simple.py',
        'pyopenssl_examples/cffi/example_02_real_build.py',
        'pyopenssl_examples/cffi/example_02_real_run.py',
        'pyopenssl_examples/cffi/openssl/bio_01_connect_build.py',
        'pyopenssl_examples/cffi/openssl/bio_01_connect.py  editorconfig.org',
        # 'pyopenssl_examples/cffi/openssl/bio_01_connect.py  --help',
        'pyopenssl_examples/cffi/openssl/bio_02_tls_build.py',
        'pyopenssl_examples/cffi/openssl/bio_02_tls.py  editorconfig.org',
        'pyopenssl_examples/cffi/openssl/bio_03_scts_build.py',
        'pyopenssl_examples/cffi/openssl/bio_03_scts.py  ritter.vg',
    ]
    with warn_only():
        for cmd in cmds:
            print(cyan(flo('\n## Run `{cmd}`\n')))
            local(
                flo('cd {basedir}  &&  '
                    "PYTHONPATH='.'  .tox/py36/bin/python  {cmd}"))
Example #40
def remove_tyr_instance(instance, purge_logs=False):
    """Remove a tyr instance entirely
        * Remove ini file
        * Restart tyr worker
        * Remove tyr log
    """
    # ex.: /etc/tyr.d/fr-bou.ini
    run("rm --force %s/%s.ini" % (env.tyr_base_instances_dir, instance))
    execute(restart_tyr_worker)
    restart_tyr_beat()
    if purge_logs:
        # ex.: /var/log/tyr/northwest.log
        run("rm --force %s/%s.log" % (env.tyr_base_logdir, instance))

    # purge instance in jormungandr database
    execute(db.remove_instance_from_jormun_database, instance)

    # purge the instance database and user
    with warn_only():
        execute(db.remove_postgresql_database,
                db.instance2postgresql_name(instance))
        execute(db.remove_postgresql_user,
                db.instance2postgresql_name(instance))
Example #41
def build_project_env(for_deployment=False):
    '''
    Assumes code has already been retrieved from SVN and requirements installed in the virtualenv in env.
    '''
    # Install or update compass and compile sass files
    # Note that nemidjdev will always contain a copy of the latest css files in
    # webapps/nemi/nemi_project/static/styles if you don't have Ruby installed.
    with lcd('compass'):
        with shell_env(GEM_HOME=os.environ.get('PWD', '') + '/compass/Gem'):
            with warn_only():
                available = local('gem list -i compass', capture=True)
            if available == 'true':
                local('gem update -i Gem compass')
            else:
                local('gem install -i Gem compass -v 0.12.7')
        local('./compass.sh compile')

    # Collect static files
    if for_deployment:
        execute_django_command(
            'collectstatic --settings=nemi_project.settings',
            for_deployment,
            force_overwrite=True)
Example #42
def deploy_files(config_dir):
    config_instance = Config(config_dir)

    with cd(config_instance.deploy_dir):
        if not exists(config_instance.project_name):
            clone_project(config_dir)
            return

    with cd(config_instance.project_dir):
        run('git fetch origin')

        branch_exists = True
        with warn_only():
            result = run('git rev-parse --verify %s' % config_instance.branch)
            if result.failed:
                branch_exists = False

        if branch_exists:
            run('git checkout %s' % config_instance.branch)
        else:
            run('git checkout -b {0} origin/{0}'.format(
                config_instance.branch))
        run('git merge origin/%s' % config_instance.branch)
Example #43
def setup():
    """
    Create the container structure if it does not exist
    Create the virtual environment if it does not exist
    Upload configuration files for services
    """
    _dynamic_env()
    with warn_only():
        env.run("mkdir -p %(container_path)s" % env)
        env.run("mkdir -p %(media)s" % env)
        env.run("mkdir -p %(container_path)s/media/ckeditor" % env)
        env.run("mkdir -p %(container_path)s/static" % env)
        env.run("mkdir -p %(container_path)s/logs" % env)
        env.run("mkdir -p %(container_path)s/etc" % env)
        env.run("mkdir -p %(container_path)s/tmp" % env)
        env.run("mkdir -p %(source_path)s" % env)

        clearsessions = "%(python)s %(source_path)s/manage.py clearsessions" \
            % env
        crontab_update("0 0 */1 * * %s > /dev/null 2>&1" % clearsessions,
                       "clearsessions_%(project_fullname)s" % env)

    if not exists(env.env_path):
        env.run("virtualenv %(env_path)s --python=%(bin_python)s" % env)

    etc_out_folder = "%s/etc" % env.container_path
    env.run("mkdir -p %s" % etc_out_folder)
    etc_in_folder = os.path.join(os.path.dirname(__file__), 'etc')

    for filename in os.listdir(etc_in_folder):
        conf_out = os.path.join(etc_out_folder, filename)
        upload_template(filename,
                        conf_out,
                        template_dir=etc_in_folder,
                        context={'env': env},
                        use_jinja=True)
Example #44
def setup_db():
    """Set the main databases and users
    """
    require.postgres.server()
    require.deb.package("postgis")
    with warn_only():
        status_line = run(r'dpkg -s postgis | grep "Suggests\|Recommends"')
    for package in status_line.split():
        if "postgis" in package and "postgres" in package and not "doc" in package:
            require.deb.package(package.replace(",", ""))

    # tyr db creation
    create_postgresql_user(env.tyr_postgresql_user, env.tyr_postgresql_password)
    create_postgresql_database(env.tyr_postgresql_database, env.tyr_postgresql_user)
    postgis_initdb(env.tyr_postgresql_database)

    # cities db creation
    if env.use_cities:
        create_postgresql_user(env.cities_db_user, env.cities_db_password)
        create_postgresql_database(env.cities_db_name, env.cities_db_user)
        postgis_initdb(env.cities_db_name)

    # read_only user db creation
    create_postgresql_user(env.postgres_read_only_user, env.postgres_read_only_password)
Example #45
 def test_warn_only_does_not_imply_hide_everything(self):
     with warn_only():
         run("ls /simple")
         assert sys.stdout.getvalue().strip() != ""
Example #46
 def test_warn_only_is_same_as_settings_warn_only(self):
     with warn_only():
         eq_(run("hrm").failed, True)
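
The two tests above (apparently from Fabric's own test suite) pin down the semantics the rest of these examples rely on: warn_only() behaves like settings(warn_only=True), so a failing command marks its result as failed instead of aborting the task, and it does not hide any output. A minimal usage sketch under that assumption (command_exists is a hypothetical helper, Fabric 1.x API):

from fabric.api import local, warn_only

def command_exists(name):
    # with warn_only(), a non-zero exit code sets result.failed
    # instead of aborting the whole fab run
    with warn_only():
        result = local('which {}'.format(name), capture=True)
    return not result.failed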
Example #49
def create():
    """
    Create a new virtual environment for a project.
    Pulls the project's repo from version control, adds system-level
    configs for the project, and initialises the database with the
    live host.
    """

    #Create virtualenv
    sudo("mkdir -p %s" % env.venv_home, True)
    sudo("chown %s %s" % (env.user, env.venv_home), True)
    sudo("chown -R %s %s" % (env.user, env.python_dir), True)

    upload_template_and_reload('pgbouncer')
    upload_template_and_reload('pgbouncer_settings')
    upload_template_and_reload('pgbouncer_users')

    sudo("service pgbouncer restart")

    with cd(env.venv_home):
        if exists(env.proj_name):
            prompt = raw_input("\nVirtualenv exists: %s\nWould you like to replace it? (yes/no) " % env.proj_name)
            if prompt.lower() != "yes":
                print("\nAborting!")
                return False
            remove()
        run("virtualenv %s -p %s/bin/python3.3" % (env.proj_name, env.python_dir))
        vcs = "git" if env.git else "hg"
        run("%s clone %s %s" % (vcs, env.repo_url, env.proj_path))

    # Create DB and DB user.

    pw = db_pass()
    user_sql_args = (env.proj_name, pw.replace("'", "\'"))
    with warn_only():
        user_sql = "CREATE USER %s WITH ENCRYPTED PASSWORD '%s';" % user_sql_args
        psql(user_sql, show=False)
        psql("ALTER USER %s CREATEDB;" % env.proj_name)
        psql("ALTER USER %s SUPERUSER;" % env.proj_name)
    shadowed = "*" * len(pw)
    print_command(user_sql.replace("'%s'" % pw, "'%s'" % shadowed))

    #postgres("createuser --createdb %s;" % env.proj_name)
    #postgres("createdb %s;" % env.proj_name)
    with warn_only():
        psql("CREATE DATABASE %s WITH OWNER %s ENCODING = 'UTF8' "
             "LC_CTYPE = '%s' LC_COLLATE = '%s' TEMPLATE template0;" %
             (env.proj_name, env.proj_name, env.locale, env.locale))
        psql("CREATE EXTENSION postgis;", True, True)
        psql("CREATE EXTENSION postgis_topology;", True, True)

    uploadSSLCerts()

    # Set up project.
    upload_template_and_reload("settings")
    with project():
        if env.reqs_path:
            pip("setuptools")
            pip("-r %s/%s --allow-all-external" % (env.proj_path, env.reqs_path))
        pip("gunicorn setproctitle south psycopg2 python3-memcached gevent tornado")
        manage("syncdb --noinput")
        # prompts = []
        # prompts += expect('Password: ', 'waverly4025')
        # prompts += expect('Password (again): ','waverly4025')

        #with expecting(prompts):
        with warn_only():
            manage("createsuperuser --user buddyup --email [email protected]")


        manage("migrate --noinput")
        #python("from django.conf import settings;"
        #      "from django.contrib.sites.models import Site;"
        #     "site, _ = Site.objects.get_or_create(id=settings.SITE_ID);"
        #    "site.domain = '" + env.live_host + "';"
        #   "site.save();")
        #shadowed = "*" * len(pw)
        #print_command(user_py.replace("'%s'" % pw, "'%s'" % shadowed))

    sudo("mkdir -p %s/logs" % env.venv_path)
    sudo("touch %s/logs/gunicorn_supervisor.log" % env.venv_path)
    sudo("mkdir %s/logs/celery" % env.venv_path)
    sudo("touch %s/logs/celery/worker.log" % env.venv_path)

    return True
Example #50
 def verify_repository(self):
     with hide('running'), warn_only(), hide('everything'):
         result = api.run('git status')
     return result.return_code == 0
Example #51
def initialise(instance):
    init(instance)
    if not confirm(red('Initialising a site will CHANGE THE DATABASE PASSWORD/SECRET KEY. Are you SURE you wish to continue?'), default=False):
        exit()

    env.secrets = {
        'db': _random(),
        'key': _random(64),
    }
    run(u'mkdir -p {env.virtualenv}/bin'.format(env=env))
    run(u'mkdir -p {}'.format(env.directory))
    generate_envvars()
    create_var_file()
    
    if not exists(u'{env.virtualenv}/bin/python'.format(env=env)):
        run(u'virtualenv {env.virtualenv}'.format(env=env))
        with virtualenv():
            run('pip install -U pip distribute')
    with cd(env.directory):
        with settings(warn_only=True):
            if not exists(u'{env.directory}/.git'.format(env=env)):
                run(u'git clone [email protected]:linkscreative/{env.project}/{env.repo}.git .clone'.format(
                    env=env
                ))
                run(u'rsync -avz .clone/ {env.directory}/'.format(env=env))
                run(u'rm -rf .clone')
            else:
                run('git pull --rebase')

    install_requirements()
    setup_database()
    

    if not hasattr(env, 'domains'):
        raise Exception('Need some domains!')

    nginx_config = {
        'type': 'nginx',
        'application': env.application,
        'parent': env.nginx_parent,
        'default': env.nginx_default,
        'ip': getattr(env, 'listen_ip', None),
    }
    if env.application == 'django':
        os.environ['SKIP_BROKER'] = 'y'
        from django.conf import settings as djsettings
        nginx_config['media_url'] = djsettings.MEDIA_URL
        nginx_config['static_url'] = djsettings.STATIC_URL
        nginx_config['uwsgi_socket'] = env.uwsgi_socket

    env.site = {
        'instances': [
            {
                'name': env.instance,
                'domains': env.domains,
            },
        ],
        'configs': [
            nginx_config,
        ],
    }
    
    conf_nginx()
    if env.application == 'django':
        with warn_only():
            manage('syncdb --noinput')
            manage('collectstatic --noinput')

        env.site['configs'].append({
            'type': 'uwsgi',
            'application': 'django',
            'parent': env.uwsgi_parent,
            'app': env.app,
            'env': env.envvars,
            'virtualenv': env.virtualenv,
            'instance': env.instance,
            'celery': env.celery,
            'memcached': env.memcached,
            'uwsgi_socket': env.uwsgi_socket,
            'fastrouter': True,
            'secure': env.uwsgi_secure,
        })
        conf_uwsgi()
        

    for k, v in env.envvars.items():
        print(green('{0}: "{1}"'.format(k, v.replace('"', '\"'))))
Example #52
 def fin():
     with warn_only():
         local('rm -rf {}'.format(input_local_path))
         local('rm -rf {}'.format(ds_path))
Example #53
def _backup_file(file_path):
    with warn_only():
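        # '2> /dev/null || :' silences and ignores a missing file_path,
        # so a .bak copy is only made when the file already exists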
        env.run('cp %s %s.bak 2> /dev/null || :' % (file_path, file_path))
Example #54
 def verify_repository(self):
     with hide("running"), warn_only(), hide("everything"):
         result = api.run("git status")
     return result.return_code == 0
Example #55
def clean_isotope_storage(sm_config):
    with warn_only():
        local('rm -rf {}'.format(sm_config['isotope_storage']['path']))
Example #56
 def get_workers():
     with warn_only():
         return run('ps -eo pid,command | grep [t]yr_worker')
Example #57
 def clear_data_dirs(self):
     with warn_only():
         local('rm -rf {}'.format(self.data_dir_path))
Example #58
def test_search_job_imzml_example(get_compute_img_metrics_mock, filter_sf_metrics_mock,
                                  post_images_to_annot_service_mock, MolDBServiceWrapperMock, MolDBServiceWrapperMock2,
                                  sm_config, create_fill_sm_database, es_dsl_search, clean_isotope_storage):
    init_mol_db_service_wrapper_mock(MolDBServiceWrapperMock)
    init_mol_db_service_wrapper_mock(MolDBServiceWrapperMock2)

    get_compute_img_metrics_mock.return_value = lambda *args: (0.9, 0.9, 0.9, [100.], [0], [10.])
    filter_sf_metrics_mock.side_effect = lambda x: x

    url_dict = {
        'iso_image_ids': ['iso_image_1', None, None, None]
    }
    post_images_to_annot_service_mock.return_value = {
        35: url_dict,
        44: url_dict
    }

    db = DB(sm_config['db'])

    try:
        ds_config_str = open(ds_config_path).read()
        upload_dt = datetime.now()
        ds_id = '2000-01-01_00h00m'
        db.insert(Dataset.DS_INSERT, [{
            'id': ds_id,
            'name': test_ds_name,
            'input_path': input_dir_path,
            'upload_dt': upload_dt,
            'metadata': '{}',
            'config': ds_config_str,
            'status': DatasetStatus.QUEUED,
            'is_public': True,
            'mol_dbs': ['HMDB-v4'],
            'adducts': ['+H', '+Na', '+K'],
            'ion_img_storage': 'fs'
        }])

        img_store = ImageStoreServiceWrapper(sm_config['services']['img_service_url'])
        job = SearchJob(img_store=img_store)
        job._sm_config['rabbitmq'] = {}  # avoid talking to RabbitMQ during the test
        ds = Dataset.load(db, ds_id)
        job.run(ds)

        # dataset table asserts
        rows = db.select('SELECT id, name, input_path, upload_dt, status from dataset')
        input_path = join(dirname(__file__), 'data', test_ds_name)
        assert len(rows) == 1
        assert rows[0] == (ds_id, test_ds_name, input_path, upload_dt, DatasetStatus.FINISHED)

        # ms acquisition geometry asserts
        rows = db.select('SELECT acq_geometry from dataset')
        assert len(rows) == 1
        assert rows[0][0] == ds.get_acq_geometry(db)
        assert rows[0][0] == {
            ACQ_GEOMETRY_KEYS.LENGTH_UNIT: 'nm',
            ACQ_GEOMETRY_KEYS.AcqGridSection.section_name: {
                ACQ_GEOMETRY_KEYS.AcqGridSection.REGULAR_GRID: True,
                ACQ_GEOMETRY_KEYS.AcqGridSection.PIXEL_COUNT_X : 3,
                ACQ_GEOMETRY_KEYS.AcqGridSection.PIXEL_COUNT_Y : 3,
                ACQ_GEOMETRY_KEYS.AcqGridSection.PIXEL_SPACING_X : 100,
                ACQ_GEOMETRY_KEYS.AcqGridSection.PIXEL_SPACING_Y : 100
            },
            ACQ_GEOMETRY_KEYS.PixelSizeSection.section_name: {
                ACQ_GEOMETRY_KEYS.PixelSizeSection.REGULAR_SIZE: True,
                ACQ_GEOMETRY_KEYS.PixelSizeSection.PIXEL_SIZE_X : 100,
                ACQ_GEOMETRY_KEYS.PixelSizeSection.PIXEL_SIZE_Y : 100
            }
        }

        # job table asserts
        rows = db.select('SELECT db_id, ds_id, status, start, finish from job')
        assert len(rows) == 1
        db_id, ds_id, status, start, finish = rows[0]
        assert (db_id, ds_id, status) == (0, '2000-01-01_00h00m', 'FINISHED')
        assert start < finish

        # image metrics asserts
        rows = db.select(('SELECT db_id, sf, adduct, stats, iso_image_ids '
                          'FROM iso_image_metrics '
                          'ORDER BY sf, adduct'))

        assert rows[0] == (0, 'C12H24O', '+K', {'chaos': 0.9, 'spatial': 0.9, 'spectral': 0.9,
                                                'total_iso_ints': [100.], 'min_iso_ints': [0], 'max_iso_ints': [10.]},
                           ['iso_image_1', None, None, None])
        assert rows[1] == (0, 'C12H24O', '+Na', {'chaos': 0.9, 'spatial': 0.9, 'spectral': 0.9,
                                                 'total_iso_ints': [100.], 'min_iso_ints': [0], 'max_iso_ints': [10.]},
                           ['iso_image_1', None, None, None])

        time.sleep(1)  # Waiting for ES
        # ES asserts
        ds_docs = es_dsl_search.query('term', _type='dataset').execute().to_dict()['hits']['hits']
        assert 1 == len(ds_docs)
        ann_docs = es_dsl_search.query('term', _type='annotation').execute().to_dict()['hits']['hits']
        assert len(ann_docs) == len(rows)
        for doc in ann_docs:
            assert doc['_id'].startswith(ds_id)

    finally:
        db.close()
        with warn_only():
            local('rm -rf {}'.format(data_dir_path))