Example #1
    ds_man.delete(ds, del_raw_data=del_raw, force=force)


@post('/v1/datasets/<ds_id>/add-optical-image')
@sm_modify_dataset('ADD_OPTICAL_IMAGE')
def add_optical_image(ds_man, ds, params):
    img_id = params['url'].split('/')[-1]
    ds_man.add_optical_image(ds, img_id, params['transform'])


@post('/v1/datasets/<ds_id>/del-optical-image')
@sm_modify_dataset('DEL_OPTICAL_IMAGE')
def del_optical_image(ds_man, ds, params):
    ds_man.del_optical_image(ds)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='SM Engine REST API')
    parser.add_argument('--config',
                        dest='config_path',
                        default='conf/config.json',
                        type=str,
                        help='SM config path')
    args = parser.parse_args()
    SMConfig.set_path(args.config_path)

    init_loggers(SMConfig.get_conf()['logs'])
    logger = logging.getLogger(name='api')
    logger.info('Starting SM api')
    run(**SMConfig.get_conf()['bottle'])
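
For illustration only, a client could call the add-optical-image endpoint above with a plain HTTP POST. The host, port, dataset id and image URL below are placeholders, and it is assumed that the sm_modify_dataset decorator (not shown here) parses the JSON body into params:

import requests  # hypothetical client code, not part of the SM engine

api_base = 'http://localhost:5123/v1'  # placeholder host/port; real values come from the 'bottle' config section
ds_id = '2017-05-17_19h49m04s'         # placeholder dataset id
payload = {
    # the handler takes the last URL segment as img_id
    'url': 'http://localhost:4201/fs/iso_images/abc123',
    'transform': [[1, 0, 0], [0, 1, 0], [0, 0, 1]],
}
requests.post('{}/datasets/{}/add-optical-image'.format(api_base, ds_id), json=payload)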
Example #2
    "AND ROUND(sigma::numeric, 6) = %s AND charge = %s AND pts_per_mz = %s")

if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description='Exporting search results into a csv file')
    parser.add_argument('ds_name', type=str, help='Dataset name')
    parser.add_argument('csv_path', type=str, help='Path for the csv file')
    parser.add_argument('--config',
                        dest='sm_config_path',
                        type=str,
                        help='SM config path')
    parser.set_defaults(
        sm_config_path=path.join(proj_root(), 'conf/config.json'))
    args = parser.parse_args()

    SMConfig.set_path(args.sm_config_path)
    db = DB(SMConfig.get_conf()['db'])

    ds_config = db.select_one(DS_CONFIG_SEL, args.ds_name)[0]
    isotope_gen_config = ds_config['isotope_generation']
    charge = '{}{}'.format(isotope_gen_config['charge']['polarity'],
                           isotope_gen_config['charge']['n_charges'])
    export_rs = db.select(EXPORT_SEL, ds_config['database']['name'],
                          args.ds_name, isotope_gen_config['isocalc_sigma'],
                          charge, isotope_gen_config['isocalc_pts_per_mz'])

    header = '\t'.join(['formula_db', 'db_ids', 'sf_name', 'sf', 'adduct']) + '\t' + '\t'.join(metrics) + '\t' + \
             '\t'.join(['fdr', 'isocalc_sigma', 'isocalc_charge', 'isocalc_pts_per_mz', 'first_peak_mz']) + '\n'
    with open(args.csv_path, 'w') as f:
        f.write(header)
        f.writelines(['\t'.join(map(str, row)) + '\n' for row in export_rs])
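
Despite the csv_path argument name, the file written above is tab-separated. As a quick illustration (pandas is an assumption here, it is not used by the export script itself), the result can be loaded back for inspection:

import pandas as pd

# The export writes a header row followed by tab-separated values, so it reads as a TSV.
df = pd.read_csv('results.csv', sep='\t')  # placeholder path
print(df[['sf', 'adduct', 'fdr']].head())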
Example #3
        if transform is not None:
            ds = api.Dataset.load(db=db, ds_id=id)
            img_store = ImageStoreServiceWrapper(
                config['services']['img_service_url'])
            img_store.storage_type = 'fs'
            sm = SMapiDatasetManager(db=db,
                                     es=ESExporter(db),
                                     image_store=img_store,
                                     mode='queue')
            ds_opt_img_query = 'SELECT optical_image from dataset {}'.format(
                'WHERE id = %s')
            img_id = db.select(ds_opt_img_query, params=(ds.id, ))
            sm._add_thumbnail_optical_image(ds, f"{img_id[0][0]}", transform)


SMConfig.set_path('conf/config.json')
sm_config = SMConfig.get_conf()
set_metadata_thumbnail(DB(sm_config['db']), sm_config, 'Untreated_3_434')

if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Updates thumbnail for a provided dataset")
    parser.add_argument('--ds-name',
                        dest='ds_name',
                        type=str,
                        help="Process specific dataset given by its name")
    parser.add_argument('--config',
                        dest='sm_config_path',
                        default='conf/config.json',
                        type=str,
                        help='SM config path')
    conf = SMConfig.get_conf()
    if ds_mask == '_all_':
        _reindex_all(conf)
    else:
        db = DB(conf['db'])
        es_exp = ESExporter(db)

        if ds_id:
            rows = db.select("select id, name, config from dataset where id = '{}'".format(ds_id))
        elif ds_mask:
            rows = db.select("select id, name, config from dataset where name like '{}%'".format(ds_mask))
        else:
            rows = []

        _reindex_datasets(rows, es_exp)
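
The two dataset queries above interpolate ds_id and ds_mask straight into the SQL string. Since DB.select accepts a params argument elsewhere in these examples, a parameterized variant (a sketch, not the original code) would avoid quoting problems:

# Sketch only: the same lookups expressed with query parameters instead of str.format
if ds_id:
    rows = db.select('select id, name, config from dataset where id = %s', params=(ds_id,))
elif ds_mask:
    rows = db.select('select id, name, config from dataset where name like %s', params=(ds_mask + '%',))
else:
    rows = []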


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Reindex dataset results')
    parser.add_argument('--config', default='conf/config.json', help='SM config path')
    parser.add_argument('--ds-id', dest='ds_id', default='', help='DS id')
    parser.add_argument('--ds-name', dest='ds_name', default='', help='DS name prefix mask (_all_ for all datasets)')
    args = parser.parse_args()

    SMConfig.set_path(args.config)
    init_loggers(SMConfig.get_conf()['logs'])
    logger = logging.getLogger('engine')

    reindex_results(args.ds_id, args.ds_name)
    def clear_data_dirs(self):
        with warn_only():
            local('rm -rf {}'.format(self.data_dir_path))


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Scientific tests runner')
    parser.add_argument('-r', '--run', action='store_true', help='compare current search results with previous')
    parser.add_argument('-s', '--save', action='store_true', help='store current search results')
    parser.add_argument('--config', dest='sm_config_path',
                        default=join(proj_root(), 'conf/config.json'),
                        help='path to sm config file')
    parser.add_argument('--mock-img-store', action='store_true', help='whether to mock the Image Store Service')
    args = parser.parse_args()

    SMConfig.set_path(args.sm_config_path)
    init_loggers(SMConfig.get_conf()['logs'])

    sci_tester = SciTester(args.sm_config_path)

    if args.run:
        run_search_successful = False
        search_results_different = False
        try:
            sci_tester.run_search(args.mock_img_store)
            run_search_successful = True
            search_results_different = sci_tester.search_results_are_different()
        except Exception as e:
            if not run_search_successful:
                raise Exception('Search was not successful!') from e
            elif search_results_different:
from os.path import join
from pprint import pprint
from subprocess import check_call

import numpy as np
from fabric.api import local
from fabric.context_managers import warn_only

from sm.engine.db import DB
from sm.engine.util import proj_root, SMConfig


# def sm_config():
#     with open(join(proj_root(), 'conf/config.json')) as f:
#         return json.load(f)

SMConfig.set_path(join(proj_root(), 'conf/config.json'))
sm_config = SMConfig.get_conf()

ds_name = 'sci_test_spheroid_12h'
data_dir_path = join(SMConfig.get_conf()['fs']['base_path'], ds_name)
input_dir_path = join(proj_root(), 'test/data/sci_test_search_job_spheroid_dataset')
ds_config_path = join(input_dir_path, 'config.json')

SEARCH_RES_SELECT = ("select sf, adduct, stats "
                     "from iso_image_metrics s "
                     "join formula_db sf_db on sf_db.id = s.db_id "
                     "join agg_formula f on f.id = s.sf_id AND sf_db.id = f.db_id "
                     "join job j on j.id = s.job_id "
                     "join dataset ds on ds.id = j.ds_id "
                     "where ds.name = %s and sf_db.name = %s "
                     "ORDER BY sf, adduct ")
Example #7
from os.path import join
from pprint import pprint
from subprocess import check_call

import numpy as np
from fabric.api import local
from fabric.context_managers import warn_only

from sm.engine.db import DB
from sm.engine.util import proj_root, SMConfig

# def sm_config():
#     with open(join(proj_root(), 'conf/config.json')) as f:
#         return json.load(f)

SMConfig.set_path(join(proj_root(), 'conf/config.json'))
sm_config = SMConfig.get_conf()

ds_name = 'sci_test_spheroid_12h'
data_dir_path = join(SMConfig.get_conf()['fs']['base_path'], ds_name)
input_dir_path = join(proj_root(),
                      'test/data/sci_test_search_job_spheroid_dataset')
ds_config_path = join(input_dir_path, 'config.json')

SEARCH_RES_SELECT = (
    "select sf, adduct, stats "
    "from iso_image_metrics s "
    "join formula_db sf_db on sf_db.id = s.db_id "
    "join agg_formula f on f.id = s.sf_id AND sf_db.id = f.db_id "
    "join job j on j.id = s.job_id "
    "join dataset ds on ds.id = j.ds_id "
Example #8
from pathlib import Path
from unittest.mock import MagicMock

import pytest
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search
from fabric.api import local
from pysparkling import Context
import pandas as pd

from sm.engine.db import DB
from sm.engine.mol_db import MolecularDB
from sm.engine.util import proj_root, SMConfig, init_loggers
from sm.engine.es_export import ESIndexManager

TEST_CONFIG_PATH = 'conf/test_config.json'
SMConfig.set_path(Path(proj_root()) / TEST_CONFIG_PATH)

init_loggers(SMConfig.get_conf()['logs'])


@pytest.fixture(scope='session')
def sm_config():
    return SMConfig.get_conf(update=True)


class SparkContext(Context):
    def parallelize(self, x, numSlices=None):
        return super().parallelize(x, numPartitions=numSlices)
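
The SparkContext subclass above only renames a keyword argument: pysparkling's Context.parallelize expects numPartitions, while test code written against the PySpark API passes numSlices. A quick illustration of the shim:

# Illustrative only: a PySpark-style call running on the pysparkling shim
sc = SparkContext()
rdd = sc.parallelize([1, 2, 3, 4], numSlices=2)
assert rdd.collect() == [1, 2, 3, 4]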


@pytest.fixture(scope='module')
UPD_DATASET_THUMB_OPTICAL_IMAGE = 'update dataset set thumbnail = %s WHERE id = %s'
SEL_OPTICAL_IMAGE_THUMBNAIL = 'SELECT thumbnail FROM dataset WHERE id = %s'

def set_metadata_thumbnail(db, config, ds_name):
    ds_thumb_query = 'SELECT id, transform, thumbnail from dataset {}'.format('WHERE name = %s' if ds_name != ALL_DS_MASK else '')
    for id, transform, thumbnail in db.select(ds_thumb_query, params=(ds_name,) if ds_name else None):
        if transform is not None:
            ds = api.Dataset.load(db=db, ds_id=id)
            img_store = ImageStoreServiceWrapper(config['services']['img_service_url'])
            img_store.storage_type = 'fs'
            sm = SMapiDatasetManager(db=db, es=ESExporter(db), image_store=img_store, mode='queue')
            ds_opt_img_query = 'SELECT optical_image from dataset {}'.format('WHERE id = %s')
            img_id = db.select(ds_opt_img_query, params=(ds.id,))
            sm._add_thumbnail_optical_image(ds, f"{img_id[0][0]}", transform)

SMConfig.set_path('conf/config.json')
sm_config = SMConfig.get_conf()
set_metadata_thumbnail(DB(sm_config['db']), sm_config, 'Untreated_3_434')

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Updates thumbnail for a provided dataset")
    parser.add_argument('--ds-name', dest='ds_name', type=str, help="Process specific dataset given by its name")
    parser.add_argument('--config', dest='sm_config_path', default='conf/config.json', type=str, help='SM config path')
    args = parser.parse_args()

    SMConfig.set_path(args.sm_config_path)
    sm_config = SMConfig.get_conf()

    db = DB(sm_config['db'])

    if args.ds_name:
def setupQueue(sm_config_path):
    SMConfig.set_path(sm_config_path)
    rabbitmq_config = SMConfig.get_conf()['rabbitmq']
    return QueuePublisher(rabbitmq_config, 'sm_annotate')
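
For completeness, an illustrative call; the publish API of QueuePublisher is not shown here, so only construction is sketched:

# Illustrative only: build a publisher for the 'sm_annotate' queue from the default config path
publisher = setupQueue('conf/config.json')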