def test_add_ds(self, test_db, sm_config, ds_config):
    """Adding a dataset through the manager persists its row in the `dataset` table."""
    queue_mock = MagicMock(spec=QueuePublisher)
    es_mock = MagicMock(spec=ESExporter)
    db = DB(sm_config['db'])
    try:
        ds_man = create_ds_man(sm_config, db=db, es=es_mock,
                               action_queue=queue_mock, sm_api=False)
        # Fixed test fixture values for the new dataset
        ds_id = '2000-01-01'
        ds_name = 'ds_name'
        input_path = 'input_path'
        upload_dt = datetime.now()
        metadata = {}
        ds = create_ds(ds_id=ds_id, ds_name=ds_name, input_path=input_path,
                       upload_dt=upload_dt, metadata=metadata, ds_config=ds_config)

        ds_man.add(ds, search_job_factory=self.SearchJob)

        # Verify the stored row matches exactly what was submitted
        DS_SEL = 'select name, input_path, upload_dt, metadata, config from dataset where id=%s'
        stored_row = db.select_one(DS_SEL, params=(ds_id,))
        assert stored_row == (ds_name, input_path, upload_dt, metadata, ds_config)
    finally:
        db.close()
def fill_db(test_db, sm_config, ds_config):
    """Seed the test DB with one finished dataset plus its job, sum formula and metrics rows."""
    ds_id = '2000-01-01'
    upload_dt = '2000-01-01 00:00:00'
    metadata = {"meta": "data"}
    db = DB(sm_config['db'])
    # One finished, public dataset row (explicit column list matches current schema)
    db.insert(
        'INSERT INTO dataset (id, name, input_path, upload_dt, metadata, config, '
        'status, is_public, mol_dbs, adducts) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)',
        rows=[(ds_id, 'ds_name', 'input_path', upload_dt,
               json.dumps(metadata), json.dumps(ds_config),
               DatasetStatus.FINISHED, True, ['HMDB-v4'], ['+H'])])
    # A single job against db_id 0
    db.insert("INSERT INTO job (id, db_id, ds_id) VALUES (%s, %s, %s)",
              rows=[(0, 0, ds_id)])
    # One sum formula referenced by the metrics row below
    db.insert("INSERT INTO sum_formula (id, db_id, sf) VALUES (%s, %s, %s)",
              rows=[(1, 0, 'H2O')])
    # Iso image metrics for H2O +H pointing at two pre-stored image ids
    db.insert("INSERT INTO iso_image_metrics (job_id, db_id, sf, adduct, iso_image_ids) "
              "VALUES (%s, %s, %s, %s, %s)",
              rows=[(0, 0, 'H2O', '+H', ['iso_image_1_id', 'iso_image_2_id'])])
    db.close()
def fill_db(test_db, sm_config, ds_config):
    """Seed the test DB with one finished dataset plus its job, sum formula and metrics rows.

    Uses the positional 7-column `dataset` insert (legacy schema variant).
    """
    ds_id = '2000-01-01'
    upload_dt = '2000-01-01 00:00:00'
    metadata = {"meta": "data"}
    db = DB(sm_config['db'])
    # Dataset row: relies on column order of the legacy 7-column table
    db.insert('INSERT INTO dataset values (%s, %s, %s, %s, %s, %s, %s)',
              rows=[(ds_id, 'ds_name', 'input_path', upload_dt,
                     json.dumps(metadata), json.dumps(ds_config),
                     DatasetStatus.FINISHED)])
    # A single job against db_id 0
    db.insert("INSERT INTO job (id, db_id, ds_id) VALUES (%s, %s, %s)",
              rows=[(0, 0, ds_id)])
    # One sum formula referenced by the metrics row below
    db.insert("INSERT INTO sum_formula (id, db_id, sf) VALUES (%s, %s, %s)",
              rows=[(1, 0, 'H2O')])
    # Iso image metrics for H2O +H pointing at two pre-stored image ids
    db.insert("INSERT INTO iso_image_metrics (job_id, db_id, sf, adduct, iso_image_ids) "
              "VALUES (%s, %s, %s, %s, %s)",
              rows=[(0, 0, 'H2O', '+H', ['iso_image_1_id', 'iso_image_2_id'])])
    db.close()
def test_add_ds(self, test_db, sm_config, ds_config):
    """A newly added dataset must be retrievable from the `dataset` table unchanged."""
    action_queue = MagicMock(spec=QueuePublisher)
    es = MagicMock(spec=ESExporter)
    db = DB(sm_config['db'])
    try:
        manager = create_ds_man(sm_config, db=db, es=es,
                                action_queue=action_queue, sm_api=False)
        # Build a dataset from fixed fixture values
        ds = create_ds(ds_id='2000-01-01',
                       ds_name='ds_name',
                       input_path='input_path',
                       upload_dt=datetime.now(),
                       metadata={},
                       ds_config=ds_config)

        manager.add(ds, search_job_factory=self.SearchJob)

        # The persisted row must round-trip every submitted field
        DS_SEL = 'select name, input_path, upload_dt, metadata, config from dataset where id=%s'
        expected = (ds.name, ds.input_path, ds.upload_dt, ds.metadata, ds_config)
        assert db.select_one(DS_SEL, params=(ds.id,)) == expected
    finally:
        db.close()
def _callback(self, msg):
    """Process a single queue message: load the dataset and run the requested action.

    The DB connection is always closed, even when processing raises.
    """
    logger.info(" SM daemon received a message: {}".format(msg))
    self._post_to_slack('new', " [v] Received: {}".format(json.dumps(msg)))
    db = DB(self._sm_config['db'])
    try:
        img_store = ImageStoreServiceWrapper(
            self._sm_config['services']['img_service_url'])
        status_queue = QueuePublisher(config=self._sm_config['rabbitmq'],
                                      qdesc=SM_DS_STATUS, logger=logger)
        ds_man = self._dataset_manager_factory(db=db,
                                               es=ESExporter(db),
                                               img_store=img_store,
                                               mode='queue',
                                               status_queue=status_queue)
        ds = Dataset.load(db, msg['ds_id'])
        ds_man.process(ds=ds,
                       action=msg['action'],
                       search_job_factory=SearchJob,
                       del_first=msg.get('del_first', False))
    finally:
        if db:
            db.close()
def _callback(self, msg):
    """Handle one incoming queue message by dispatching the dataset action.

    Cleanup of the DB connection is guaranteed via the finally clause.
    """
    received = " SM daemon received a message: {}".format(msg)
    logger.info(received)
    self._post_to_slack('new', " [v] Received: {}".format(json.dumps(msg)))
    db = DB(self._sm_config['db'])
    try:
        # Assemble all collaborators needed by the dataset manager
        services_cfg = self._sm_config['services']
        ds_man = self._dataset_manager_factory(
            db=db,
            es=ESExporter(db),
            img_store=ImageStoreServiceWrapper(services_cfg['img_service_url']),
            mode='queue',
            status_queue=QueuePublisher(config=self._sm_config['rabbitmq'],
                                        qdesc=SM_DS_STATUS,
                                        logger=logger))
        ds_man.process(ds=Dataset.load(db, msg['ds_id']),
                       action=msg['action'],
                       search_job_factory=SearchJob,
                       del_first=msg.get('del_first', False))
    finally:
        if db:
            db.close()