def test_migrate_report_qa_2(self):
    engine = self._get_temp_db(5)
    t_db = TransactionDB(engine)
    last_message = {
        'data': {
            'report_qa_score_outcomes': {
                'mdbrain_nd': 'good',
                'mdbrain_ms': 'acceptable'
            }
        }
    }
    tr_1 = Transaction()
    tr_1.last_message = json.dumps(last_message)
    t_id = t_db.create_transaction(tr_1)
    tr_1 = t_db.get_transaction(t_id)
    # by default TransactionDB doesn't set this field
    self.assertEqual(None, tr_1.qa_score)
    # run the migration script
    model = get_transaction_model(engine)
    migrations.migrate_qa_scores(t_db.session, model)
    t_db.session.commit()
    tr_2 = t_db.get_transaction(t_id)
    self.assertTrue('mdbrain_ms:acceptable' in tr_2.qa_score)
    self.assertTrue('mdbrain_nd:good' in tr_2.qa_score)
    t_db.close()
def test_migrate_institution(self):
    engine = self._get_temp_db(2)
    t_db = TransactionDB(engine)
    last_message = {
        'data': {
            'dicom_info': {
                't1': {
                    'header': {
                        'InstitutionName': 'MockInstitution'
                    }
                }
            }
        }
    }
    tr_1 = Transaction()
    tr_1.last_message = json.dumps(last_message)
    t_id = t_db.create_transaction(tr_1)
    # remove the institution field
    session = t_db.session
    tr_2 = t_db.get_transaction(t_id)
    tr_2.institution = ''
    session.commit()
    self.assertEqual('', t_db.get_transaction(t_id).institution)
    # run the migration script
    model = get_transaction_model(engine)
    migrations.migrate_institution(session, model)
    session.commit()
    tr_2 = t_db.get_transaction(t_id)
    self.assertEqual('MockInstitution', tr_2.institution)
    t_db.close()
def test_migrate_study_date(self):
    engine = self._get_temp_db(4)
    t_db = TransactionDB(engine)
    last_message = {
        'data': {
            'dicom_info': {
                't1': {
                    'header': {
                        'StudyDate': '20190101'
                    }
                }
            }
        }
    }
    tr_1 = Transaction()
    tr_1.last_message = json.dumps(last_message)
    t_id = t_db.create_transaction(tr_1)
    tr_1 = t_db.get_transaction(t_id)
    # by default TransactionDB doesn't set this field
    self.assertEqual(None, tr_1.study_date)
    # run the migration script
    model = get_transaction_model(engine)
    migrations.migrate_study_date(t_db.session, model)
    t_db.session.commit()
    tr_2 = t_db.get_transaction(t_id)
    self.assertEqual('20190101', tr_2.study_date)
    t_db.close()
def test_migrate_analysis_type_2(self):
    engine = self._get_temp_db(5)
    t_db = TransactionDB(engine)
    last_message = {
        'data': {
            'report_pdf_paths': {
                'mdbrain_nd': 'path1',
                'mdbrain_ms': 'path2'
            }
        }
    }
    tr_1 = Transaction()
    tr_1.last_message = json.dumps(last_message)
    t_id = t_db.create_transaction(tr_1)
    tr_1 = t_db.get_transaction(t_id)
    # by default TransactionDB doesn't set this field
    self.assertEqual(None, tr_1.analysis_type)
    # run the migration script
    model = get_transaction_model(engine)
    migrations.migrate_analysis_types(t_db.session, model)
    t_db.session.commit()
    tr_2 = t_db.get_transaction(t_id)
    self.assertTrue('mdbrain_ms' in tr_2.analysis_type)
    self.assertTrue('mdbrain_nd' in tr_2.analysis_type)
    t_db.close()
def test_multi_session(self):
    engine = temp_db.get_temp_db()  # no db created yet
    t = Transaction()
    t.name = 'Pere'
    t_db_1 = TransactionDB(engine, False)
    _ = TransactionDB(engine)
    t_db_1.create_transaction(t)
    t_1 = t_db_1.get_transaction(1)
    self.assertEqual('Pere', t_1.name)
def child_process_fn(folder, name):
    transaction_db = _get_temp_db(folder)
    for _ in range(TEST_RANGE):
        transaction_db.create_transaction(Transaction(name=name))
        try:
            transaction_db.get_transaction(1)
        except TransactionDBException:
            pass
def test_read_transaction_from_dict_is_complete(self):
    # get all variables of Transaction except non-generic types and
    # test that these variables are read back by the deserialization
    # function
    non_generic_vars = [
        'start_date', 'end_date', 'birth_date', 'task_state',
        'data_uploaded', 'creation_date'
    ]
    CALLABLES = types.FunctionType, types.MethodType
    var = [
        key for key, value in Transaction.__dict__.items()
        if not isinstance(value, CALLABLES)
    ]
    var = [
        key for key in var
        if key[0] != '_' and key not in non_generic_vars
    ]
    t = Transaction()
    counter = 0
    for key in var:
        counter += 1
        setattr(t, key, counter)
    t2 = Transaction()
    t2.read_dict(t.to_dict())
    counter = 0
    for key in var:
        counter += 1
        self.assertEqual(counter, getattr(t2, key))
def test_migrate_version(self):
    engine = self._get_temp_db(5)
    t_db = TransactionDB(engine)
    last_message = {'data': {'version': '2.2.1'}}
    tr_1 = Transaction()
    tr_1.last_message = json.dumps(last_message)
    t_id = t_db.create_transaction(tr_1)
    tr_1 = t_db.get_transaction(t_id)
    # by default TransactionDB doesn't set this field
    self.assertEqual(None, tr_1.version)
    # run the migration script
    model = get_transaction_model(engine)
    migrations.migrate_version(t_db.session, model)
    t_db.session.commit()
    tr_2 = t_db.get_transaction(t_id)
    self.assertEqual('2.2.1', tr_2.version)
    t_db.close()
def test_read_dict_dates(self):
    # test that dates and datetimes are parsed correctly
    engine = temp_db.get_temp_db()
    t_db = TransactionDB(engine)
    datetime_vars = ['start_date', 'end_date', 'data_uploaded']
    date_vars = ['birth_date']
    t = Transaction()
    for key in datetime_vars:
        setattr(t, key, datetime(2020, 2, 1, 18, 30, 4))
    for key in date_vars:
        setattr(t, key, datetime(2020, 2, 1))
    t_r = Transaction().read_dict(t.to_dict())
    t_id = t_db.create_transaction(t_r)
    t_r_from_db = t_db.get_transaction(t_id)
    self.assertEqual(datetime(2020, 2, 1, 18, 30, 4),
                     t_r_from_db.start_date)
    self.assertEqual(datetime(2020, 2, 1, 18, 30, 4),
                     t_r_from_db.end_date)
    self.assertEqual(date(2020, 2, 1), t_r_from_db.birth_date)
def test_study_metadata_to_dict(self):
    md = StudiesMetadata()
    md.origin = 'dicom_grazer'
    md.c_move_time = datetime.utcnow()
    md.study_id = 's1'
    self.assertEqual(
        {
            'origin': 'dicom_grazer',
            'c_move_time': Transaction._datetime_to_str(md.c_move_time),
            'study_id': 's1'
        },
        md.to_dict())
def _get_test_transaction(self):
    t = Transaction()
    # we only need to fill metadata before creating a new transaction
    t.name = 'Pere'
    t.patient_id = '1'
    t.study_id = 'S1'
    t.birth_date = datetime(1982, 10, 29)
    return t
def test_migrate_sequences(self):
    engine = self._get_temp_db(3)
    t_db = TransactionDB(engine)
    last_message = {
        'data': {
            'dicom_info': {
                't1': {
                    'header': {
                        'SeriesDescription': 'T1_sequence'
                    }
                },
                't2': {
                    'header': {
                        'SeriesDescription': 'T2_sequence'
                    }
                }
            }
        }
    }
    tr_1 = Transaction()
    tr_1.last_message = json.dumps(last_message)
    t_id = t_db.create_transaction(tr_1)
    # remove the sequences field
    session = t_db.session
    tr_2 = t_db.get_transaction(t_id)
    tr_2.sequences = ''
    session.commit()
    self.assertEqual('', t_db.get_transaction(t_id).sequences)
    # run the migration script
    model = get_transaction_model(engine)
    migrations.migrate_sequences(session, model)
    session.commit()
    tr_2 = t_db.get_transaction(t_id)
    self.assertEqual('T1_sequence;T2_sequence', tr_2.sequences)
    t_db.close()
def test_read_transaction_from_dict(self):
    d = {
        'transaction_id': 1,
        'name': 'John Doe',
        'birth_date': '01/02/2020'
    }
    t = Transaction().read_dict(d)
    engine = temp_db.get_temp_db()
    t_db = TransactionDB(engine)
    t_id = t_db.create_transaction(t)
    t_from_db = t_db.get_transaction(t_id)
    self.assertEqual(d['transaction_id'], t_from_db.transaction_id)
    self.assertEqual(d['name'], t_from_db.name)
    self.assertEqual(date(2020, 2, 1), t_from_db.birth_date)
def test_migrations(self):
    temp_folder = tempfile.mkdtemp(
        suffix='_test_migrations_transaction_db_')
    temp_db_path = os.path.join(temp_folder, 't_v1.db')
    shutil.copy('tests/fixtures/t_v1.db', temp_db_path)
    engine = create_engine('sqlite:///' + temp_db_path)
    # opening the DB should execute all migrations code
    t_db = TransactionDB(engine, create_db=True, db_file_path=temp_db_path)
    self.assertTrue(os.path.exists(temp_db_path + '.v_1.bkp'))
    # add a new transaction with the current model
    t = Transaction()
    t_db.create_transaction(t)
    shutil.rmtree(temp_folder)
def test_read_dict_task_state(self):
    # test that task state is parsed correctly
    t = Transaction()
    t.task_state = TaskState.completed
    t_r = Transaction().read_dict(t.to_dict())
    self.assertEqual(TaskState.completed, t_r.task_state)
def create_transaction(
        self, t: Transaction, user_id=None, product_id=None,
        analysis_type=None, qa_score=None, processing_state='waiting',
        task_state='queued') -> int:
    """Set the provided transaction object as queued, add it to the DB
    and return the transaction id.

    If the transaction has a last_message JSON with chosen T1/T2, the
    sequence names are indexed as well.

    Parameters
    ----------
    user_id: int
        if provided, the transaction is linked to this user
    product_id: int
    """
    try:
        if task_state == 'failed':
            t.task_state = TaskState.failed
        else:
            t.task_state = TaskState.queued
        t.processing_state = processing_state
        if not t.creation_date:
            t.creation_date = datetime.datetime.utcnow()
        if product_id:
            t.product_id = product_id
        if analysis_type:
            t.analysis_type = analysis_type
        if qa_score:
            t.qa_score = qa_score
        self.session.add(t)
        # when we commit, we get the transaction ID
        self.session.commit()
        if user_id:
            user = self.session.query(User).get(user_id)
            if not user:
                raise TransactionDBException(
                    "The provided user doesn't exist")
            ut = UserTransaction()
            ut.user_id = user_id
            ut.transaction_id = t.transaction_id
            self.session.add(ut)
            self.session.commit()
        # set the transaction id in the task object
        if t.last_message:
            try:
                lm = json.loads(t.last_message)
                lm['t_id'] = t.transaction_id
                t.last_message = json.dumps(lm)
            except Exception:
                pass
        # index.set_index_institution(t)
        index.set_index_sequences(t)
        self.session.commit()
        return t.transaction_id
    except Exception:
        # roll back and remove the partially created transaction
        self.session.rollback()
        try:
            if t.transaction_id:
                self.session.delete(t)
        except Exception:
            pass
        raise
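# A minimal usage sketch (illustration only, not part of the module): it
# shows how create_transaction is typically driven, mirroring the tests
# above. The helper name _example_create_transaction_usage and the field
# values are hypothetical; TransactionDB, Transaction and
# temp_db.get_temp_db() are taken from the surrounding code.
def _example_create_transaction_usage():
    engine = temp_db.get_temp_db()
    t_db = TransactionDB(engine)
    t = Transaction()
    t.name = 'Jane Doe'
    t.last_message = json.dumps({'data': {}})
    # returns the id assigned on commit; task_state defaults to 'queued'
    t_id = t_db.create_transaction(t, processing_state='waiting')
    assert t_db.get_transaction(t_id).processing_state == 'waiting'
    t_db.close()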