def __init__(self, wxObject, processname, inputdir, uuid, server, row):
    """Init Worker Thread Class.

    Looks up the process definition in the database, dynamically imports the
    module/class registered for it, and instantiates the Docker controller.

    :param wxObject: GUI object that receives posted events
    :param processname: process caption, e.g. QSM
    :param inputdir: directory containing the input DICOM files
    :param uuid: unique series identifier
    :param server: cloud server name used for upload (or 'none')
    :param row: row index in the Process Panel progress table
    :raises Exception: if the database connection cannot be established
    """
    threading.Thread.__init__(self)
    self.wxObject = wxObject
    self.processname = processname
    self.inputdir = inputdir
    self.uuid = uuid
    self.server = server
    self.row = row
    self.db = DBI()
    self.db.connect()
    if self.db.conn is not None:
        # Dynamic process module: resolve module/class names from the DB.
        pref = self.db.getProcessField('ref', processname)
        self.outputasfile = self.db.getProcessField(
            'outputfile', processname)  # file=1 or folder=0
        self.module_name = self.db.getProcessModule(pref)
        self.class_name = self.db.getProcessClass(pref)
        # Instantiate module at runtime — the process class is configurable.
        module = importlib.import_module(self.module_name)
        class_ = getattr(module, self.class_name)
        # Docker Class (controller wrapping the container for this process)
        self.dcc = class_(self.processname)
        # Record configs to log
        # NOTE(review): only classes exposing CONTAINER_NAME get logged here;
        # others are silently accepted.
        if hasattr(self.dcc, 'CONTAINER_NAME'):
            msg = "Running Container: %s [input=%s output=%s]" % (
                self.dcc.CONTAINER_NAME, self.dcc.INPUT_TARGET,
                join(self.dcc.OUTPUT_TARGET, self.dcc.OUTPUT))
            print(msg)
            logging.info(msg)
    else:
        raise Exception('Cannot access Database')
def __init__(self, process=None):
    """Create a Docker client and load the container defaults from the server config DB.

    :param process: accepted for interface compatibility; not used here
    """
    self.client = docker.from_env()
    dbi = DBI()
    dbi.connect()
    # Each Docker setting comes from one server-config key; resolve them in order.
    for attr, key in (('CONTAINER_NAME', 'DOCKER_CONTAINER'),
                      ('INPUT_TARGET', 'DOCKER_INPUTDIR'),
                      ('OUTPUT_TARGET', 'DOCKER_OUTPUTDIR'),
                      ('OUTPUT', 'DOCKER_OUTPUTFILE')):
        setattr(self, attr, dbi.getServerConfigByName(key))
    dbi.closeconn()
class Config(ConfigPanel):
    """Server configuration panel: displays config key/value/description rows in a grid."""

    def __init__(self, parent):
        super(Config, self).__init__(parent)
        self.parent = parent
        self.dbi = DBI()
        self.OnLoadData()

    def OnLoadData(self):
        """Populate the grid from the server configuration table."""
        self.dbi.connect()
        conf = self.dbi.getServerConfig()
        if conf is not None:
            for rownum, key in enumerate(conf.keys()):
                self.m_gridConfig.SetCellValue(rownum, 0, key)
                self.m_gridConfig.SetCellValue(rownum, 1, conf[key][0])
                # Third column (description) is optional.
                if conf[key][1] is not None:
                    self.m_gridConfig.SetCellValue(rownum, 2, conf[key][1])
        self.dbi.closeconn()

    def OnSaveConfig(self, event):
        """Collect all non-empty grid rows and persist them to the database."""
        self.dbi.connect()
        # configid = self.cboConfigid.GetValue()
        grid = self.m_gridConfig
        table = grid.GetTable()
        configlist = [(grid.GetCellValue(r, 0),
                       grid.GetCellValue(r, 1),
                       grid.GetCellValue(r, 2))
                      for r in range(table.GetRowsCount())
                      if not table.IsEmptyCell(r, 0)]
        print('Saved settings:', configlist)
        # Save to DB, then show how many entries were written.
        cnt = self.dbi.addServerConfig(configlist)
        msg = "Settings saved: %s" % cnt
        self.m_txtStatus.SetLabelText(msg)
        self.dbi.closeconn()

    def OnAddRow(self, event):
        """Append one empty row to the config grid."""
        self.m_gridConfig.AppendRows(1, True)

    def OnAddProcess(self, event):
        """Show the process viewer dialog modally, then dispose of it."""
        viewer = ProcessViewer(self)
        viewer.ShowModal()
        viewer.Destroy()
def __init__(self, process=None):
    """Create the Docker client and resolve container settings.

    Defaults come from the DOCKER_* server config entries; when *process* is
    given, each per-process setting that exists overrides the corresponding
    default.

    :param process: optional process name whose container config overrides the defaults
    """
    self.client = docker.from_env()
    db = DBI()
    db.connect()
    try:
        # DEFAULTS
        self.CONTAINER_NAME = db.getServerConfigByName('DOCKER_CONTAINER')
        self.INPUT_TARGET = db.getServerConfigByName('DOCKER_INPUTDIR')
        self.OUTPUT_TARGET = db.getServerConfigByName('DOCKER_OUTPUTDIR')
        self.OUTPUT = db.getServerConfigByName('DOCKER_OUTPUTFILE')
        # Load specific process configs if set
        self.process = process
        if process is not None:
            # All four overrides share one lookup pattern; the loop replaces
            # four copy-pasted blocks (one of which shadowed the builtin
            # `input`). A missing override keeps the default.
            for field, attr in (('container', 'CONTAINER_NAME'),
                                ('containerinputdir', 'INPUT_TARGET'),
                                ('containeroutputdir', 'OUTPUT_TARGET'),
                                ('filename', 'OUTPUT')):
                value = db.getServerConfigByName(
                    db.getProcessField(field, process))
                if value is not None:
                    setattr(self, attr, value)
    finally:
        # Always release the DB connection, even if a lookup raises
        # (the original leaked the connection on error).
        db.closeconn()
def __init__(self, uid):
    """Remember the series identifier and prepare a database interface.

    :param uid: unique series/session identifier
    """
    self.uid = uid
    self.db = DBI()
def __init__(self, parent):
    """Build the config panel, keep a parent reference, and load current settings."""
    super(Config, self).__init__(parent)
    self.parent = parent
    # DB interface is created once; individual handlers open/close connections.
    self.dbi = DBI()
    self.OnLoadData()
def setUp(self):
    """Create and connect the database interface used by each test."""
    self.dbi = DBI()
    self.dbi.connect()
class TestDBquery(unittest2.TestCase):
    """Query-layer tests for the DBI database interface.

    NOTE(review): several tests depend on rows already present in the
    database (hard-coded uuids such as 't10000' and the long sha-style
    series uuid) — they expect a dedicated, pre-populated test DB.
    """

    def setUp(self):
        # Fresh DB connection for every test.
        self.dbi = DBI()
        self.dbi.connect()
        # try:
        #     self.deleteData()
        # except OperationalError as e:
        #     print(e.args[0])
        #     self.tearDown()

    def deleteData(self):
        # Reset all data so tests will work - so only use with a test db
        self.dbi.deleteData('seriesprocess')
        self.dbi.deleteData('dicomfiles')
        self.dbi.deleteData('dicomdata')

    def tearDown(self):
        # Close the underlying connection opened in setUp.
        self.dbi.conn.close()

    def test_getCaptions(self):
        # Expect at least one process caption to be configured.
        data = self.dbi.getCaptions()
        expected = 0
        print('Captions: ', data)
        self.assertIsNotNone(data)
        self.assertGreater(len(data), expected)

    def test_getRefs(self):
        data = self.dbi.getRefs()
        expected = 0
        print('Refs: ', data)
        self.assertIsNotNone(data)
        self.assertGreater(len(data), expected)

    def test_getDescription(self):
        caption = 'QSM'
        data = self.dbi.getDescription(caption)
        expected = 0
        print('Description: ', data)
        self.assertIsNotNone(data)
        self.assertGreater(len(data), expected)

    def test_getCaption(self):
        caption = 'qsm'
        data = self.dbi.getCaption(caption)
        expected = 0
        print('Caption: ', data)
        self.assertIsNotNone(data)
        self.assertGreater(len(data), expected)

    def test_getProcessModule(self):
        caption = 'qsm'
        data = self.dbi.getProcessModule(caption)
        expected = 0
        print('Module: ', data)
        self.assertIsNotNone(data)
        self.assertGreater(len(data), expected)

    def test_getProcessClass(self):
        caption = 'qsm'
        data = self.dbi.getProcessClass(caption)
        expected = 0
        print('Class: ', data)
        self.assertIsNotNone(data)
        self.assertGreater(len(data), expected)

    def test_getFiles(self):
        # Requires a series with files already registered in the test DB.
        uuid = '5d74a20b44ec1dfd0af4fbc6bb680e0f557c14a08a143b843ef40977697e2bea'
        data = self.dbi.getFiles(uuid)
        expected = 0
        print('Files: ', data)
        self.assertIsNotNone(data)
        self.assertGreater(len(data), expected)

    def test_getNoFiles(self):
        # Unknown uuid must return an empty result, not an error.
        uuid = 't10001'
        data = self.dbi.getFiles(uuid)
        expected = 0
        print('Files: ', data)
        self.assertEqual(expected, len(data))

    def test_addDicomdata(self):
        # Insert a synthetic series record; skipped if it already exists.
        dicomdata = {
            'uuid': 't10000',
            'patientid': 'p1',
            'patientname': 'test patient',
            'seriesnum': '1.001.111',
            'sequence': 'ftest',
            'protocol': 'aaa',
            'imagetype': 'M'
        }
        try:
            rtn = self.dbi.addDicomdata(dicomdata)
            self.assertEqual(rtn, 1, 'Dicom data add failed')
        except IntegrityError as e:
            self.skipTest(e.args[0])

    def test_hasUuid(self):
        # Depends on test_addDicomdata having inserted 't10000'.
        uuid = 't10000'
        self.assertIs(self.dbi.hasUuid(uuid), True, 'Data already added')

    def test_addDicomdataExisting(self):
        # Re-inserting the same uuid must violate the unique constraint.
        dicomdata = {
            'uuid': 't10000',
            'patientid': 'p1',
            'patientname': 'test patient',
            'seriesnum': '1.001.111',
            'sequence': 'ftest',
            'protocol': 'aaa',
            'imagetype': 'M'
        }
        try:
            self.assertRaises(IntegrityError, self.dbi.addDicomdata,
                              dicomdata)
        except AssertionError as e:
            self.skipTest(e.args[0])

    def test_addDicomfileExisting(self):
        uuid = 't10000'
        dicomfile = "D:\\Projects\\XNAT\\data\\S1\\scans\\3\\1001DS.MR.RESEARCH_16022_OPTIMIZING_EXERCISE.0003.0001.2017.02.24.15.41.05.593750.93525560.IMA"
        try:
            self.assertRaises(IntegrityError, self.dbi.addDicomfile, uuid,
                              dicomfile)
            #self.assertEqual(rtn, 1, 'Dicom file add failed')
        except AssertionError as e:
            self.skipTest(e.args[0])

    def test_addDicomfile(self):
        uuid = 't10000'
        dicomfile = "D:\\Projects\\XNAT\\data\\S1\\scans\\3\\1001DS.MR.RESEARCH_16022_OPTIMIZING_EXERCISE.0003.0001.2017.02.24.15.41.05.593750.93525560.IMA"
        try:
            rtn = self.dbi.addDicomfile(uuid, dicomfile)
            self.assertEqual(rtn, 1, 'Dicom file add failed')
        except IntegrityError as e:
            self.skipTest(e.args[0])

    def test_getUuids(self):
        data = self.dbi.getUuids()
        expected = 0
        print('UUIDS: ', data)
        self.assertIsNotNone(data)
        self.assertGreater(len(data), expected)

    def test_getNumberFiles(self):
        uuid = '5d74a20b44ec1dfd0af4fbc6bb680e0f557c14a08a143b843ef40977697e2bea'
        data = self.dbi.getNumberFiles(uuid)
        expected = 0
        print('Num files: ', data)
        self.assertIsNotNone(data)
        self.assertGreater(data, expected)

    def test_getDicomdataAll(self):
        uuid = 't10000'
        field = 'all'
        data = self.dbi.getDicomdata(uuid, field)
        expected = 0
        print('Dicomdata for: ', field, '=', data)
        self.assertIsNotNone(data)
        # NOTE(review): assertGreater(data, 0) only works if getDicomdata
        # returns a number; for a row/list this raises TypeError on
        # Python 3 — verify against the DBI implementation.
        self.assertGreater(data, expected)

    def test_getDicomdata(self):
        uuid = 't10000'
        field = 'protocol'
        data = self.dbi.getDicomdata(uuid, field)
        expected = 0
        print('Dicomdata for: ', field, '=', data)
        self.assertIsNotNone(data)
        # NOTE(review): same numeric-comparison concern as
        # test_getDicomdataAll — a string protocol would raise TypeError.
        self.assertGreater(data, expected)

    def test_getRef(self):
        pname = 'QSM'
        expected = 'qsm'
        data = self.dbi.getRef(pname)
        self.assertEqual(expected, data)

    # INSERTING DATA - USE TEST DB ONLY
    # def test_setSeriesProcess(self):
    #     uuid = '5d74a20b44ec1dfd0af4fbc6bb680e0f557c14a08a143b843ef40977697e2bea'
    #     pid = 1
    #     server = 'AWS'
    #     status = 1
    #     starttime = datetime.datetime.now()
    #     try:
    #         rtn = self.dbi.setSeriesProcess(uuid, pid,server,status,starttime)
    #         self.assertEqual(rtn, 1, 'Series Process add failed')
    #     except IntegrityError as e:
    #         self.skipTest(e.args[0])

    def test_getActiveProcesses(self):
        # Smoke test only — no assertions on the result.
        data = self.dbi.getActiveProcesses()
        print(data)

    def test_deleteSeriesData(self):
        # Smoke test only — no assertions on the result.
        uuid = '5d74a20b44ec1dfd0af4fbc6bb680e0f557c14a08a143b843ef40977697e2bea'
        self.dbi.deleteSeriesData(uuid)
def __init__(self, wxObject, filelist):
    """Worker thread that scans *filelist* for DICOM data and reports to *wxObject*.

    :param wxObject: GUI object that receives posted events
    :param filelist: DICOM file paths to scan
    """
    threading.Thread.__init__(self)
    self.wxObject = wxObject
    self.filelist = filelist
    self.db = DBI()
class DicomThread(threading.Thread):
    """Worker thread that parses DICOM headers for a file list and loads series info into the DB.

    Progress and results are posted back to the GUI via wx.PostEvent(DataEvent).
    """

    def __init__(self, wxObject, filelist):
        """
        :param wxObject: GUI object that receives DataEvent notifications
        :param filelist: DICOM file paths to scan
        """
        threading.Thread.__init__(self)
        self.wxObject = wxObject
        self.filelist = filelist
        self.db = DBI()

    def run(self):
        print('Starting DICOM thread run')
        n = 1
        # Fix: msg must exist before the finally block — it was previously
        # unbound (NameError) when no series were found and no exception set it.
        msg = ''
        try:
            event.set()
            lock.acquire(True)
            for filename in self.filelist:
                try:
                    if not self.db.hasFile(filename):
                        dcm = pydicom.read_file(filename)
                        updatemsg = "Detecting DICOM data ... %d of %d" % (
                            n, len(self.filelist))
                        wx.PostEvent(self.wxObject, DataEvent((updatemsg, [])))
                        # self.m_status.SetLabelText(updatemsg)
                        n += 1
                        # Check DICOM header info
                        series_num = str(dcm.SeriesInstanceUID)
                        uuid = generateuid(series_num)
                        imagetype = str(dcm.ImageType[2])
                        dicomdata = {
                            'uuid': uuid,
                            'patientid': str(dcm.PatientID),
                            'patientname': str(dcm.PatientName),
                            'seriesnum': series_num,
                            'sequence': str(dcm.SequenceName),
                            'protocol': str(dcm.ProtocolName),
                            'imagetype': imagetype
                        }
                        if not self.db.hasUuid(uuid):
                            self.db.addDicomdata(dicomdata)
                        if not self.db.hasFile(filename):
                            self.db.addDicomfile(uuid, filename)
                except InvalidDicomError:
                    # Fix: pass filename as a lazy %-style argument — the
                    # stray positional arg previously broke log formatting.
                    # Also use the module logger consistently (was `logging`).
                    logger.warning("Not valid DICOM - skipping: %s", filename)
                    continue
            ############## Load Series Info
            for suid in self.db.getUuids():
                numfiles = self.db.getNumberFiles(suid)
                item = [
                    True,
                    self.db.getDicomdata(suid, 'patientname'),
                    self.db.getDicomdata(suid, 'sequence'),
                    self.db.getDicomdata(suid, 'protocol'),
                    self.db.getDicomdata(suid, 'imagetype'),
                    str(numfiles),
                    self.db.getDicomdata(suid, 'seriesnum')
                ]
                wx.PostEvent(self.wxObject, DataEvent((suid, item)))
        except Exception as e:
            # Fix: str(e) is safe when e.args is empty (e.args[0] was not).
            msg = 'ERROR encountered during DICOM thread: %s' % e
        finally:
            n = len(self.db.getNewUuids())
            if n > 0:
                msg = "Total Series loaded: %d" % n
                logger.info(msg)
            elif len(self.db.getUuids()) > 0:
                msg = "Series already processed. Remove via Status Panel to repeat upload."
                logger.info(msg)
            else:
                # Logs the exception message captured above (or '' if none).
                logger.error(msg)
            if self.db.conn is not None:
                self.db.closeconn()
            wx.PostEvent(self.wxObject, DataEvent((msg, [])))
            # self.terminate()
            lock.release()
            event.clear()
def __init__(self):
    """Configure logging, then open the shared DB connection if not already connected."""
    self.logger = self.__loadLogger()
    self.db = DBI()
    # Only connect when no cursor/handle exists yet.
    # NOTE(review): this checks db.c rather than db.conn — confirm DBI's contract.
    if self.db.c is None:
        self.db.connect()
class Controller():
    """Top-level coordinator wiring the GUI to DICOM parsing, Docker processing and cloud uploads."""

    def __init__(self):
        """Configure logging, then open the shared DB connection if not already connected."""
        self.logger = self.__loadLogger()
        self.db = DBI()
        if self.db.c is None:
            self.db.connect()

    def __loadLogger(self, outputdir=None):
        """Configure a rotating file log under <base>/.d2c/logs and return the module logger.

        :param outputdir: optional readable base directory used instead of the user's home
        :return: the configured module logger
        """
        #### LoggingConfig
        logger.setLevel(logging.DEBUG)
        homedir = expanduser("~")
        if outputdir is not None and access(outputdir, R_OK):
            homedir = outputdir
        logdir = join(homedir, ".d2c", "logs")
        if not access(logdir, R_OK):
            # Fix: mkdir() fails when the parent ".d2c" directory does not
            # exist yet; makedirs creates the whole tree and exist_ok guards
            # against a race with another process creating it first.
            os.makedirs(logdir, exist_ok=True)
        self.logfile = join(logdir, 'd2c.log')
        handler = RotatingFileHandler(filename=self.logfile,
                                      maxBytes=10000000,
                                      backupCount=10)
        formatter = logging.Formatter(
            '[ %(asctime)s %(levelname)-4s ] %(filename)s %(lineno)d : (%(threadName)-9s) %(message)s'
        )
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        return logger

    # ----------------------------------------------------------------------
    def RunProcess(self, wxGui, inputdir, uuid, processname, server, row):
        """
        Run processing in a thread
        :param wxGui: Process Panel ref
        :param inputdir: Directory with DICOM files
        :param uuid: unique ID from series number
        :param processname: Process eg QSM
        :param server: Cloud server to use eg AWS
        :param row: Row number in progress table in Process Panel
        :return:
        :raises ValueError: if the series has no files to process
        """
        filenames = self.db.getSessionFiles(uuid)
        # (The previous try/except handlers only re-raised unchanged, so they
        # were removed — behavior is identical.)
        if len(filenames) > 0:
            msg = "Load Process Threads: %s [row: %d]" % (processname, row)
            print(msg)
            # Run thread
            t = ProcessThread(wxGui, processname, inputdir, uuid, server, row)
            t.start()
            msg = "Running Thread: %s" % processname
            print(msg)
            # Load to database for remote monitoring
            self.db.setSessionProcess(uuid, self.db.getProcessId(processname),
                                      server, 1, datetime.datetime.now(),
                                      inputdir)
        else:
            msg = "No files to process"
            logger.error(msg)
            raise ValueError(msg)

    def removeInputFiles(self, uuid, inputdir):
        """
        Remove temporary files in outputdir (assumes tar file created and uploaded to Docker)
        Remove file entries?
        :return:
        """
        files = iglob(join(inputdir, '*.IMA'))
        for f in files:
            os.remove(f)
        # remove database entry - dicomdata and dicomfiles
        self.db.deleteSessionData(uuid)

    def checkRemote(self):
        """Poll active cloud processes; download finished results and update DB status."""
        # Check if cloud processing is done and update database
        seriesprocesses = self.db.getActiveProcesses()
        for series in seriesprocesses:
            print("CheckRemote:", series)
            seriesid = series[0]
            server = series[2].lower()
            outputdir = series[6]
            if outputdir is None or len(outputdir) <= 0:
                # Fall back to the directory of the first stored file.
                # NOTE(review): getFiles() could return an empty list here —
                # confirm upstream guarantees before relying on files[0].
                files = self.db.getFiles(seriesid)
                outputdir = dirname(files[0])
            # Get uploader class and query
            uploaderClass = get_class(server)
            if uploaderClass is not None:
                uploader = uploaderClass(seriesid)
                if uploader.isDone():
                    downloadfile = join(outputdir, seriesid, 'download.tar')
                    uploader.download(downloadfile)
                    msg = 'Series: %s \n\tSTATUS: Complete (%s)\n' % (
                        seriesid, downloadfile)
                    print(msg)
                    self.db.setSessionProcessFinished(seriesid)
                else:
                    # Still in progress
                    # NOTE(review): method name says "Series" while the rest
                    # of this class uses "Session" — confirm this is the
                    # intended DBI call.
                    self.db.setSeriesProcessInprogress(seriesid)
            else:
                # assume done
                msg = 'Series: %s \n\tSTATUS: Complete\n' % seriesid
                print(msg)
                self.db.setSessionProcessFinished(seriesid)

    def parseDicom(self, wxObject, filelist):
        '''
        Read DICOM headers for filelist and load series info to db
        :param filelist:
        :return:
        '''
        t = DicomThread(wxObject, filelist)
        t.start()
class ProcessThread(threading.Thread):
    """Multi Worker Thread Class.

    Runs one process's Docker container over a series' input directory,
    polls it to completion, then optionally uploads the result to a cloud
    server. Progress is reported to the GUI via wx.PostEvent(ResultEvent).
    """

    # wxGui, processname, self.cmodules[processref], targetdir, uuid, server, filenames, row, containername
    # ----------------------------------------------------------------------
    def __init__(self, wxObject, processname, inputdir, uuid, server, row):
        """Init Worker Thread Class.

        Resolves the process's module/class from the database and
        instantiates its Docker controller.

        :param wxObject: GUI object that receives posted events
        :param processname: process caption, e.g. QSM
        :param inputdir: directory containing the input DICOM files
        :param uuid: unique series identifier
        :param server: cloud server name used for upload (or 'none')
        :param row: row index in the Process Panel progress table
        :raises Exception: if the database connection cannot be established
        """
        threading.Thread.__init__(self)
        self.wxObject = wxObject
        self.processname = processname
        self.inputdir = inputdir
        self.uuid = uuid
        self.server = server
        self.row = row
        self.db = DBI()
        self.db.connect()
        if self.db.conn is not None:
            # Dynamic process module: names are configured in the DB.
            pref = self.db.getProcessField('ref', processname)
            self.outputasfile = self.db.getProcessField(
                'outputfile', processname)  # file=1 or folder=0
            self.module_name = self.db.getProcessModule(pref)
            self.class_name = self.db.getProcessClass(pref)
            # Instantiate module
            module = importlib.import_module(self.module_name)
            class_ = getattr(module, self.class_name)
            # Docker Class
            self.dcc = class_(self.processname)
            # Record configs to log
            if hasattr(self.dcc, 'CONTAINER_NAME'):
                msg = "Running Container: %s [input=%s output=%s]" % (
                    self.dcc.CONTAINER_NAME, self.dcc.INPUT_TARGET,
                    join(self.dcc.OUTPUT_TARGET, self.dcc.OUTPUT))
                print(msg)
                logging.info(msg)
        else:
            raise Exception('Cannot access Database')

    # ----------------------------------------------------------------------
    def run(self):
        """Start the container, poll until done, finalize output, then upload or report."""
        print('Starting thread run')
        msg = ''
        # ctr doubles as progress tick and status code: 100 = done, -1 = error.
        ctr = 0
        try:
            event.set()
            lock.acquire(True)
            # Convert IMA files to MNC via Docker image
            print('Running Docker image')
            containerId = self.dcc.startDocker(join(self.inputdir, self.uuid))
            if containerId is None:
                raise Exception("ERROR: Unable to initialize Docker")
            else:
                print('Container ID:', containerId)
                # Poll once per second, posting a 'Converting' tick to the GUI.
                while (not self.dcc.checkIfDone(containerId)):
                    time.sleep(1)
                    wx.PostEvent(
                        self.wxObject,
                        ResultEvent((self.row, ctr, self.uuid,
                                     self.processname, 'Converting')))
                    ctr += 1
                    # restart for long running
                    if ctr == 100:
                        ctr = 1
                # Check that everything ran ok (0 = success)
                if self.dcc.getExitStatus(containerId):
                    raise Exception(
                        "ERROR: Docker unable to anonymize the dataset")
                # Get the resulting mnc file back to the original directory
                outputfile = self.dcc.finalizeJob(containerId, self.inputdir,
                                                  self.uuid,
                                                  self.outputasfile)
                print('Output:', outputfile)
                if self.server.lower() != 'none':
                    msg = self.uploadCloud(outputfile)
                else:
                    msg = 'Done: %s' % outputfile
                ctr = 100
                print(msg)
                logger.info(msg)
        except Exception as e:
            # NOTE(review): assumes the exception carries args[0]; an
            # arg-less exception would raise IndexError here — confirm.
            msg = e.args[0]
            print("ERROR:", msg)
            ctr = -1
        finally:
            # Release shared lock/event only if still held/set, then post
            # the final status row to the GUI.
            if lock.locked():
                lock.release()
            if event.is_set():
                event.clear()
            wx.PostEvent(
                self.wxObject,
                ResultEvent((self.row, ctr, self.uuid, self.processname,
                             msg)))

    def uploadCloud(self, mncfile):
        """
        Send file to cloud for processing
        :param mncfile: output file produced by the Docker job
        :return: status message describing the upload (or why none happened)
        """
        uploaderClass = get_class(self.server)
        if uploaderClass is not None:
            uploader = uploaderClass(self.uuid)
            uploader.upload(mncfile, self.processname)
            msg = 'Uploading to server[%s]: %s' % (self.server, mncfile)
        else:
            msg = 'No Uploader class available'
        return msg