def get_default_manager():
	"""Get the default tool manager, configured using tools_config.json."""
	toolsconffile = os.path.join(rootconfig.path.tools, 'ui_programs', 'tools_config.json')
	toolsconf = load_json(toolsconffile)
	manager = ToolManager()
	manager.load(toolsconf)
	return manager
def get_old(atlasname):
	"""Get an atlasobj by name.

	This is typically what you want when you need an atlas object.
	"""
	jsonFileName = atlasname + '.json'
	jsonFilePath = os.path.join(rootconfig.path.atlas, jsonFileName)
	atlasconf = loadsave.load_json(jsonFilePath)
	return Atlas(atlasconf)
def insert_mrirow(self, scan, hasT1, hasT2, hasBOLD, hasDWI):
	"""Insert one mriscan record."""
	# check if the scan record already exists
	try:
		ret = self.session.query(
			exists().where(MRIScan.filename == scan)).scalar()
		if ret:
			# record exists
			return 0
	except MultipleResultsFound:
		print('Error when importing: multiple scan records found for %s' % scan)
		return 1
	mrifolder = rootconfig.dms.folder_mridata
	scaninfo = load_json(os.path.join(mrifolder, scan, 'scan_info.json'))
	machine = self.add_and_get_mrimachine(scaninfo['Machine'])
	name, date = name_date(scan)
	dateobj = clock.simple_to_time(date)
	db_mriscan = MRIScan(date=dateobj, hasT1=hasT1, hasT2=hasT2,
		hasBOLD=hasBOLD, hasDWI=hasDWI, filename=scan)
	machine.mriscans.append(db_mriscan)
	# attach the scan to an existing person when one matches
	try:
		ret = self.session.query(exists().where(
			and_(Person.name == name,
				Person.patientid == scaninfo['Patient']['ID']))).scalar()
		if ret:
			self.session.add(db_mriscan)
			person = self.session.query(Person).filter_by(name=name).one()
			person.mriscans.append(db_mriscan)
			self.session.commit()
			print('Old patient new scan %s inserted' % scan)
			return 0
	except MultipleResultsFound:
		print('Error when importing: multiple person records found for %s' % name)
		return 2
	# otherwise create a new person record
	db_person = self.build_person(name, scaninfo)
	self.db_people[name] = db_person
	self.session.add(db_person)
	self.db_people[name].mriscans.append(db_mriscan)
	self.session.commit()
	print('New patient new scan %s inserted' % scan)
	return 0
def update_PMAR_database(scan_folder_name, name_zh):
	scan_info = loadsave.load_json(
		os.path.join(rootconfig.dms.folder_mridata, scan_folder_name, 'scan_info.json'))
	search_res = pmar.search_patients(name=name_zh)
	print(search_res)
	if len(search_res) < 1:
		# new patient
		search_res = pmar.add_patient(name_zh, scan_info['Patient']['Birth'],
			scan_info['Patient']['Gender'], scan_info['Patient']['ID'])
	else:
		assert len(search_res) == 1
		search_res = search_res[0]
	ret = pmar.add_scan(search_res['id'], scan_info['StudyDate'].replace(' ', 'T'))
	print('update PMAR result: ', ret)
def run(self):
	"""Run the para.

	finalfolders holds the constructed folders in which the job runs,
	in parallel or sequentially depending on the configured runmode.
	Env MMDPS_NEWLIST_TXT will override folderlist.
	Env MMDPS_SECONDLIST_TXT will override secondlist.
	"""
	if self.folderlist == 'listdir':
		originalfolders = path.clean_listdir(self.mainfolder)
	else:
		originalfolders = loadsave.load_txt(
			path.env_override(self.folderlist, 'MMDPS_NEWLIST_TXT'))
	folders = [os.path.join(self.mainfolder, f) for f in originalfolders]
	if self.bsecond:
		finalfolders = []
		if type(self.secondlist) is list:
			secondfolders = self.secondlist
		elif self.secondlist == 'all':
			secondfolders = loadsave.load_txt(
				os.path.join(rootconfig.path.atlas, 'atlas_list.txt'))
		else:
			secondfolders = loadsave.load_txt(
				path.env_override(self.secondlist, 'MMDPS_SECONDLIST_TXT'))
		for folder in folders:
			for secondfolder in secondfolders:
				newfolder = os.path.join(folder, secondfolder)
				path.makedirs(newfolder)
				finalfolders.append(newfolder)
	else:
		finalfolders = folders
	currentJob = job.create_from_dict(
		loadsave.load_json(path.fullfile(self.jobconfig)))
	if self.runmode == 'FirstOnly':
		return self.run_seq(currentJob, finalfolders[0:1])
	if self.runmode == 'Parallel':
		return self.run_para(currentJob, finalfolders)
	if self.runmode == 'Sequential':
		return self.run_seq(currentJob, finalfolders)
	print('Error: no such runmode as', self.runmode)
	return None
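# A minimal sketch of a para config as implied by run() above. It assumes
# para.load maps these JSON keys one-to-one onto the attributes read in
# run(); the key names and values here are illustrative guesses, not
# verified against the actual loader.
example_para_config = {
	'mainfolder': '/path/to/mainfolder',
	'folderlist': 'listdir',         # or a txt file; MMDPS_NEWLIST_TXT overrides
	'bsecond': True,                 # fan out into second-level folders
	'secondlist': 'all',             # 'all', a txt file, or an explicit list
	'jobconfig': 'job_config.json',
	'runmode': 'Sequential',         # 'FirstOnly', 'Parallel' or 'Sequential'
}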
def update_patient_scan():
	scan_dict = loadsave.load_csv_to_dict('I:/new_data_mapping.csv', 'en')
	for name, data_dict in scan_dict.items():
		if data_dict['en'] != 'wangguojun':
			continue
		scan = name + '_' + data_dict['date']
		scan_info = loadsave.load_json('X:/MRIData/%s/scan_info.json' % scan)
		print(scan, data_dict['zh'], data_dict['id'],
			scan_info['Patient']['ID'], scan_info['Patient']['Birth'],
			scan_info['Patient']['Gender'])
		search_res = search_patients(name=data_dict['zh'])
		print(search_res)
		if len(search_res) < 1:
			# new patient
			search_res = add_patient(data_dict['zh'], scan_info['Patient']['Birth'],
				scan_info['Patient']['Gender'], scan_info['Patient']['ID'])
		else:
			assert len(search_res) == 1
			search_res = search_res[0]
		add_scan(search_res['id'], scan_info['StudyDate'].replace(' ', 'T'))
def check_RSN(self):
	# load the RSN config lazily, only on first access
	if not hasattr(self, 'RSNConfig'):
		self.RSNConfig = loadsave.load_json(
			self.fullpath('RSN_%s.json' % self.name))
""" self.check_RSN() return self.RSNConfig['RSN order'] def adjust_vec_Circos(self, vec): vec_adjusted = np.zeros(vec.shape) self.set_brainparts('default') adjustedTicks, nodeCount = self.brainparts.get_region_list() for i in range(self.count): realpos = self.ticks.index(adjustedTicks[i]) vec_adjusted[i] = vec[realpos] return vec_adjusted brodmann_lr = Atlas( loadsave.load_json(os.path.join(rootconfig.path.atlas, 'brodmann_lr.json'))) brodmann_lrce = Atlas( loadsave.load_json( os.path.join(rootconfig.path.atlas, 'brodmann_lrce.json'))) aal = Atlas(loadsave.load_json(os.path.join(rootconfig.path.atlas, 'aal.json'))) aal2 = Atlas( loadsave.load_json(os.path.join(rootconfig.path.atlas, 'aal2.json'))) aicha = Atlas( loadsave.load_json(os.path.join(rootconfig.path.atlas, 'aicha.json'))) bnatlas = Atlas( loadsave.load_json(os.path.join(rootconfig.path.atlas, 'bnatlas.json'))) def get(atlasname, suppress=True): if not suppress:
"""Run a job.""" import sys, os import argparse from mmdps.proc import job from mmdps.util import loadsave from mmdps.util import path if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--config', help='job config json file', required=True) parser.add_argument('--folder', help='job run in this folder', default=None) args = parser.parse_args() print('Runjob Folder:', args.folder) configfile = path.fullfile(args.config) print('Runjob File:', configfile) configdict = loadsave.load_json(configfile) currentJob = job.create_from_dict(configdict) job.runjob(currentJob, args.folder) sys.stdin.close() sys.stdout.close() sys.stderr.close()
		# slide a window of windowLength over the time series in steps of stepSize
		for start in range(0, self.get_total_time_points() - self.windowLength + 1, self.stepSize):
			ts = self.gen_timeseries(start)
			if ts is None:
				raise Exception('Dynamic sliding window exceeds total time points')
			save_csvmat(self.outpath('timeseries-%d-%d.csv' % (start, start + self.windowLength)), ts)
			tscorr = np.corrcoef(ts)
			save_csvmat(self.outpath('corrcoef-%d-%d.csv' % (start, start + self.windowLength)), tscorr)

	def run(self):
		self.gen_net()

if __name__ == '__main__':
	atlasobj = path.curatlas()
	volumename = '3mm'
	img = load_nii(os.path.join(path.curparent(), 'pBOLD.nii'))
	json_path = path.fullfile('inter_attr_dynamic.json')
	argsDict = load_json(json_path)
	outfolder = os.path.join(
		os.getcwd(), 'bold_net',
		'dynamic_' + str(argsDict['stepSize']) + '_' + str(argsDict['windowLength']))
	os.makedirs(outfolder, exist_ok=True)
	cal = Calc(atlasobj=atlasobj, volumename=volumename, img=img,
		outfolder=outfolder, windowLength=argsDict['windowLength'],
		stepSize=argsDict['stepSize'])
	cal.run()
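# Hypothetical helper to generate the inter_attr_dynamic.json consumed by
# the script above. Only 'windowLength' and 'stepSize' are read there; the
# values below are illustrative defaults, not project-mandated ones.
import json

def write_default_dynamic_config(filename='inter_attr_dynamic.json'):
	# write a minimal config with just the two keys the script reads
	with open(filename, 'w') as f:
		json.dump({'windowLength': 100, 'stepSize': 3}, f, indent=2)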
def from_json(self, json_name):
	self.argsDict = load_json(json_name)
def loadconf(confname):
	return load_json(os.path.join(ConfDir, confname + '.json'))
"""Run a Para.""" import sys import argparse from mmdps.proc import para from mmdps.util.loadsave import load_json from mmdps.util import path if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--config', help='para config json file', required=True) args = parser.parse_args() configpath = path.fullfile(args.config) print('Runpara: configpath', configpath) configDict = load_json(configpath) currentPara = para.load(configDict) currentPara.run() sys.stdin.close() sys.stdout.close() sys.stderr.close()
def get_brainparts_config(self, name='default'):
	circosfile = 'circosparts_{}.json'.format(name)
	return loadsave.load_json(os.path.join(self.circosfolder, circosfile))
def load_from_file(configfile):
	"""Load a job from file."""
	configDict = load_json(configfile)
	return create_from_dict(configDict)
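# Usage sketch for load_from_file, mirroring the runjob script in this
# section. 'job_config.json' and the target folder are placeholders, and
# runjob is assumed to live alongside create_from_dict in this module.
#
#     currentJob = load_from_file('job_config.json')
#     runjob(currentJob, '/path/to/run/folder')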
def insert_mrirow(self, scan, hasT1, hasT2, hasBOLD, hasDWI,
		mrifolder=rootconfig.dms.folder_mridata):
	"""Insert one mriscan record."""
	# check if the scan record already exists
	try:
		ret = self.session.query(
			exists().where(tables.MRIScan.filename == scan)).scalar()
		if ret:
			# record exists
			return 0
	except MultipleResultsFound:
		print('Error when importing: multiple scan records found for %s' % scan)
		return 1
	# check MRIMachine
	scan_info = loadsave.load_json(
		os.path.join(mrifolder, scan, 'scan_info.json'))
	ret = self.session.query(exists().where(
		and_(tables.MRIMachine.institution == scan_info['Machine']['Institution'],
			tables.MRIMachine.manufacturer == scan_info['Machine']['Manufacturer'],
			tables.MRIMachine.modelname == scan_info['Machine']['ManufacturerModelName']))).scalar()
	if ret:
		machine = self.session.query(tables.MRIMachine).filter(
			and_(tables.MRIMachine.institution == scan_info['Machine']['Institution'],
				tables.MRIMachine.manufacturer == scan_info['Machine']['Manufacturer'],
				tables.MRIMachine.modelname == scan_info['Machine']['ManufacturerModelName'])).one()
	else:
		# insert new MRIMachine
		machine = tables.MRIMachine(
			institution=scan_info['Machine']['Institution'],
			manufacturer=scan_info['Machine']['Manufacturer'],
			modelname=scan_info['Machine']['ManufacturerModelName'])
	# check Person
	name = scan_info['Patient']['Name']
	try:
		dateobj = datetime.datetime.strptime(scan_info['StudyDate'], '%Y-%m-%d %H:%M:%S')
	except ValueError:
		dateobj = None
	db_mriscan = tables.MRIScan(date=dateobj, hasT1=hasT1, hasT2=hasT2,
		hasBOLD=hasBOLD, hasDWI=hasDWI, filename=scan)
	machine.mriscans.append(db_mriscan)
	try:
		ret = self.session.query(exists().where(
			and_(tables.Person.name == name,
				tables.Person.patientid == scan_info['Patient']['ID']))).scalar()
		if ret:
			person = self.session.query(tables.Person).filter(
				and_(tables.Person.name == name,
					tables.Person.patientid == scan_info['Patient']['ID'])).one()
			person.mriscans.append(db_mriscan)
			self.session.add(db_mriscan)
			self.session.commit()
			print('Old patient new scan %s inserted' % scan)
			return 0
	except MultipleResultsFound:
		print('Error when importing: multiple person records found for %s' % name)
		return 2
	# otherwise create a new person record
	db_person = tables.Person.build_person(name, scan_info)
	db_person.mriscans.append(db_mriscan)
	self.session.add(db_person)
	self.session.commit()
	print('New patient new scan %s inserted' % scan)
	return 0
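# Shape of scan_info.json as implied by the reads above. Only the keys the
# code actually touches are shown; the values are illustrative and real
# files may carry additional fields.
example_scan_info = {
	'StudyDate': '2017-01-01 12:00:00',  # parsed with '%Y-%m-%d %H:%M:%S'
	'Machine': {
		'Institution': 'Example Hospital',
		'Manufacturer': 'ExampleVendor',
		'ManufacturerModelName': 'ExampleModel',
	},
	'Patient': {
		'Name': 'example_name',
		'ID': '000000',
		'Birth': '1970-01-01',
		'Gender': 'M',
	},
}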
def set_brainparts(self, name):
	from mmdps.vis import braincircos
	circosfile = 'circosparts_{}.json'.format(name)
	self.brainparts = braincircos.BrainParts(
		loadsave.load_json(os.path.join(self.circosfolder, circosfile)))
	default=False)
parser.add_argument('--datasource', help='datasource for MMDPDatabase',
	default=None)
parser.add_argument('--force',
	help='True/False. Whether to overwrite existing feature records',
	default=False)
args = parser.parse_args()
if args.modal is not None:
	feature_exporter.check_modal(
		args.modal, os.path.join(rootconfig.path.dms, 'export_mainconf.json'))
data_config = loadsave.load_json(
	os.path.join(rootconfig.path.dms, 'export_dataconf.json'))
main_config = loadsave.load_json(
	os.path.join(rootconfig.path.dms, 'export_mainconf.json'))
if args.modal is not None:
	print('Will search default folder for %s' % (args.modal))
if args.database and args.datasource is None:
	raise Exception('Datasource unknown')
elif args.database:
	print('Will export to database. datasource = %s' % (args.datasource))
if args.force:
	print('Force mode. Will overwrite existing features')
exporter = feature_exporter.MRIScanProcExporter(main_config, data_config,
	args.modal, args.database, args.datasource,