def loadSpecificNets(boldPath, atlasobj, timeCase=1, subjectList=None):
	"""
	Load the timeCase-th scan of each subject found under boldPath.

	subjectList may be a list of subject-name strings or a path to a text
	file; when omitted, every subject is considered.  Returns a list of
	netattr.Net objects built from each selected scan's bold_net.csv.
	"""
	subjectList = process_subject_list(subjectList)
	nets = []
	previousSubject = 'Unknown'
	occurrence = 0
	for scan in sorted(os.listdir(boldPath)):
		# scan folders are usually named <subject>_<date>; fall back to the
		# whole folder name when no underscore is present
		separator = scan.find('_')
		currentSubject = scan if separator == -1 else scan[:separator]
		if currentSubject != previousSubject:
			occurrence = 0
			previousSubject = currentSubject
		occurrence += 1
		if subjectList is not None and currentSubject not in subjectList:
			continue
		if occurrence != timeCase:
			continue
		csvPath = os.path.join(boldPath, scan, atlasobj.name, 'bold_net.csv')
		try:
			nets.append(netattr.Net(loadsave.load_csvmat(csvPath), atlasobj))
		except FileNotFoundError as e:
			print('File %s not found.' % csvPath)
			print(e)
	return nets
def load_single_dynamic_attr(scan, atlasobj, attrname, dynamic_conf, rootFolder = rootconfig.path.feature_root):
	"""
	Load one scan's dynamic attribute series from disk.

	Returns a netattr.DynamicAttr whose data is indexed as
	attr.data[tickIdx, timeIdx].  dynamic_conf is (window_length, step_size).
	Raises Exception when attrname cannot be mapped to a known feature.
	"""
	if type(atlasobj) is str:
		atlasobj = atlas.get(atlasobj)
	window_length, step_size = dynamic_conf
	# map the (loosely spelled) attribute name onto the canonical feature name
	if 'bc' in attrname or 'BC' in attrname:
		feature_name = 'BOLD.BC.inter'
	elif 'ccfs' in attrname or 'CCFS' in attrname:
		feature_name = 'BOLD.CCFS.inter'
	elif 'le' in attrname or 'LE' in attrname:
		feature_name = 'BOLD.LE.inter'
	elif 'wd' in attrname or 'WD' in attrname:
		feature_name = 'BOLD.WD.inter'
	else:
		raise Exception('Unknown feature_name %s' % attrname)
	dynamic_attr = netattr.DynamicAttr(None, atlasobj, window_length, step_size, scan = scan, feature_name = feature_name)
	dynamic_folder_path = os.path.join(rootFolder, scan, atlasobj.name, 'bold_net_attr', 'dynamic %d %d' % (step_size, window_length))
	# append one slice per sliding window until the next file is missing
	window_start = 0
	while True:
		slice_path = os.path.join(dynamic_folder_path, '%s-%d.%d.csv' % (attrname, window_start, window_start + window_length))
		if not os.path.exists(slice_path):
			break
		dynamic_attr.append_one_slice(load_csvmat(slice_path))
		window_start += step_size
	return dynamic_attr
def loadAllTemporalNets(boldPath, totalTimeCase, atlasobj, subjectList=None, specificTime=None):
	"""
	Load temporal (repeated) scans of each subject.

	Every subject with at least totalTimeCase scans is loaded and returned
	in a dict keyed by subject name; each value is a list of netattr.Net,
	one per scan, in sorted scan-folder order.

	Parameters:
		- subjectList: a list of strs or a path to a text file
		- specificTime: a dict, with key = subject name,
		  value = [timeStr1, timeStr2, ...].
		  The length of value should equal totalTimeCase
	"""
	subjectList = process_subject_list(subjectList)
	ret = {}
	currentPersonScans = []
	currentPersonTime = []
	lastSubjectName = 'Unknown'
	occurrenceCounter = 0

	def _flush():
		# Commit the scans collected for lastSubjectName into ret, but only
		# when that subject has enough scans.
		if occurrenceCounter >= totalTimeCase:
			if specificTime is not None and lastSubjectName in specificTime:
				# pick the scans whose time strings the caller asked for
				ret[lastSubjectName] = [
					currentPersonScans[currentPersonTime.index(timeStr)]
					for timeStr in specificTime[lastSubjectName]
				]
			else:
				ret[lastSubjectName] = currentPersonScans[:totalTimeCase]

	for scan in sorted(os.listdir(boldPath)):
		subjectName = scan[:scan.find('_')]
		if subjectName != lastSubjectName:
			_flush()
			occurrenceCounter = 0
			lastSubjectName = subjectName
			currentPersonScans = []
			currentPersonTime = []
		if subjectList is not None and subjectName not in subjectList:
			continue
		occurrenceCounter += 1
		currentPersonScans.append(netattr.Net(loadsave.load_csvmat(os.path.join(boldPath, scan, atlasobj.name, 'bold_net', 'corrcoef.csv')), atlasobj))
		currentPersonTime.append(scan[scan.find('_') + 1:])
	# bug fix: the last subject in the directory listing was previously
	# never flushed into ret (the flush only ran on a subject change)
	_flush()
	return ret
def loadDynamicNets(self, loadPath):
	"""
	Deprecated. Do not use this function.

	Raises:
		Exception: always; kept only so legacy callers fail loudly with a
			clear message instead of a bare, message-less Exception.
	"""
	# The old loading loop that used to sit below the raise was unreachable
	# dead code and has been removed.
	raise Exception('loadDynamicNets is deprecated. Do not use this function.')
def loaddata(self, mriscan, netattrname, csvfilename = None):
	"""
	Load the feature matrix/vector for one scan and feature name.

	When csvfilename is given it selects a specific csv file, otherwise the
	default file for netattrname is used.  A pre-processing function set via
	set_preproc (either a single callable, or a per-scan dict of callables)
	is applied to the loaded matrix before it is returned.
	"""
	if csvfilename is None:
		csvfile = self.loadfilepath(mriscan, netattrname)
	else:
		csvfile = self.loadfilepath(mriscan, netattrname, csvfilename)
	resmat = load_csvmat(csvfile)
	preproc = self.f_preproc
	if type(preproc) is dict:
		# per-scan pre-processing; entries may be None/falsy to skip a scan
		scan_func = preproc.get(mriscan)
		if scan_func:
			resmat = scan_func(resmat)
	elif preproc:
		resmat = preproc(resmat)
	return resmat
def loadAllDynamicNets(boldPath, atlasobj, dynamicDict, timeCase=1, subjectList=None):
	"""
	Load all dynamic networks of the timeCase-th scan of each subject.

	subjectList may be a list of subject names or a path to a text file;
	when omitted, every subject is loaded.
	DynamicDict contains: 'windowLength' and 'stepSize', specified as
	integers (unused in this function; kept for interface compatibility).
	"""
	subjectList = process_subject_list(subjectList)
	ret = []
	lastSubjectName = 'Unknown'
	occurrenceCounter = 0
	for scan in sorted(os.listdir(boldPath)):
		if scan.find('_') != -1:
			subjectName = scan[:scan.find('_')]
		else:
			subjectName = scan
		if subjectName != lastSubjectName:
			occurrenceCounter = 0
			lastSubjectName = subjectName
		occurrenceCounter += 1
		if subjectList is not None and subjectName not in subjectList:
			continue
		if occurrenceCounter != timeCase:
			continue
		netFolder = os.path.join(boldPath, scan, atlasobj.name, 'bold_net')
		try:
			for file in sorted(os.listdir(netFolder)):
				# dynamic slices are named like corrcoef-<start>.<end>.csv;
				# the '-' distinguishes them from the static corrcoef.csv
				if file.find('-') != -1:
					ret.append(netattr.Net(loadsave.load_csvmat(os.path.join(netFolder, file)), atlasobj))
		except FileNotFoundError as e:
			# bug fix: report the path that actually failed; the old message
			# named corrcoef.csv, which this function never touches
			print('File %s not found.' % netFolder)
			print(e)
	return ret
def loadAllNets(boldPath, atlasobj, scanList=None):
	"""
	Load the static network (bold_net/corrcoef.csv) of every scan.

	scanList may be a list of scan-folder names or a path to a text file;
	when omitted, every scan folder under boldPath is loaded.
	Scans whose csv is missing are reported and skipped.
	"""
	scanList = process_subject_list(scanList)
	# use a set for O(1) membership tests; None means "load everything"
	# (the original materialized a second directory listing just to test
	# membership against a list, which is O(n) per scan)
	wanted = None if scanList is None else set(scanList)
	ret = []
	for scan in sorted(os.listdir(boldPath)):
		if wanted is not None and scan not in wanted:
			continue
		csvPath = os.path.join(boldPath, scan, atlasobj.name, 'bold_net', 'corrcoef.csv')
		try:
			ret.append(netattr.Net(loadsave.load_csvmat(csvPath), atlasobj))
		except FileNotFoundError:
			print('File %s not found.' % csvPath)
	return ret
def load_single_dynamic_network(scan, atlasobj, dynamic_conf, rootFolder = rootconfig.path.feature_root):
	"""
	Load one scan's dynamic network from disk.

	Returns a netattr.DynamicNet; the slices are written into
	net.data[:, :, timeIdx], one per sliding window.  dynamic_conf is
	(window_length, step_size).
	"""
	if type(atlasobj) is str:
		atlasobj = atlas.get(atlasobj)
	window_length, step_size = dynamic_conf
	dynamic_folder_path = os.path.join(rootFolder, scan, atlasobj.name, 'bold_net', 'dynamic %d %d' % (step_size, window_length))
	# every file in the folder is one time slice, except timeseries.csv
	time_slice_count = len(list(os.listdir(dynamic_folder_path))) - 1
	dynamic_net = netattr.DynamicNet(np.zeros((atlasobj.count, atlasobj.count, time_slice_count)), atlasobj, window_length, step_size, scan = scan, feature_name = 'BOLD.net')
	window_start = 0
	slice_idx = 0
	while True:
		slice_path = os.path.join(dynamic_folder_path, 'corrcoef-%d.%d.csv' % (window_start, window_start + window_length))
		if not os.path.exists(slice_path):
			break
		dynamic_net.data[:, :, slice_idx] = load_csvmat(slice_path)
		slice_idx += 1
		window_start += step_size
	return dynamic_net
def loadRandomDynamicNets(boldPath, atlasobj, totalNum=0, scanList=None):
	"""
	This function is used to randomly load the dynamic nets of subjects.
	Specify how many nets in total you would like to get in totalNum.
	Specify which scans to load as a list of strings or a file path in scanList.
	Logic: Randomly load one dynamic net for each scan (make sure not repeat)
	and add it. If the total number is enough, return. If not, continue load
	one more dynamic net.
	"""
	retList = []
	scanList = process_subject_list(scanList)
	ret = {}  # scanName -> nets already picked for that scan (for de-duplication)
	scanName = 'None'
	lastScanName = 'Unknown'
	iterationCounter = 0  # counter for total iteration, equals num of dynamic nets of each scan in ret
	# NOTE(review): if no scan ever yields a net (e.g. empty boldPath) this
	# outer loop never terminates — confirm callers always have data on disk
	while len(retList) < totalNum:
		iterationCounter += 1
		currentList = []  # nets picked during this sweep over all scans
		for scanName in sorted(os.listdir(boldPath)):
			if scanName != lastScanName:
				occurrenceCounter = 0
				lastScanName = scanName
			occurrenceCounter += 1
			if scanList is not None and scanName not in scanList:
				continue
			# randomly load one dynamic net in this subject
			if scanName not in ret:
				ret[scanName] = []
			else:
				pass
			try:
				# randomly search for one non-in net
				dynamicList = sorted(os.listdir(os.path.join(boldPath, scanName, atlasobj.name, 'bold_net')))
				# drop the static network and raw time series files; what
				# remains are the dynamic slice csv files
				dynamicList.remove('corrcoef.csv')
				dynamicList.remove('timeseries.csv')
				flag = True
				# rejection-sample an index until it names a net not already
				# picked for this scan.
				# NOTE(review): loops forever once every net of a scan has
				# been picked — confirm totalNum stays small in practice
				while flag:
					flag = False
					# get a random
					idx = random.randint(0, len(dynamicList) - 1)
					for net in ret[scanName]:
						if net.name == dynamicList[idx]:
							flag = True
							break
				# record the pick both globally (de-duplication) and in this
				# sweep's candidate list
				ret[scanName].append(
					netattr.Net(loadsave.load_csvmat(
						os.path.join(boldPath, scanName, atlasobj.name, 'bold_net', dynamicList[idx])),
						atlasobj, name=dynamicList[idx]))
				currentList.append(
					netattr.Net(loadsave.load_csvmat(
						os.path.join(boldPath, scanName, atlasobj.name, 'bold_net', dynamicList[idx])),
						atlasobj, name=dynamicList[idx]))
			except FileNotFoundError as e:
				print('File %s not found.'
					% os.path.join(boldPath, scanName, atlasobj.name, 'bold_net', 'corrcoef.csv'))
				print(e)
		# check if we add all these people in, the total amount would exceed
		if len(currentList) + len(retList) > totalNum:
			# only add some people in
			random.shuffle(currentList)
			retList += currentList[:(totalNum - len(retList))]
		else:
			# add all people in
			retList += currentList
	return retList
# Plot a circos figure of the strongest 5% of connections for each scan of
# one subject.
from mmdps.proc import netattr, atlas  # bug fix: atlas/netattr/loadsave were
from mmdps.util import loadsave        # used below but never imported
from mmdps.vis.bnv import gen_matlab, get_mesh
from mmdps.vis import braincircos
import numpy as np

atlasobj = atlas.get('brodmann_lrce')
subject_list = [
	'tanenci_20170601', 'tanenci_20170706', 'tanenci_20170814',
	'tanenci_20170922', 'tanenci_20171117'
]
# subject_list = ['wangwei_20171107', 'wangwei_20171221', 'wangwei_20180124', 'wangwei_20180211', 'wangwei_20180520']
# subject_list = ['xiezhihao_20180416', 'xiezhihao_20180524']
for i in range(0, len(subject_list)):
	# load in the given subject's net
	# bug fix: use subject_list[i] (was [i - 1], which paired scan i-1's
	# data with scan i's title and output file name; at i = 0 it silently
	# loaded the LAST scan)
	net1 = netattr.Net(
		loadsave.load_csvmat('Y:/BOLD/%s/brodmann_lrce/bold_net/corrcoef.csv' % subject_list[i]),
		atlasobj)
	# net1.data = abs(net1.data)
	# set a threshold mask: zero every link below the 95th percentile of |w|
	netList = sorted(abs(net1.data.ravel()))
	threshold = netList[int(0.95 * len(netList))]
	net1.data[abs(net1.data) < threshold] = 0
	builder = braincircos.CircosPlotBuilder(
		atlasobj,
		'%s %dth orig top 5%%' % (subject_list[i].replace('_', ' '), i + 1),
		'%s/%s %dth circos orig top 5%%.png' % (subject_list[i].split('_')[0], subject_list[i].replace('_', ' '), i + 1))
	builder.add_circoslink(braincircos.CircosLink(net1, threshold=0))
	builder.plot()
ret1.data[mask] = 0 ret2.data[mask] = 0 return (ret1, ret2, ratio) atlasobj = atlas.get('brodmann_lrce') # subject_list = ['tanenci_20170601', 'tanenci_20170706', 'tanenci_20170814', 'tanenci_20170922', 'tanenci_20171117'] subject_list = [ 'wangwei_20171107', 'wangwei_20171221', 'wangwei_20180124', 'wangwei_20180211', 'wangwei_20180520' ] # subject_list = ['xiezhihao_20180416', 'xiezhihao_20180524'] # load in the given subject's net net1 = netattr.Net( loadsave.load_csvmat('Y:/BOLD/%s/brodmann_lrce/bold_net/corrcoef.csv' % subject_list[0]), atlasobj) wd1 = netattr.Attr( loadsave.load_csvmat( 'Y:/BOLD/%s/brodmann_lrce/bold_net_attr/inter-region_wd.csv' % subject_list[0]), atlasobj) # net1.data = abs(net1.data) net2 = netattr.Net( loadsave.load_csvmat('Y:/BOLD/%s/brodmann_lrce/bold_net/corrcoef.csv' % subject_list[-1]), atlasobj) wd2 = netattr.Attr( loadsave.load_csvmat( 'Y:/BOLD/%s/brodmann_lrce/bold_net_attr/inter-region_wd.csv' % subject_list[-1]), atlasobj) # net2.data = abs(net2.data) net1, net2, ratio = all_neg(net1, net2)
def run_feature(self, feature_name, feature_config):
	"""
	Override super run_feature. Stores csv files to MongoDB directly.

	Static features are saved one file at a time; dynamic BOLD features are
	assembled slice-by-slice into DynamicNet/DynamicAttr objects first.
	When a record already exists, it is overwritten only if self.force is
	set; otherwise the feature is skipped with a message.
	"""
	if feature_config['file_type'] != '.csv':
		# only supports csv features
		return
	in_file_list, out_file_list = self.get_feature_file_path(feature_config)
	if self.is_dynamic and feature_config['modal'] == 'BOLD':
		if len(in_file_list) < 1:
			print('==Not Exist:', self.mriscan, self.atlasname, feature_name)
			return
		if feature_name.find('net') != -1:
			# dynamic network: one csv per sliding window, appended in order
			feature = netattr.DynamicNet(None, self.atlasname, self.dataconfig['dynamic']['window_length'], self.dataconfig['dynamic']['step_size'], scan = self.mriscan, feature_name = feature_name)
			for file in in_file_list:
				feature.append_one_slice(load_csvmat(file))
			try:
				self.mdb.save_dynamic_network(feature)
			except mongodb_database.MultipleRecordException:
				if self.force:
					# delete and overwrite
					self.mdb.remove_dynamic_network(self.mriscan, self.dataconfig['dynamic']['window_length'], self.dataconfig['dynamic']['step_size'], self.atlasname)
					self.mdb.save_dynamic_network(feature)
				else:
					print('!!!Already Exist: %s %s %s. Skipped' % (self.mriscan, self.atlasname, feature_name))
		else:
			# dynamic per-region attribute: one csv per sliding window
			feature = netattr.DynamicAttr(None, self.atlasname, self.dataconfig['dynamic']['window_length'], self.dataconfig['dynamic']['step_size'], scan = self.mriscan, feature_name = feature_name)
			for file in in_file_list:
				feature.append_one_slice(load_csvmat(file))
			try:
				self.mdb.save_dynamic_attr(feature)
			except mongodb_database.MultipleRecordException:
				if self.force:
					# delete and overwrite
					# NOTE(review): remove_dynamic_attr takes feature_name
					# while remove_dynamic_network does not — presumably the
					# db keys differ; confirm against mongodb_database
					self.mdb.remove_dynamic_attr(self.mriscan, feature_name, self.dataconfig['dynamic']['window_length'], self.dataconfig['dynamic']['step_size'], self.atlasname)
					self.mdb.save_dynamic_attr(feature)
				else:
					print('!!!Already Exist: %s %s %s. Skipped' % (self.mriscan, self.atlasname, feature_name))
	elif self.is_dynamic:
		# dynamic but not BOLD feature
		return
	else:
		# not dynamic
		for file in in_file_list:
			if not os.path.isfile(file):
				print('==Not Exist:', self.mriscan, self.atlasname, feature_name)
				continue
			if feature_name.find('net') != -1:
				feature = netattr.Net(load_csvmat(file), self.atlasname, self.mriscan, feature_name)
			else:
				feature = netattr.Attr(load_csvmat(file), self.atlasname, self.mriscan, feature_name)
			try:
				self.mdb.save_static_feature(feature)
			except mongodb_database.MultipleRecordException:
				if self.force:
					# delete and overwrite
					self.mdb.remove_static_feature(self.mriscan, self.atlasname, feature_name)
					self.mdb.save_static_feature(feature)
				else:
					print('!!!Already Exist: %s %s %s. Skipped' % (self.mriscan, self.atlasname, feature_name))
import csv from mmdps.proc import netattr, atlas, job, parabase from mmdps.util import loadsave from mmdps.vis.bnv import gen_matlab, get_mesh import numpy as np atlasobj = atlas.get('brodmann_lrce') # subject_list = ['tanenci_20170601', 'tanenci_20170706', 'tanenci_20170814', 'tanenci_20170922', 'tanenci_20171117'] subject_list = [ 'wangwei_20171107', 'wangwei_20171221', 'wangwei_20180124', 'wangwei_20180211', 'wangwei_20180520' ] # load in the given subject's net net1 = netattr.Net( loadsave.load_csvmat( 'Y:/BOLD/xiezhihao_20180416/brodmann_lrce/bold_net/corrcoef.csv'), atlasobj) net2 = netattr.Net( loadsave.load_csvmat( 'Y:/BOLD/xiezhihao_20180524/brodmann_lrce/bold_net/corrcoef.csv'), atlasobj) net2.data -= net1.data # set a threshold mask netList = sorted(abs(net2.data.ravel())) threshold = netList[int(0.8 * len(netList))] net2.data[abs(net2.data) < threshold] = 0 loadsave.save_csvmat( 'E:/Changgung works/jixieshou_20180703/xiezhihao/xiezhihao 21 link.edge', net2.data,
# Collect the significant connections reported by the paired t-test,
# de-duplicated so each undirected region pair appears only once.
significantPairs = []  # a list of (RegionA, RegionB) tuples
with open(
		'Z:/changgeng/jixieshou/controlexperimental/bold_net/net_ttest/original_value/patientE_after-before_paired_ttest_report.csv'
) as reportFile:
	for record in csv.DictReader(reportFile, delimiter=','):
		pair = (record['RegionA'], record['RegionB'])
		# skip pairs already seen in either orientation
		if pair in significantPairs or pair[::-1] in significantPairs:
			continue
		significantPairs.append(pair)
# load in the given subject's net
atlasobj = atlas.get('brodmann_lrce')
net = netattr.Net(
	loadsave.load_csvmat('Y:/BOLD/wangwei_20171107/brodmann_lrce/bold_net/corrcoef.csv'),
	atlasobj)
# find the appropriate nodes and links and output as BNV expected
plotNet = netattr.Net(np.zeros((atlasobj.count, atlasobj.count)), atlasobj)  # all zero matrix
for regionA, regionB in significantPairs:
	rowIdx = atlasobj.ticks.index(regionA)
	colIdx = atlasobj.ticks.index(regionB)
	# mark the link symmetrically
	plotNet.data[rowIdx, colIdx] = 1.0
	plotNet.data[colIdx, rowIdx] = 1.0
loadsave.save_csvmat(
	'E:/Changgung works/jixieshou_20180703/wangwei_20171107_link.edge',
	plotNet.data)
from mmdps.proc import netattr, atlas, job from mmdps.util import loadsave from mmdps.vis.bnv import gen_matlab, get_mesh import numpy as np # load in the given subject's attributes atlasobj = atlas.get('brodmann_lrce') subject_list = [ 'tanenci_20170601', 'tanenci_20170706', 'tanenci_20170814', 'tanenci_20170922', 'tanenci_20171117' ] # subject_list = ['wangwei_20171107', 'wangwei_20171221', 'wangwei_20180124', 'wangwei_20180211', 'wangwei_20180520'] attr1 = netattr.Attr( loadsave.load_csvmat( 'Y:/BOLD/xiezhihao_20180416/brodmann_lrce/bold_net_attr/inter-region_wd.csv' ), atlasobj) attr2 = netattr.Attr( loadsave.load_csvmat( 'Y:/BOLD/xiezhihao_20180524/brodmann_lrce/bold_net_attr/inter-region_wd.csv' ), atlasobj) attr2.data -= attr1.data # prepare BNV node file atlasobj.bnvnode.change_value(attr2.data) atlasobj.bnvnode.change_modular([int(d) for d in (attr2.data > 0)]) atlasobj.bnvnode.write( 'E:/Changgung works/jixieshou_20180703/xiezhihao/xiezhihao 21 node.node') mstr = gen_matlab( 'E:/Changgung works/jixieshou_20180703/xiezhihao/xiezhihao 21 node.node',