def readNet(inEdgeFile, atlasobj):
    """ Read in an edge file and return a net """
    import numpy as np  # numpy is not imported at module level in this snippet
    from mmdps.proc import netattr
    data = np.loadtxt(inEdgeFile, delimiter='\t')
    return netattr.Net(data, atlasobj)
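A minimal usage sketch for readNet, assuming the function above is in scope. The .edge path is a hypothetical placeholder (a tab-delimited square matrix such as the ones saved by the scripts further below).

# Hedged usage sketch; the .edge path is a hypothetical placeholder.
from mmdps.proc import atlas

atlasobj = atlas.get('brodmann_lrce')
net = readNet('E:/example_link.edge', atlasobj)  # expects a tab-delimited square matrix
print(net.data.shape)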
def loadSpecificNets(boldPath, atlasobj, timeCase=1, subjectList=None):
    """
    This function is an implementation for the new mmdps version.
    It is used to load the first/second/etc. scans of subjects.
    Specify which subjects to load as a list of strings or a file path in subjectList.
    If no subjectList is given, load all scans.
    """
    subjectList = process_subject_list(subjectList)
    ret = []
    subjectName = 'None'
    lastSubjectName = 'Unknown'
    occurrenceCounter = 0
    for scan in sorted(os.listdir(boldPath)):
        if scan.find('_') != -1:
            subjectName = scan[:scan.find('_')]
        else:
            subjectName = scan
        if subjectName != lastSubjectName:
            occurrenceCounter = 0
            lastSubjectName = subjectName
        occurrenceCounter += 1
        if subjectList is not None and subjectName not in subjectList:
            continue
        if occurrenceCounter == timeCase:
            try:
                ret.append(netattr.Net(
                    loadsave.load_csvmat(os.path.join(boldPath, scan, atlasobj.name, 'bold_net.csv')),
                    atlasobj))
            except FileNotFoundError as e:
                print('File %s not found.' % os.path.join(boldPath, scan, atlasobj.name, 'bold_net.csv'))
                print(e)
    return ret
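A minimal usage sketch for loadSpecificNets, assuming the function is in scope (it appears as io_utils.loadSpecificNets in a later script). The BOLD root folder and the subject list file are placeholders; passing a text file as subjectList is confirmed by the docstring and the later script.

# Hedged usage sketch; 'Y:/BOLD' and the subject list file are placeholders.
from mmdps.proc import atlas

atlasobj = atlas.get('brodmann_lrce')
# load each listed subject's second scan
secondScanNets = loadSpecificNets('Y:/BOLD', atlasobj, timeCase=2,
                                  subjectList='E:/CS_subjects.txt')
print('Loaded %d networks' % len(secondScanNets))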
def trans_netattr(self, subject_scan, atlas_name, feature_name, value):
    if value.ndim == 1:  # TODO: this part needs to be revised
        arr = netattr.Attr(value, atlas.get(atlas_name), subject_scan, feature_name)
        return arr
    else:
        net = netattr.Net(value, atlas.get(atlas_name), subject_scan, feature_name)
        return net
def get_static_net(self, scan, atlas_name, comment={}):
    """ Return a static net object directly """
    query = dict(scan=scan, comment=comment)
    col = self.getcol(atlas_name, 'BOLD.net')
    count = self.sndb[col].count_documents(query)
    if count == 0:
        raise NoRecordFoundException(scan + atlas_name + 'BOLD.net')
    elif count > 1:
        raise MultipleRecordException(scan + atlas_name + 'BOLD.net')
    else:
        NetData = pickle.loads(self.sndb[col].find_one(query)['value'])
        atlasobj = atlas.get(atlas_name)
        net = netattr.Net(NetData, atlasobj, scan, 'BOLD.net')
        return net
def get_net(self, scan, atlas_name, feature):
    # return a net object directly
    if self.exist_static(scan, atlas_name, feature):
        binary_data = self.query_static(scan, atlas_name, feature)['value']
        netdata = pickle.loads(binary_data)
        atlasobj = atlas.get(atlas_name)
        net = netattr.Net(netdata, atlasobj, scan, feature)
        return net
    else:
        print("Cannot find the requested document. scan: %s, atlas: %s, feature: %s." % (scan, atlas_name, feature))
        raise NoRecordFoundException(scan)
def loadAllTemporalNets(boldPath, totalTimeCase, atlasobj, subjectList=None, specificTime=None):
    """
    This function is used to load temporal scans.
    Every subject with at least totalTimeCase scans will be loaded and returned in a dict.
    The key of the dict is the subject name. Each element in the dict is the temporal
    scans of one subject, stored as a list of BrainNet.
    Parameters:
    - subjectList: a list of strs or a path to a text file
    - specificTime: a dict, with key = subject name, value = [timeStr1, timeStr2, ...]
      The length of each value should equal totalTimeCase.
    """
    subjectList = process_subject_list(subjectList)
    ret = {}
    currentPersonScans = []
    currentPersonTime = []
    subjectName = 'None'
    lastSubjectName = 'Unknown'
    occurrenceCounter = 0
    for scan in sorted(os.listdir(boldPath)):
        subjectName = scan[:scan.find('_')]
        if subjectName != lastSubjectName:
            if occurrenceCounter >= totalTimeCase:
                if specificTime is not None and lastSubjectName in specificTime:
                    ret[lastSubjectName] = [
                        currentPersonScans[currentPersonTime.index(timeStr)]
                        for timeStr in specificTime[lastSubjectName]
                    ]
                else:
                    ret[lastSubjectName] = currentPersonScans[:totalTimeCase]
            occurrenceCounter = 0
            lastSubjectName = subjectName
            currentPersonScans = []
            currentPersonTime = []
        if subjectList is not None and subjectName not in subjectList:
            continue
        occurrenceCounter += 1
        currentPersonScans.append(netattr.Net(
            loadsave.load_csvmat(os.path.join(boldPath, scan, atlasobj.name, 'bold_net', 'corrcoef.csv')),
            atlasobj))
        currentPersonTime.append(scan[scan.find('_') + 1:])
    # flush the final subject, whose scans are only committed when the subject name changes
    if occurrenceCounter >= totalTimeCase:
        if specificTime is not None and lastSubjectName in specificTime:
            ret[lastSubjectName] = [
                currentPersonScans[currentPersonTime.index(timeStr)]
                for timeStr in specificTime[lastSubjectName]
            ]
        else:
            ret[lastSubjectName] = currentPersonScans[:totalTimeCase]
    return ret
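A minimal usage sketch for loadAllTemporalNets, assuming the function is in scope. The subject name is taken from the scripts below, but the BOLD root folder, totalTimeCase value, and time strings are illustrative assumptions.

# Hedged usage sketch; paths, totalTimeCase, and time strings are illustrative only.
from mmdps.proc import atlas

atlasobj = atlas.get('brodmann_lrce')
temporalNets = loadAllTemporalNets(
    'Y:/BOLD', 2, atlasobj,
    subjectList=['tanenci'],
    specificTime={'tanenci': ['20170601', '20170706']})
for name, nets in temporalNets.items():
    print(name, len(nets))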
def loadAllDynamicNets(boldPath, atlasobj, dynamicDict, timeCase=1, subjectList=None):
    """
    This function loads all dynamic networks of the given subjects in the list.
    Only data from the timeCase-th session are loaded.
    dynamicDict contains 'windowLength' and 'stepSize', specified as integers.
    """
    subjectList = process_subject_list(subjectList)
    ret = []
    subjectName = 'None'
    lastSubjectName = 'Unknown'
    occurrenceCounter = 0
    for scan in sorted(os.listdir(boldPath)):
        if scan.find('_') != -1:
            subjectName = scan[:scan.find('_')]
        else:
            subjectName = scan
        if subjectName != lastSubjectName:
            occurrenceCounter = 0
            lastSubjectName = subjectName
        occurrenceCounter += 1
        if subjectList is not None and subjectName not in subjectList:
            continue
        if occurrenceCounter == timeCase:
            try:
                for file in sorted(os.listdir(os.path.join(boldPath, scan, atlasobj.name, 'bold_net'))):
                    if file.find('-') != -1:
                        ret.append(netattr.Net(
                            loadsave.load_csvmat(os.path.join(boldPath, scan, atlasobj.name, 'bold_net', file)),
                            atlasobj))
            except FileNotFoundError as e:
                print('File %s not found.' % os.path.join(boldPath, scan, atlasobj.name, 'bold_net', 'corrcoef.csv'))
                print(e)
    return ret
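A minimal usage sketch for loadAllDynamicNets, assuming the function is in scope. The window length and step size are illustrative values, not taken from the original configuration.

# Hedged usage sketch; the windowLength/stepSize values are illustrative assumptions.
from mmdps.proc import atlas

atlasobj = atlas.get('brodmann_lrce')
dynamicNets = loadAllDynamicNets(
    'Y:/BOLD', atlasobj,
    dynamicDict={'windowLength': 100, 'stepSize': 3},
    timeCase=1)
print('Loaded %d dynamic networks' % len(dynamicNets))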
def loadAllNets(boldPath, atlasobj, scanList=None):
    """
    This function is used to load all scans. The given list contains scan names.
    """
    scanList = process_subject_list(scanList)
    ret = []
    if scanList is None:
        scanList = sorted(os.listdir(boldPath))
    for scan in sorted(os.listdir(boldPath)):
        if scan not in scanList:
            continue
        try:
            ret.append(netattr.Net(
                loadsave.load_csvmat(os.path.join(boldPath, scan, atlasobj.name, 'bold_net', 'corrcoef.csv')),
                atlasobj))
        except FileNotFoundError:
            print('File %s not found.' % os.path.join(boldPath, scan, atlasobj.name, 'bold_net', 'corrcoef.csv'))
    return ret
def loadRandomDynamicNets(boldPath, atlasobj, totalNum=0, scanList=None):
    """
    This function is used to randomly load the dynamic nets of subjects.
    Specify how many nets in total you would like to get in totalNum.
    Specify which scans to load as a list of strings or a file path in scanList.
    Logic: randomly load one dynamic net for each scan (making sure not to repeat) and add it.
    If the total number is enough, return. If not, continue to load one more dynamic net per scan.
    """
    retList = []
    scanList = process_subject_list(scanList)
    ret = {}
    scanName = 'None'
    lastScanName = 'Unknown'
    iterationCounter = 0  # counter for total iterations, equals the number of dynamic nets of each scan in ret
    while len(retList) < totalNum:
        iterationCounter += 1
        currentList = []
        for scanName in sorted(os.listdir(boldPath)):
            if scanName != lastScanName:
                occurrenceCounter = 0
                lastScanName = scanName
            occurrenceCounter += 1
            if scanList is not None and scanName not in scanList:
                continue
            # randomly load one dynamic net of this subject
            if scanName not in ret:
                ret[scanName] = []
            try:
                # randomly search for one net that has not been picked yet
                dynamicList = sorted(os.listdir(os.path.join(boldPath, scanName, atlasobj.name, 'bold_net')))
                dynamicList.remove('corrcoef.csv')
                dynamicList.remove('timeseries.csv')
                flag = True
                while flag:
                    flag = False
                    # get a random index
                    idx = random.randint(0, len(dynamicList) - 1)
                    for net in ret[scanName]:
                        if net.name == dynamicList[idx]:
                            flag = True
                            break
                # load the chosen dynamic net once and record it in both containers
                newNet = netattr.Net(
                    loadsave.load_csvmat(os.path.join(boldPath, scanName, atlasobj.name, 'bold_net', dynamicList[idx])),
                    atlasobj, name=dynamicList[idx])
                ret[scanName].append(newNet)
                currentList.append(newNet)
            except FileNotFoundError as e:
                print('File %s not found.' % os.path.join(boldPath, scanName, atlasobj.name, 'bold_net', 'corrcoef.csv'))
                print(e)
        # check whether adding all these scans would exceed the total amount
        if len(currentList) + len(retList) > totalNum:
            # only add a random subset
            random.shuffle(currentList)
            retList += currentList[:(totalNum - len(retList))]
        else:
            # add everyone
            retList += currentList
    return retList
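A minimal usage sketch for loadRandomDynamicNets, assuming the function is in scope. The totalNum value and the scan list are illustrative placeholders.

# Hedged usage sketch; totalNum and scanList values are placeholders.
from mmdps.proc import atlas

atlasobj = atlas.get('brodmann_lrce')
randomNets = loadRandomDynamicNets(
    'Y:/BOLD', atlasobj, totalNum=20,
    scanList=['tanenci_20170601', 'tanenci_20170706'])
print('Collected %d dynamic networks' % len(randomNets))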
from mmdps.proc import atlas, netattr
from mmdps.util import loadsave
from mmdps.vis.bnv import gen_matlab, get_mesh
from mmdps.vis import braincircos
import numpy as np

atlasobj = atlas.get('brodmann_lrce')
subject_list = [
    'tanenci_20170601', 'tanenci_20170706', 'tanenci_20170814',
    'tanenci_20170922', 'tanenci_20171117'
]
# subject_list = ['wangwei_20171107', 'wangwei_20171221', 'wangwei_20180124', 'wangwei_20180211', 'wangwei_20180520']
# subject_list = ['xiezhihao_20180416', 'xiezhihao_20180524']
for i in range(len(subject_list)):
    # load in the given subject's net
    net1 = netattr.Net(
        loadsave.load_csvmat('Y:/BOLD/%s/brodmann_lrce/bold_net/corrcoef.csv' % subject_list[i]),
        atlasobj)
    # net1.data = abs(net1.data)
    # set a threshold mask
    netList = sorted(abs(net1.data.ravel()))
    threshold = netList[int(0.95 * len(netList))]
    net1.data[abs(net1.data) < threshold] = 0
    builder = braincircos.CircosPlotBuilder(
        atlasobj,
        '%s %dth orig top 5%%' % (subject_list[i].replace('_', ' '), i + 1),
        '%s/%s %dth circos orig top 5%%.png' % (subject_list[i].split('_')[0],
                                                subject_list[i].replace('_', ' '), i + 1))
    builder.add_circoslink(braincircos.CircosLink(net1, threshold=0))
    builder.plot()
ChanggungPatientNets = io_utils.loadSpecificNets(
    mmdps_locale.ChanggungAllFullPath, atlasobj,
    subjectList=os.path.join(mmdps_locale.ChanggungRootPath, 'CS_subjects.txt'))
ChanggungHealthyNets = io_utils.loadSpecificNets(
    mmdps_locale.ChanggungAllFullPath, atlasobj,
    subjectList=os.path.join(mmdps_locale.ChanggungRootPath, 'normal_subjects.txt'))
sig_connections = stats_utils.filter_sigdiff_connections_Bonferroni(
    ChanggungPatientNets, ChanggungHealthyNets)
sigDiffNet = netattr.Net(np.zeros((atlasobj.count, atlasobj.count)), atlasobj)
for conn in sig_connections:
    sigDiffNet.data[conn[0], conn[1]] = 1
title = 'CS_signet'
outfilepath = 'E:/Results/CS_signet/test.png'
builder = braincircos.CircosPlotBuilder(atlasobj, title, outfilepath)
builder.add_circoslink(braincircos.CircosLink(sigDiffNet))
builder.add_circosvalue(
    braincircos.CircosValue(netattr.Attr(np.random.uniform(size=atlasobj.count), atlasobj)))
builder.customizeSize('0.80', '10p')
builder.plot()
def run_feature(self, feature_name, feature_config):
    """ Override super run_feature. Stores csv files to MongoDB directly """
    if feature_config['file_type'] != '.csv':  # only supports csv features
        return
    in_file_list, out_file_list = self.get_feature_file_path(feature_config)
    if self.is_dynamic and feature_config['modal'] == 'BOLD':
        if len(in_file_list) < 1:
            print('==Not Exist:', self.mriscan, self.atlasname, feature_name)
            return
        if feature_name.find('net') != -1:
            feature = netattr.DynamicNet(
                None, self.atlasname,
                self.dataconfig['dynamic']['window_length'],
                self.dataconfig['dynamic']['step_size'],
                scan=self.mriscan, feature_name=feature_name)
            for file in in_file_list:
                feature.append_one_slice(load_csvmat(file))
            try:
                self.mdb.save_dynamic_network(feature)
            except mongodb_database.MultipleRecordException:
                if self.force:
                    # delete and overwrite
                    self.mdb.remove_dynamic_network(
                        self.mriscan,
                        self.dataconfig['dynamic']['window_length'],
                        self.dataconfig['dynamic']['step_size'],
                        self.atlasname)
                    self.mdb.save_dynamic_network(feature)
                else:
                    print('!!!Already Exist: %s %s %s. Skipped' % (self.mriscan, self.atlasname, feature_name))
        else:
            feature = netattr.DynamicAttr(
                None, self.atlasname,
                self.dataconfig['dynamic']['window_length'],
                self.dataconfig['dynamic']['step_size'],
                scan=self.mriscan, feature_name=feature_name)
            for file in in_file_list:
                feature.append_one_slice(load_csvmat(file))
            try:
                self.mdb.save_dynamic_attr(feature)
            except mongodb_database.MultipleRecordException:
                if self.force:
                    # delete and overwrite
                    self.mdb.remove_dynamic_attr(
                        self.mriscan, feature_name,
                        self.dataconfig['dynamic']['window_length'],
                        self.dataconfig['dynamic']['step_size'],
                        self.atlasname)
                    self.mdb.save_dynamic_attr(feature)
                else:
                    print('!!!Already Exist: %s %s %s. Skipped' % (self.mriscan, self.atlasname, feature_name))
    elif self.is_dynamic:
        # dynamic but not a BOLD feature
        return
    else:
        # not dynamic
        for file in in_file_list:
            if not os.path.isfile(file):
                print('==Not Exist:', self.mriscan, self.atlasname, feature_name)
                continue
            if feature_name.find('net') != -1:
                feature = netattr.Net(load_csvmat(file), self.atlasname, self.mriscan, feature_name)
            else:
                feature = netattr.Attr(load_csvmat(file), self.atlasname, self.mriscan, feature_name)
            try:
                self.mdb.save_static_feature(feature)
            except mongodb_database.MultipleRecordException:
                if self.force:
                    # delete and overwrite
                    self.mdb.remove_static_feature(self.mriscan, self.atlasname, feature_name)
                    self.mdb.save_static_feature(feature)
                else:
                    print('!!!Already Exist: %s %s %s. Skipped' % (self.mriscan, self.atlasname, feature_name))
import csv
from mmdps.proc import netattr, atlas, job, parabase
from mmdps.util import loadsave
from mmdps.vis.bnv import gen_matlab, get_mesh
import numpy as np

atlasobj = atlas.get('brodmann_lrce')
# subject_list = ['tanenci_20170601', 'tanenci_20170706', 'tanenci_20170814', 'tanenci_20170922', 'tanenci_20171117']
subject_list = [
    'wangwei_20171107', 'wangwei_20171221', 'wangwei_20180124',
    'wangwei_20180211', 'wangwei_20180520'
]

# load in the given subject's nets
net1 = netattr.Net(
    loadsave.load_csvmat('Y:/BOLD/xiezhihao_20180416/brodmann_lrce/bold_net/corrcoef.csv'),
    atlasobj)
net2 = netattr.Net(
    loadsave.load_csvmat('Y:/BOLD/xiezhihao_20180524/brodmann_lrce/bold_net/corrcoef.csv'),
    atlasobj)
net2.data -= net1.data

# set a threshold mask
netList = sorted(abs(net2.data.ravel()))
threshold = netList[int(0.8 * len(netList))]
net2.data[abs(net2.data) < threshold] = 0
loadsave.save_csvmat(
    'E:/Changgung works/jixieshou_20180703/xiezhihao/xiezhihao 21 link.edge',
    net2.data)
def loadSingle(self, mriscan, attrname='BOLD.net'):
    """Load the net object, with atlasobj."""
    netdata = self.loaddata(mriscan, attrname)
    net = netattr.Net(netdata, self.atlasobj, mriscan, attrname)
    return net
sigConnections = []  # a list of tuples
with open('Z:/changgeng/jixieshou/controlexperimental/bold_net/net_ttest/original_value/patientE_after-before_paired_ttest_report.csv') as f:
    reader = csv.DictReader(f, delimiter=',')
    for row in reader:
        connection = (row['RegionA'], row['RegionB'])
        reversedConnection = (row['RegionB'], row['RegionA'])
        if connection in sigConnections or reversedConnection in sigConnections:
            continue
        sigConnections.append(connection)

# load in the given subject's net
atlasobj = atlas.get('brodmann_lrce')
net = netattr.Net(
    loadsave.load_csvmat('Y:/BOLD/wangwei_20171107/brodmann_lrce/bold_net/corrcoef.csv'),
    atlasobj)

# find the appropriate nodes and links and output as BNV expects
plotNet = netattr.Net(np.zeros((atlasobj.count, atlasobj.count)), atlasobj)  # all-zero matrix
for connection in sigConnections:
    plotNet.data[atlasobj.ticks.index(connection[0]), atlasobj.ticks.index(connection[1])] = 1.0
    plotNet.data[atlasobj.ticks.index(connection[1]), atlasobj.ticks.index(connection[0])] = 1.0
loadsave.save_csvmat(
    'E:/Changgung works/jixieshou_20180703/wangwei_20171107_link.edge',
    plotNet.data)