def load_contrasts_from_mat(spm_mat_file):
    """Return a dictionary of T-contrasts from an SPM.mat file, mapping each
    contrast name to the string linear combination of conditions for that
    contrast.

    Parameters
    ----------
    spm_mat_file : string
        Path to the SPM.mat file to load contrasts from.

    Returns
    -------
    contrasts : dict
        Dictionary of {contrast_name: linear_condition_combination}.
    """
    spm = loadmat(spm_mat_file)
    spm = spm["SPM"]
    conditions = get_conditions_list_from_mat(spm)
    contrasts_vect = get_contrasts(spm)
    return OrderedDict(
        (clean_string_for_xml(str(contrast[0][0])),
         contrast_vec_to_name(contrast[1][:len(conditions)], conditions))
        for contrast in contrasts_vect
        if str(contrast[2][0]) == "T")
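# Usage sketch (not from the original source; the SPM.mat path below is
# hypothetical): print each T-contrast name with its condition combination.
if __name__ == "__main__":
    contrasts = load_contrasts_from_mat("/path/to/SPM.mat")
    for name, combination in contrasts.items():
        print("%s: %s" % (name, combination))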
def load_data(thefile):
    print("Loading data ...")
    matfile = mio.loadmat(thefile)
    data = matfile["data"]
    triggers = matfile["triggers"]
    return data, triggers
def _check_case(self, name, files, case):
    for file_name in files:
        matdict = loadmat(file_name)
        label = "test %s; file %s" % (name, file_name)
        for k, expected in case.items():
            k_label = "%s, variable %s" % (label, k)
            assert k in matdict, "Missing key at %s" % k_label
            self._check_level(k_label, expected, matdict[k])
def flickr_spectral_clustering(matfilename):
    myhash = loadmat(matfilename)
    data = myhash['data']
    # Add a small random jitter to the affinity matrix to break ties.
    data = exp(data + (random([len(data), len(data)])) * 0.0001)  # - 0.00005)
    keys = myhash['names']
    # maxval = max(data)
    # data = max_val - data + (randn(len(data), len(data)) * 0.0001)
    classes, k = spectral_clustering_W(data, 20)
    # Print the members of each cluster.
    for i in range(k):
        print("*************")
        for j in range(len(classes)):
            if i == int(classes[j]):
                print(keys[j])
def load_regnames(spmMatFile):
    d = loadmat(spmMatFile)
    spm = d['SPM']
    return [s[0] for s in spm['xX'][0, 0]['name'][0, 0][0]]
def load_scalefactor_from_mat(spmMatFile):
    d = loadmat(spmMatFile)
    spm = d['SPM']
    return float(get_field(get_field(spm, 'xGX')[0][0], 'gSF')[0][0][0][0])
def readHeader(self):
    self.h = loadmat(self.headerFilename)
def load_contrasts(spmMatFile):
    d = loadmat(spmMatFile)
    spm = d['SPM']
    return get_contrasts(spm)
def load_paradigm_from_mat(spmMatFile):
    d = loadmat(spmMatFile)
    spm = d['SPM']
    return (get_onsets_from_spm_dict(spm), get_tr_from_spm_dict(spm))
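# Hedged sketch (not in the original source): one way the SPM.mat accessors
# above might be combined. summarize_spm_design is a hypothetical name, and
# the return shapes of the helpers are assumed from how they are used here.
def summarize_spm_design(spm_mat_file):
    regressors = load_regnames(spm_mat_file)          # design-matrix column names
    scale = load_scalefactor_from_mat(spm_mat_file)   # grand-mean scale factor
    onsets, tr = load_paradigm_from_mat(spm_mat_file)
    print("%d regressors, TR=%s, scale factor %s"
          % (len(regressors), tr, scale))
    return regressors, onsets, tr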
    # p1 = wpwr(data)[0]
    # pp1 = wpwr(data_mean)[0]
    # p2 = wpwr(data_tspca)[0]
    # pp2 = wpwr(data_tspca_mean)[0]
    # print("TSPCA done. ", 100 * p2 / p1, " of raw power remains")
    # print("trial-averaged: ", 100 * pp2 / pp1, " of raw power remains")

    # apply SNS (sensor noise suppression)
    nneighbors = 10
    print("SNS ...")
    data_tspca_sns = sns(data_tspca, nneighbors)

    # apply DSS (denoising source separation)
    print("DSS ...")
    ## Keep all PC components
    data_tspca_sns = demean(data_tspca_sns)[0]
    todss, fromdss, ratio, pwr = dss1(data_tspca_sns)
    ## c3 = DSS components
    data_tspca_sns_dss = fold(dot(unfold(data_tspca_sns), todss),
                              data_tspca_sns.shape[0])
    return data_tspca_sns


x = mio.loadmat("data2.mat")
data = x["data"]
ref = x["ref"]
cleandata = clean(data, ref)
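# Hedged sketch (an assumption, not the original implementation): fold/unfold
# above look like the usual epoch <-> matrix reshaping helpers from MEG
# denoising toolboxes, roughly equivalent to:
import numpy as np

def unfold_sketch(x):
    # (samples, channels, trials) -> (samples * trials, channels)
    samples, channels, trials = x.shape
    return np.transpose(x, (0, 2, 1)).reshape(samples * trials, channels)

def fold_sketch(x, epoch_size):
    # (samples * trials, channels) -> (samples, channels, trials)
    rows, channels = x.shape
    return np.transpose(x.reshape(epoch_size, rows // epoch_size, channels),
                        (0, 2, 1))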
def readData(self):
    self.d = loadmat(self.dataFilename)
    # time vector: sample index of 'di0' scaled by the 0.04 sampling step
    self.tvec = arange(len(self.d['di0'])) * 0.04
def testOnSpmMat(self):
    spmFile = '/export/TMP/thomas/Data/Localizer/bru2698/functional/fMRI/' \
              'spm_analysis/SPM.mat'
    if os.path.exists(spmFile):
        from scipy.io.mio import loadmat
        spm = loadmat(spmFile)
def loadOnsets(spmMatFile):
    d = loadmat(spmMatFile)
    spm = d['SPM']
    return get_onsets_from_spm_dict(spm)
from os.path import exists

if len(argv) != 6:
    print("usage: %s DataDir QueueType BatchSize WriteOutData(yes/no) Timeout"
          % (argv[0]))
    exit(1)

datadir = argv[1]
queue = argv[2]
batch_size = int(argv[3])
write_data = argv[4]
timeout = int(argv[5])
CPCluster = path[0]

# Load Batch_data and figure out the sets that need running
batch_info = loadmat("%(datadir)s/Batch_data.mat" % (locals()))
num_sets = batch_info['handles'].Current.NumberOfImageSets

# Loop over batches, check the status file, and print out commands for those
# that need it
for start in range(1, num_sets + 1, batch_size):
    end = start + batch_size - 1
    if end > num_sets:
        end = num_sets
    status_file_name = ("%(datadir)s/status/Batch_%(start)d_to_%(end)d_DONE.mat"
                        % (locals()))
    if not exists(status_file_name):
        print("bsub -q %(queue)s -o %(datadir)s/txt_output/%(start)s_to_%(end)s.txt "
              "%(CPCluster)s/CPCluster.py %(datadir)s/Batch_data.mat %(start)s "
              "%(end)s %(datadir)s/status Batch_ %(write_data)s %(timeout)d"
              % (locals()))

# for i in `; do
#     echo bsub -q $QueueType -o ${BatchTxtOutputDir}/${i}.txt $CPCluster/CPCluster.sh ${BatchDataDir}/${BatchPrefix}data.mat $i $BatchStatusDir $BatchPrefix
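# Hedged usage sketch (hypothetical script name, paths, and queue): the script
# above would be invoked as, e.g.,
#   python CPClusterSubmit.py /imaging/batch1 broad 10 yes 300
# and prints one `bsub` submission line per batch of 10 image sets whose
# status file does not yet exist.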
my_batch = {
    "email": form_data["email"].value,
    "queue": form_data["queue"].value,
    "data_dir": form_data["data_dir"].value,
    "write_data": (form_data["write_data"].value.upper() == "Y" and 1) or 0,
    "batch_size": int(form_data["batch_size"].value),
    "timeout": float(form_data["timeout"].value),
    "cpcluster": CPCluster,
    "batch_file": batch_file
}

error = CheckParameters(my_batch)
if error:
    exception = RuntimeError()
    exception.message = error
    raise exception

batch_info = loadmat(batch_file)
my_batch["num_sets"] = batch_info['handles'].Current.NumberOfImageSets

runs = PartitionBatches(my_batch)
batch_id = RunBatch.CreateBatchRun(my_batch)
results = RunBatch.RunAll(batch_id)

text = []
text.append("<html>")
text.append("<head><title>Batch # %d</title>" % (batch_id))
text.append("<style type='text/css'>")
text.append("""
table {
    border-spacing: 0px;
    border-collapse: collapse;
}
td {
    text-align: left;