import os
import sys

import numpy as np
import mvpa2.suite as mvpa2

job_num = int(sys.argv[1])  # used to be a loop, but now runs on the cluster in parallel
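# job_table is not shown in this excerpt; presumably an upstream step
# enumerates every subject x feature-set x chunk combination so each cluster
# job gets one row. A minimal sketch (the subject IDs, counts, cwd1, and the
# unpacking of sub/fset_num below are all assumptions):
import itertools

subjects = ['01', '02', '03']  # hypothetical subject IDs
n_fsets = 100                  # matches the "100 different areas" comment below
n_chunks = 4                   # hypothetical chunk count per feature set
job_table = [list(row) for row in
             itertools.product(subjects, range(n_fsets), range(n_chunks))]

sub = job_table[job_num][0]       # subject ID for this job
fset_num = job_table[job_num][1]  # which block of 1000 voxels to analyze
cwd1 = os.getcwd()                # assumed data root used in the paths below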
chunk_num = [job_table[job_num][2]]
filename = sub + '_fset' + str(fset_num) + '_chunk' + str(chunk_num[0])

print sub

print np.array(job_table).shape

behav_file = 'sub' + sub + '_attr.txt'

bold_fname = os.path.join(cwd1, sub, 'betas_sub' + sub +
                          '.nii.gz')  #full functional timeseries (beta series)

attr_fname = os.path.join(cwd1, 'all_attr',
                          behav_file)  #codes stimuli number and run number
attr = mvpa2.SampleAttributes(attr_fname)  #loads attributes into pymvpa
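# SampleAttributes expects a whitespace-separated text file with one row per
# sample: the target label first, then the chunk (run) number. A hypothetical
# excerpt of sub01_attr.txt (labels and run numbers are made up):
#
#   stim01 0
#   stim02 0
#   stim01 1
#   stim02 1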

ds = mvpa2.fmri_dataset(bold_fname, targets=attr.targets, chunks=attr.chunks)

ds = mvpa2.remove_nonfinite_features(ds)  # drop voxels with NaN/Inf values
ds = mvpa2.remove_invariant_features(ds)  # drop voxels with zero variance

# break the brain into ~100 blocks of 1000 voxels each so the searchlight can
# be parallelized across cluster jobs; slicing clamps at the last feature, so
# the final (short) block needs no try/except
ds = ds[:, fset_num * 1000:(fset_num + 1) * 1000]
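# Quick demonstration (hypothetical sizes) that a slice end past the array
# length is truncated rather than raising, which is why the try/except that
# previously wrapped the slice above was dead code:
_demo = np.arange(10)
assert list(_demo[8:12]) == [8, 9]  # stop index beyond len() is clamped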

stimuli = list(ds.uniquetargets[:54])  # first 54 unique stimulus labels (sorted)
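# The feature block prepared above presumably feeds a searchlight next. This
# excerpt stops here; a minimal PyMVPA sketch of how that stage might look
# (the classifier, radius, and accuracy function are assumptions):
clf = mvpa2.LinearCSVMC()
cv = mvpa2.CrossValidation(clf, mvpa2.NFoldPartitioner(),
                           errorfx=lambda p, t: np.mean(p == t))
sl = mvpa2.sphere_searchlight(cv, radius=3)
sl_map = sl(ds)  # one accuracy score per feature in this block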
Example #2
import os
import gzip
import pickle
import datetime

import mvpa2.suite as M

# trimmedCache, preprocessedCache, sessionPath, boldDelay, and stimulusWidth
# are defined earlier in the original script (not shown in this excerpt)
if os.path.isfile(trimmedCache):
    print 'loading cached trimmed dataset', trimmedCache, datetime.datetime.now()
    dataset = pickle.load(gzip.open(trimmedCache, 'rb'))
else:
    if os.path.isfile(preprocessedCache) and False:  # 'and False' forces a rebuild below
        print 'loading cached preprocessed dataset', preprocessedCache, datetime.datetime.now()
        dataset = pickle.load(gzip.open(preprocessedCache, 'rb', 5))
    else:
        # if not, generate directly, and then cache
        print 'loading and creating dataset', datetime.datetime.now()
        # chunksTargets_boldDelay="chunksTargets_boldDelay4-4.txt" #Modified
        chunksTargets_boldDelay = "chunksTargets_boldDelay{0}-{1}-direction.txt".format(
            boldDelay, stimulusWidth)
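        # e.g. boldDelay=4 and stimulusWidth=4 (the values suggested by the
        # commented-out line above) would yield
        # "chunksTargets_boldDelay4-4-direction.txt"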

        volAttributes = M.SampleAttributes(
            os.path.join(sessionPath, 'behavioural',
                         chunksTargets_boldDelay))  # default is 3.txt
        # print volAttributes.targets
        # print len(volAttributes.targets)
        # print volAttributes.chunks
        # print len(volAttributes.chunks)
        dataset = M.fmri_dataset(
            samples=os.path.join(sessionPath,
                                 'analyze/functional/functional4D.nii'),
            targets=volAttributes.targets,  # I think this was "labels" in versions 0.4.*
            chunks=volAttributes.chunks,
            mask=os.path.join(sessionPath,
                              'analyze/structural/lc2ms_deskulled.hdr'))
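        # The branches above only read the caches; presumably the generation
        # path ends by writing them back. A sketch, assuming the same paths
        # and the gzip compression level used when reading:
        pickle.dump(dataset, gzip.open(preprocessedCache, 'wb', 5))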

        # DATASET ATTRIBUTES (see AttrDataset)