Example #1
def make_mask(mask, volume, mask_file):
    # Zero out mask voxels where the functional volume is zero,
    # then save the corrected mask to mask_file.
    import numpy as np
    from pyhrf.ndarray import xndarray
    m = xndarray.load(mask)
    v = xndarray.load(volume)

    mask = m.copy()
    #mask.data = np.zeros_like(m.data)
    mask.data[np.where(v.data == 0.)] = 0
    mask.save(mask_file)
    return
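A minimal call sketch for the make_mask helper above; the NIfTI file names are hypothetical placeholders, not files shipped with pyhrf.

# Hypothetical input/output paths -- adapt to your own data layout.
tissue_mask_file = 'rcut_tissue_mask.nii'    # mask to be corrected
mean_func_file = 'mean_functional.nii'       # volume whose zero voxels are discarded
corrected_mask_file = 'mask.nii'             # destination of the corrected mask

make_mask(tissue_mask_file, mean_func_file, corrected_mask_file)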
Example #3
    def setUp(self):
        cmask = xndarray.load(xcore.DEFAULT_MASK)
        self.mask_vol = cmask.data
        self.vol_meta_data = cmask.meta_data

        self.bold_vol = xndarray.load(xcore.DEFAULT_BOLD).data
        m = np.where(self.mask_vol)
        self.bold_flat = self.bold_vol[m[0], m[1], m[2],:].T

        self.onsets = xcore.DEFAULT_ONSETS
        self.durations = xcore.DEFAULT_STIM_DURATIONS
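The masked flattening in setUp above is plain NumPy fancy indexing; a small self-contained sketch with synthetic data (shapes are arbitrary) shows the resulting layout, one row per time point and one column per in-mask voxel.

import numpy as np

# Synthetic 3D mask and 4D "BOLD" volume (x, y, z, time); sizes are arbitrary.
mask_vol = np.zeros((4, 5, 6), dtype=int)
mask_vol[1:3, 2:4, 1:5] = 1
bold_vol = np.random.randn(4, 5, 6, 10)

m = np.where(mask_vol)                      # tuple of coordinate arrays
bold_flat = bold_vol[m[0], m[1], m[2], :].T

# One row per time point, one column per voxel inside the mask.
assert bold_flat.shape == (10, mask_vol.sum())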
Example #4
def plot_maps(plot_params, anat_fn, anat_slice_def, fig_dir,
              orientation=['axial','sagittal'], crop_extension=None,
              plot_anat=True, plot_fontsize=25, fig_dpi=75):

    ldata = []
    for p in plot_params:
        c = xndarray.load(p['fn']).sub_cuboid(**p['slice_def'])
        c.set_orientation(orientation)
        ldata.append(c.data)

    c_anat = xndarray.load(anat_fn).sub_cuboid(**anat_slice_def)
    c_anat.set_orientation(orientation)

    resolution = c_anat.meta_data[1]['pixdim'][1:4]
    slice_resolution = resolution[MRI4Daxes.index(orientation[0])], \
      resolution[MRI4Daxes.index(orientation[1])]

    all_data = np.array(ldata)

    if 'prl' in plot_params[0]['fn']:
        norm = normalize(all_data.min(), all_data.max()*1.05)
        print 'norm:', (all_data.min(), all_data.max())
    else:
        norm = normalize(all_data.min(), all_data.max())

    print 'norm:', (all_data.min(), all_data.max())
    for data, plot_param in zip(all_data, plot_params):
        fn = plot_param['fn']
        plt.figure()
        print 'fn:', fn
        print '->', (data.min(), data.max())
        if plot_anat:
            anat_data = c_anat.data
        else:
            anat_data = None
        plot_func_slice(data, anatomy=anat_data,
                        parcellation=plot_param.get('mask'),
                        func_cmap=cmap,
                        parcels_line_width=1., func_norm=norm,
                        resolution=slice_resolution,
                        crop_extension=crop_extension)
        set_ticks_fontsize(plot_fontsize)

        fig_fn = op.join(fig_dir, '%s.png' %op.splitext(op.basename(fn))[0])
        output_fig_fn = plot_param.get('output_fig_fn', fig_fn)

        print 'Save to: %s' %output_fig_fn
        plt.savefig(output_fig_fn, dpi=fig_dpi)
        autocrop(output_fig_fn)
    return norm
Example #5
    def __init__(self,
                 contrasts={'dummy_contrast_example': '3*audio-video/3'},
                 contrast_test_baseline=0.0,
                 hrf_model='Canonical',
                 drift_model='Cosine',
                 hfcut=128.,
                 residuals_model='spherical',
                 fit_method='ols',
                 outputPrefix='glm_',
                 rescale_results=False,
                 rescale_factor_file=None,
                 fir_delays=[0],
                 output_fit=False):

        xmlio.XmlInitable.__init__(self)
        FMRIAnalyser.__init__(self, outputPrefix)

        self.output_fit = output_fit
        self.hrf_model = hrf_model
        self.drift_model = drift_model
        self.fir_delays = fir_delays
        self.hfcut = hfcut
        self.residuals_model = residuals_model
        self.fit_method = fit_method
        self.contrasts = contrasts
        self.contrasts.pop('dummy_contrast_example', None)
        self.con_bl = contrast_test_baseline
        self.rescale_results = rescale_results

        if rescale_factor_file is not None:
            self.rescale_factor = xndarray.load(rescale_factor_file).data
        else:
            self.rescale_factor = None
Example #6
    def __init__(self, contrasts={'dummy_contrast_example':'3*audio-video/3'},
                 contrast_test_baseline=0.0,
                 hrf_model='Canonical', drift_model='Cosine', hfcut=128.,
                 residuals_model='spherical',fit_method='ols',
                 outputPrefix='glm_', rescale_results=False,
                 rescale_factor_file='', fir_delays=[0]):

        FMRIAnalyser.__init__(self, outputPrefix)
        xmlio.XMLable2.__init__(self)


        self.hrf_model = hrf_model
        self.drift_model = drift_model
        self.fir_delays = fir_delays
        self.hfcut = hfcut
        self.residuals_model = residuals_model
        self.fit_method = fit_method
        self.contrasts = contrasts
        self.contrasts.pop('dummy_contrast_example',None)
        self.con_bl = contrast_test_baseline
        self.rescale_results = rescale_results

        if rescale_factor_file != '':
            self.rescale_factor = xndarray.load(rescale_factor_file).data
        else:
            self.rescale_factor = None
Example #7
    def setUp(self):
        tag = 'subj0_%s.nii.gz'
        func_file = pyhrf.get_data_file_name(tag % 'bold_session0')
        anatomy_file = pyhrf.get_data_file_name(tag % 'anatomy')
        roi_mask_file = pyhrf.get_data_file_name(tag % 'parcellation')

        islice = 24
        cfunc = xndarray.load(func_file).sub_cuboid(time=0, axial=islice)
        cfunc.set_orientation(['coronal', 'sagittal'])
        self.func_data = cfunc.data

        canat = xndarray.load(anatomy_file).sub_cuboid(axial=islice*3)
        canat.set_orientation(['coronal', 'sagittal'])
        self.anat_data = canat.data

        croi_mask = xndarray.load(roi_mask_file).sub_cuboid(axial=islice)
        croi_mask.set_orientation(['coronal', 'sagittal'])
        self.roi_data = croi_mask.data
Example #8
    def load_func_data(self, mask):
        # Load func data for all sessions and flatten them according to mask
        mask = mask != self.bg_label
        cfdata = [xndarray.load(f).flatten(mask, self.spatial_axes, 'voxel') \
                  for f in self.func_files]
        # flatten along sessions:
        cfdata = stack_cuboids(cfdata, 'session').reorient(['session','time']+\
                                                           self.spatial_axes)
        return np.concatenate(cfdata.data)
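Stacking the per-session cuboids and then concatenating amounts to appending sessions along the time axis; a plain NumPy sketch of the equivalent operation, assuming each session has already been flattened to (time, voxel):

import numpy as np

# Two synthetic sessions, each (time, voxel); sizes are arbitrary.
session1 = np.random.randn(100, 250)
session2 = np.random.randn(100, 250)

# Stacking along a leading 'session' axis and then concatenating over that
# axis is the same as concatenating the sessions along time.
stacked = np.stack([session1, session2])     # (session, time, voxel)
func_data = np.concatenate(stacked)          # (2 * time, voxel)
assert func_data.shape == (200, 250)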
Example #9
    def setUp(self):
        tag = 'subj0_%s.nii.gz'
        func_file = pyhrf.get_data_file_name(tag%'bold_session0')
        anatomy_file = pyhrf.get_data_file_name(tag%'anatomy')
        roi_mask_file = pyhrf.get_data_file_name(tag%'parcellation')

        islice = 24
        cfunc = xndarray.load(func_file).sub_cuboid(time=0,axial=islice)
        cfunc.set_orientation(['coronal', 'sagittal'])
        self.func_data = cfunc.data

        canat = xndarray.load(anatomy_file).sub_cuboid(axial=islice*3)
        canat.set_orientation(['coronal', 'sagittal'])
        self.anat_data = canat.data

        croi_mask = xndarray.load(roi_mask_file).sub_cuboid(axial=islice)
        croi_mask.set_orientation(['coronal', 'sagittal'])
        self.roi_data = croi_mask.data
Example #10
    def setUp(self):
        tag = "subj0_%s.nii.gz"
        func_file = pyhrf.get_data_file_name(tag % "bold_session0")
        anatomy_file = pyhrf.get_data_file_name(tag % "anatomy")
        roi_mask_file = pyhrf.get_data_file_name(tag % "parcellation")

        islice = 24
        cfunc = xndarray.load(func_file).sub_cuboid(time=0, axial=islice)
        cfunc.set_orientation(["coronal", "sagittal"])
        self.func_data = cfunc.data

        canat = xndarray.load(anatomy_file).sub_cuboid(axial=islice * 3)
        canat.set_orientation(["coronal", "sagittal"])
        self.anat_data = canat.data

        croi_mask = xndarray.load(roi_mask_file).sub_cuboid(axial=islice)
        croi_mask.set_orientation(["coronal", "sagittal"])
        self.roi_data = croi_mask.data
Example #11
def make_mask_from_points(pois, mask_file, new_mask_file):

    m = xndarray.load(mask_file)
    new_m = xndarray.xndarray_like(m)
    for poi in pois:
        i,j,k = poi['sagittal'], poi['coronal'], poi['axial']
        new_m.data[i,j,k] = 1

    new_m.save(new_mask_file)
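A usage sketch for make_mask_from_points; the points of interest and file names below are hypothetical placeholders.

# Hypothetical points of interest, given as voxel coordinates per axis.
pois = [{'sagittal': 30, 'coronal': 25, 'axial': 12},
        {'sagittal': 31, 'coronal': 26, 'axial': 12}]

make_mask_from_points(pois, 'mask.nii.gz', 'poi_mask.nii.gz')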
Example #12
def plot_estimation_results(fig_dir, poi, jde_roi, cond, plot_label,
                            glm_fir_output_dir, rfir_output_dir,
                            jde_output_dir, ymin=-1.55, ymax=1.05,
                            plot_fontsize=25):


    ## HRF plots

    fn = op.join(glm_fir_output_dir, 'glm_fir_hrf.nii.gz')
    fir = xndarray.load(fn).sub_cuboid(condition=cond, **poi)
    #fir /= (fir**2).sum()**.5
    fir /= fir.max()

    fn = op.join(rfir_output_dir, 'rfir_ehrf.nii.gz')
    rfir = xndarray.load(fn).sub_cuboid(condition=cond, **poi)
    #rfir /= (rfir**2).sum()**.5
    rfir /= rfir.max()

    fn = op.join(jde_output_dir, 'jde_mcmc_hrf_pm.nii.gz')
    jde = xndarray.load(fn).sub_cuboid(ROI=jde_roi)
    jde /= jde.max()

    plt.figure()
    pargs = {'linewidth' : 2.7}
    plot_cub_as_curve(fir, show_axis_labels=False, plot_kwargs=pargs)
    plot_cub_as_curve(rfir, show_axis_labels=False, plot_kwargs=pargs)
    plot_cub_as_curve(jde, show_axis_labels=False, plot_kwargs=pargs)

    from pyhrf.boldsynth.hrf import getCanoHRF
    time_points, hcano = getCanoHRF()
    hcano /= hcano.max()
    plt.plot(time_points, hcano, 'k.-',linewidth=1.5)

    set_ticks_fontsize(plot_fontsize)
    plt.xlim(0,25)
    plt.ylim(ymin, ymax)

    plt.gca().xaxis.grid(True, 'major', linestyle='--', linewidth=1.2,
                         color='gray')

    hrf_fig_fn = op.join(fig_dir, 'real_data_hrfs_%s.png' %plot_label)
    print 'hrf_fig_fn:', hrf_fig_fn
    plt.savefig(hrf_fig_fn)
    autocrop(hrf_fig_fn)
Example #13
def make_parcellation_from_files(betaFiles,
                                 maskFile,
                                 outFile,
                                 nparcels,
                                 method,
                                 dry=False,
                                 spatial_weight=10.):

    if not op.exists(maskFile):
        print 'Error, file %s not found' % maskFile
        return

    betaFiles = sorted(betaFiles)
    for b in betaFiles:
        if not op.exists(b):
            raise Exception('Error, file %s not found' % b)

    logger.info('Mask image: %s', op.basename(maskFile))
    logger.info('Betas: %s ... %s', op.basename(betaFiles[0]),
                op.basename(betaFiles[-1]))
    logger.info("Method: %s, nb parcels: %d", method, nparcels)
    logger.info('Spatial weight: %f', spatial_weight)

    if not dry:
        logger.info('Running parcellation ... ')
        logger.info('Start date is: %s', strftime('%c', localtime()))
        t0 = time()
        v = logger.getEffectiveLevel() <= logging.INFO
        lpa = fixed_parcellation(maskFile,
                                 betaFiles,
                                 nparcels,
                                 nn=6,
                                 method=method,
                                 fullpath=outFile,
                                 verbose=v,
                                 mu=spatial_weight)

        from pyhrf.ndarray import xndarray
        c = xndarray.load(outFile)
        if c.min() == -1:
            c.data += 1

        for i in np.unique(c.data):
            # remove parcels with fewer than 2 voxels:
            if i != 0 and (c.data == i).sum() < 2:
                c.data[np.where(c.data == i)] = 0

        c.save(outFile)

        logger.info('Parcellation complete, took %s',
                    format_duration(time() - t0))
        return lpa
    else:
        logger.info('Dry run.')
Example #14
    def load_and_get_fdata_params(self):
        pyhrf.verbose(1,'Load mask from: %s' %self.mask_file)
        if self.data_type == 'surface':
            pyhrf.verbose(2,'Read mesh from: %s' %self.mesh_file)
        p = {'mask' : xndarray.load(self.mask_file).data}
        pyhrf.verbose(1, 'Mask shape %s' %str(p['mask'].shape))

        if self.data_type == 'surface':
            p['graph'] = graph_from_mesh(read_mesh(self.mesh_file))

        return p
Example #15
def compute_T_Pvalue(betas, stds_beta, mask_file, null_hyp=True):
    '''
    Compute T-values and P-values from the estimates (betas) and their
    standard deviations (stds_beta) for all voxels.
    betas: shape (nb_vox, 1)
    stds_beta: shape (1,)
    Assume the null hypothesis if null_hyp is True.
    '''
    import numpy as np
    from scipy.integrate import quad
    from pyhrf.ndarray import xndarray

    import sys
    sys.path.append("/home/i2bm/BrainVisa/source/pyhrf/pyhrf-free/trunk/script/WIP/Scripts_IRMf_Adultes_Solv/Scripts_divers_utiles/Scripts_utiles/")
    from Functions_fit import Permutation_test, stat_mean, stat_Tvalue, stat_Wilcoxon

    mask = xndarray.load(mask_file).data #to save P and Tval on a map

    BvalC = xndarray(betas, axes_names=['sagittal', 'coronal', 'axial'])
    Betasval = BvalC.flatten(mask, axes=['sagittal', 'coronal', 'axial'], new_axis='position').data

    Stdsval = stds_beta

    Tval = xndarray(Betasval/Stdsval, axes_names=['position']).data

    nb_vox = Betasval.shape[0]
    nb_reg = betas.shape[1]
    dof = nb_vox - nb_reg #degrees of freedom for Student distribution
    assert dof>0

    Probas=np.zeros(Betasval.shape)
    for i in xrange(nb_vox):
        if null_hyp:
            #Student distribution
            from scipy.stats import t
            fmix = lambda x: t.pdf(x, dof)
        else:
            fmix = lambda t:  1/np.sqrt(2*np.pi*Stdsval[i]**2)*np.exp(- (t - Betasval[i])**2 / (2*Stdsval[i]**2) )
        Probas[i] = quad(fmix, Tval[i], float('inf'))[0]

    Tvalues_ = xndarray(Tval, axes_names=['position'])
    Pvalues_ = xndarray(Probas, axes_names=['position'])
    Tvalues = Tvalues_.expand(mask, 'position', ['sagittal','coronal','axial'])
    Pvalues = Pvalues_.expand(mask, 'position', ['sagittal','coronal','axial'])

    #Computation of Pvalue using permutations
    #not possible to do this actually...it was used for group level stats
    #Pvalue_t = np.zeros(Betasval.shape)
    #for i in xrange(nb_vox):
        #Pvalue_t[i] = Permutation_test(Betasval[i], n_permutations=10000, \
                    #stat = stat_Tvalue, two_tailed=False, plot_histo=False)

    return Tvalues.data, Pvalues.data
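Under the null hypothesis, the integral of the Student pdf from Tval[i] to infinity is simply the Student survival function, so the per-voxel loop above can be cross-checked against scipy's closed form; a small sketch with synthetic T-values:

import numpy as np
from scipy.stats import t
from scipy.integrate import quad

dof = 40                                     # arbitrary degrees of freedom
tvals = np.array([-1.2, 0.0, 1.5, 2.8])      # synthetic T-values

# Vectorised survival function ...
pvals = t.sf(tvals, dof)

# ... agrees with explicit integration of the Student pdf.
pvals_quad = np.array([quad(lambda x: t.pdf(x, dof), tv, np.inf)[0]
                       for tv in tvals])
assert np.allclose(pvals, pvals_quad)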
Example #16
    def load_and_get_fdata_params(self, sessions_data, mask):
        params = stack_trees([sd.to_dict() for sd in sessions_data])

        fns = params.pop('func_data_file')
        pyhrf.verbose(1, 'Load functional data from: %s' %',\n'.join(fns))
        fdata = stack_cuboids([xndarray.load(fn) for fn in fns], 'session')

        fdata = np.concatenate(fdata.data) #scan sessions along time axis
        pio.discard_bad_data(fdata, mask)
        pyhrf.verbose(1, 'Functional data shape %s' %str(fdata.shape))
        params['func_data'] = fdata

        return params
Example #17
    def test_voronoi_with_seeds(self):

        import os.path as op
        from pyhrf.ndarray import xndarray
        import pyhrf
        fn = 'subj0_parcellation.nii.gz'
        mask_file = pyhrf.get_data_file_name(fn)

        orientation = ['axial', 'coronal', 'sagittal']
        seeds = xndarray.xndarray_like(
            xndarray.load(mask_file)).reorient(orientation)

        seed_coords = np.array([[24, 35, 8],  # axial, coronal, sagittal
                                [27, 35, 5],
                                [27, 29, 46],
                                [31, 28, 46]])

        seeds.data[:] = 0
        seeds.data[tuple(seed_coords.T)] = 1

        seed_file = op.join(self.tmp_dir, 'voronoi_seeds.nii')
        seeds.save(seed_file, set_MRI_orientation=True)

        output_file = op.join(self.tmp_dir, 'voronoi_parcellation.nii')
        cmd = 'pyhrf_parcellate_spatial %s -m voronoi -c %s -o %s -v %d' \
            % (mask_file, seed_file, output_file, logger.getEffectiveLevel())

        if os.system(cmd) != 0:
            raise Exception('"' + cmd + '" did not execute correctly')

        logger.info('cmd: %s', cmd)

        assert op.exists(output_file)
        parcellation = xndarray.load(output_file)

        n_parcels = len(np.unique(parcellation.data)) - 1

        self.assertEqual(n_parcels, len(seed_coords))
Example #18
def make_parcellation_from_files(betaFiles, maskFile, outFile, nparcels,
                                 method, dry=False, spatial_weight=10.):

    if not op.exists(maskFile):
        print 'Error, file %s not found' %maskFile
        return

    betaFiles = sorted(betaFiles)
    for b in betaFiles:
        if not op.exists(b):
            print 'Error, file %s not found' %b
            return

    pyhrf.verbose(1, 'Mask image: ' + op.basename(maskFile))
    pyhrf.verbose(1, 'Betas: ' + op.basename(betaFiles[0]) + ' ... ' + \
                      op.basename(betaFiles[-1]))
    pyhrf.verbose(1, "Method: %s, nb parcels: %d" %(method, nparcels))
    pyhrf.verbose(1, 'Spatial weight: %f' %spatial_weight)

    if not dry:
        pyhrf.verbose(1, 'Running parcellation ... ')
        pyhrf.verbose(1, 'Start date is: %s' %strftime('%c',localtime()))
        t0 = time()
        #lpa = one_subj_parcellation(maskFile, betaFiles, nparcels, 6,
        #                            method, 10, 1, fullpath=outFile)
        v = pyhrf.verbose.verbosity
        lpa = fixed_parcellation(maskFile, betaFiles, nparcels, nn=6,
                                 method=method, fullpath=outFile, verbose=v,
                                 mu=spatial_weight)

        from pyhrf.ndarray import xndarray
        c = xndarray.load(outFile)
        if c.min() == -1:
            c.data += 1

        for i in np.unique(c.data):
            # remove parcels with fewer than 2 voxels:
            if i != 0 and (c.data == i).sum() < 2:
                c.data[np.where(c.data == i)] = 0

        c.save(outFile)

        pyhrf.verbose(1, 'Parcellation complete, took %s' \
                          %format_duration(time() - t0))
        return lpa
    else:
        pyhrf.verbose(1, 'Dry run.')
Example #19
    def load_and_get_fdata_params(self, mask):

        if op.splitext(self.paradigm_file)[-1] == '.csv':
            onsets, durations = pio.load_paradigm_from_csv(self.paradigm_file)
        else:
            raise Exception('Only the CSV file format is supported for the paradigm')

        fns = self.func_data_files
        pyhrf.verbose(1, 'Load functional data from: %s' %',\n'.join(fns))
        fdata = stack_cuboids([xndarray.load(fn) for fn in fns], 'session')

        fdata = np.concatenate(fdata.data) #scan sessions along time axis
        pio.discard_bad_data(fdata, mask)
        pyhrf.verbose(1, 'Functional data shape %s' %str(fdata.shape))

        return {'stim_onsets': onsets, 'stim_durations':durations,
                'func_data': fdata}
Example #20
    def item_list_popup(self, pos):

        sel_indexes = self.ui.item_list.selectionModel().selection().indexes()
        if len(sel_indexes) > 0:
            menu = QtGui.QMenu(self.ui.item_list)
            reloadAction = menu.addAction("Reload")
            action = menu.exec_(self.ui.item_list.mapToGlobal(pos))
            if action == reloadAction:
                fn_to_cuboid = {}
                for i in sel_indexes:
                    r, c = i.row(), i.column()
                    item = str(self.ui.item_list.itemAt(r, c).text())
                    # only load cuboids once
                    fn = self.item_to_filenames[item]
                    if fn not in fn_to_cuboid:
                        fn_to_cuboid[fn] = xndarray.load(fn)
                    self.main_browser.set_new_cuboid(fn_to_cuboid[fn], item)
                # release references once all items are set, so the cuboids
                # can be garbage collected
                fn_to_cuboid = None
Example #21
def make_parcellation_from_files(betaFiles, maskFile, outFile, nparcels,
                                 method, dry=False, spatial_weight=10.):

    if not op.exists(maskFile):
        print 'Error, file %s not found' % maskFile
        return

    betaFiles = sorted(betaFiles)
    for b in betaFiles:
        if not op.exists(b):
            raise Exception('Error, file %s not found' % b)

    logger.info('Mask image: %s', op.basename(maskFile))
    logger.info('Betas: %s ... %s', op.basename(betaFiles[0]),
                op.basename(betaFiles[-1]))
    logger.info("Method: %s, nb parcels: %d", method, nparcels)
    logger.info('Spatial weight: %f', spatial_weight)

    if not dry:
        logger.info('Running parcellation ... ')
        logger.info('Start date is: %s', strftime('%c', localtime()))
        t0 = time()
        v = logger.getEffectiveLevel() <= logging.INFO
        lpa = fixed_parcellation(maskFile, betaFiles, nparcels, nn=6,
                                 method=method, fullpath=outFile, verbose=v,
                                 mu=spatial_weight)

        from pyhrf.ndarray import xndarray
        c = xndarray.load(outFile)
        if c.min() == -1:
            c.data += 1

        for i in np.unique(c.data):
            # remove parcels with fewer than 2 voxels:
            if i != 0 and (c.data == i).sum() < 2:
                c.data[np.where(c.data == i)] = 0

        c.save(outFile)

        logger.info(
            'Parcellation complete, took %s', format_duration(time() - t0))
        return lpa
    else:
        logger.info('Dry run.')
Example #22
    def test_ward_spatial_cmd(self):
        from pyhrf.parcellation import parcellation_dist

        output_file = op.join(self.tmp_dir, 'parcellation_output_test.nii')

        nparcels = 4
        cmd = 'pyhrf_parcellate_glm -m %s %s %s -o %s -v %d ' \
            '-n %d -t ward_spatial ' \
            % (self.mask_fn, self.p1_fn, self.p2_fn, output_file,
               logger.getEffectiveLevel(), nparcels)
        if os.system(cmd) != 0:
            raise Exception('"' + cmd + '" did not execute correctly')
        logger.info('cmd: %s', cmd)

        labels = xndarray.load(output_file).data
        logger.info('labels.dtype:%s', str(labels.dtype))
        dist = parcellation_dist(self.p1, labels)[0]
        logger.info('dist:%d', dist)
        self.assertEqual(dist, 0)
Example #23
    def test_ward_spatial_cmd(self):
        from pyhrf.parcellation import parcellation_dist

        #pyhrf.verbose.verbosity = 2
        output_file = op.join(self.tmp_dir,'parcellation_output_test.nii')

        nparcels = 4
        cmd = 'pyhrf_parcellate_glm -m %s %s %s -o %s -v %d ' \
            '-n %d -t ward_spatial ' \
        %(self.mask_fn, self.p1_fn, self.p2_fn, output_file,
          pyhrf.verbose.verbosity, nparcels)
        if os.system(cmd) != 0 :
            raise Exception('"' + cmd + '" did not execute correctly')
        pyhrf.verbose(1, 'cmd: %s' %cmd)

        labels = xndarray.load(output_file).data
        pyhrf.verbose(2, 'labels.dtype:%s' %str(labels.dtype))
        dist = parcellation_dist(self.p1, labels)[0]
        pyhrf.verbose(2, 'dist:%d' %dist)
        self.assertEqual(dist, 0)
Example #24
    def add_file(self, filename, open_plot=True):
        """
        Load a file, place into item list and create a browser/viewer.

        Args:
            - filename (str): path to the data file to load
            - open_plot (bool): open plot window on load (TODO)

        Modifies attributes:
            - item_to_filenames: associate filename to new item id
            - filenames_to_items: associate item id to filename
        """
        if not op.exists(filename):
            QtGui.QMessageBox.critical(self, 'Error',
                                       'File not found: %s' % filename)
            return

        item_id = self.get_unique_item_id(op.basename(filename))
        #print 'item_id:', item_id
        self.item_to_filenames[item_id] = filename
        self.filenames_to_items[filename] = item_id

        self.ui.item_list.addItem(item_id)
        self.main_browser.add_cuboid(xndarray.load(filename), item_id)
Example #25
def compute_jde_glm_rescaling(jde_path, glm_path, output_file):

    #TODO: check consistency of condition order btwn GLM & JDE !!!

    # load matX from JDE results (same for all parcels)
    # matX is a matrix of shape (time x nb_hrf_coeff)
    # matX = sum_m(X^m * m) where X is the matrix defined in the fwd JDE model
    # jde_dm_fn = op.join(jde_path,'jde_mcmc_matX.nii.gz')
    # jde_dm = xndarray.load(jde_dm_fn).sub_cuboid(ROI=12).reorient(('time','P'))

    # print 'jde_dm:'
    # print jde_dm.descrip()

    # ny,lgCI = jde_dm.data.shape
    # nbConditions = len(np.unique(jde_dm.data)) - 1

    # # reconstruct all X^m from matX
    # varX = np.zeros((nbConditions,ny,lgCI))
    # for j in xrange(nbConditions):
    #     varX[j,:,:] = (jde_dm.data == j).astype(int)


    jde_varX_fn = op.join(jde_path,'jde_mcmc_varX.nii.gz')
    jde_varX = xndarray.load(jde_varX_fn).sub_cuboid(ROI=1)
    jde_varX = jde_varX.reorient(('condition', 'time','P'))

    print 'jde_dm varX:'
    print jde_varX.descrip()

    nbConditions,ny,lgCI = jde_varX.data.shape
    varX = jde_varX.data

    # More convenient matrix structure to perfom product with HRF afterwards
    stackX = np.zeros((ny*nbConditions,lgCI), dtype=int)

    for j in xrange(nbConditions):
        stackX[ny*j:ny*(j+1), :] = varX[j,:,:]

    print 'stackX:', stackX.shape

    # Load HRFs from JDE results
    jde_hrf_fn =  op.join(jde_path, 'jde_mcmc_hrf_pm.nii.gz')
    jde_hrf = xndarray.load(jde_hrf_fn).reorient(('ROI','time'))

    print 'jde_hrf:'
    print jde_hrf.descrip()

    roi_ids = jde_hrf.axes_domains['ROI']
    jde_xh = np.zeros((len(roi_ids),ny,nbConditions))

    for iroi,roi in enumerate(roi_ids):
        h = jde_hrf.sub_cuboid(ROI=roi).data[1:-1]
        # make sure that the HRF is normalized:
        h /= (h**2).sum()**.5
        stackXh = np.dot(stackX, h)
        jde_xh[iroi,:,:] = np.reshape(stackXh, (nbConditions,ny)).transpose()

    jde_roi_mask_fn =  op.join(jde_path, 'jde_mcmc_roi_mapping.nii.gz')
    print 'jde_roi_mask_fn:', jde_roi_mask_fn
    jde_roi_mask = xndarray.load(jde_roi_mask_fn)

    glm_dm_fn =  op.join(glm_path, 'glm_hcano_design_matrix.nii.gz')
    glm_dm = xndarray.load(glm_dm_fn).sub_cuboid(ROI=1).reorient(('time',
                                                                  'regressor'))

    print 'glm_dm:'
    print glm_dm.descrip()

    # align condition axis of GLM design matrix onto condition axis of JDE
    # design matrix:
    jde_xh_tmp = jde_xh.copy()
    cond_domain = []
    for cidx,cn in enumerate(glm_dm.axes_domains['regressor']):
        if cn in jde_varX.axes_domains['condition']:
            jde_cidx = np.where(jde_varX.axes_domains['condition']==cn)[0][0]
            print 'cidx:', cidx
            print 'jde_cidx:', jde_cidx
            jde_xh_tmp[:,:,cidx] = jde_xh[:,:,jde_cidx]
            cond_domain.append(cn)

    #glm_norm_weights = (glm_dm.data**2).sum(0)**.5
    glm_norm_weights = glm_dm.data.ptp(0)

    jde_norm_weights = np.zeros(jde_roi_mask.data.shape + (nbConditions,))
    scale_factor = np.zeros_like(jde_norm_weights)
    for iroi,roi in enumerate(roi_ids):
        m = np.where(jde_roi_mask.data == roi)
        #jde_w = (jde_xh[iroi]**2).sum(0)**.5
        jde_w = jde_xh[iroi].ptp(0)
        jde_norm_weights[m[0],m[1],m[2],:] = jde_w
        scale_factor[m[0],m[1],m[2],:] =  glm_norm_weights[:len(jde_w)] / jde_w

    from pyhrf.ndarray import MRI3Daxes
    csf = xndarray(scale_factor, axes_names=MRI3Daxes+['condition'],
                 axes_domains={'condition':np.array(cond_domain)},
                 meta_data=jde_hrf.meta_data)

    print 'rescale factor:'
    print csf.descrip()

    print 'scale_factor file:', output_file
    csf.reorient(['condition']+MRI3Daxes).save(output_file)
Example #26
def plot_detection_results(fig_dir, poi, condition, coi, parcellation_file,
                           plot_label, jde_output_dir, glm_hcano_rs_output_dir,
                           fig_dpi=100):
    """
    coi (str): contrast of interest
    poi (dict): defines the point of interest for plots of HRFs and maps
    """
    if condition == 'audio':
        condition = 'phraseaudio'

    orientation = ['coronal',  'sagittal']
    axial_slice =  poi['axial']

    anat_file = get_data_file_name('real_data_vol_4_regions_anatomy.nii.gz')

    parcellation = xndarray.load(parcellation_file)
    parcellation = parcellation.sub_cuboid(axial=axial_slice)
    parcellation = parcellation.reorient(orientation)

    ## Detection maps
    detection_plots_params = []

    #JDE NRLs
    fn = op.join(jde_output_dir, 'jde_mcmc_nrl_pm.nii.gz')

    slice_def = {'axial':axial_slice, 'condition':condition}
    fig_fn = op.join(fig_dir, 'real_data_jde_mcmc_nrls_%s.png' %condition)
    detection_plots_params.append({'fn':fn, 'slice_def':slice_def,
                                   'mask': parcellation.data,
                                   'output_fig_fn':fig_fn})

    #GLM hcano
    fn = op.join(glm_hcano_rs_output_dir, 'glm_hcano_rs_beta_%s.nii.gz' %condition)

    slice_def = {'axial':axial_slice}

    fig_fn = op.join(fig_dir, 'real_data_glm_hcano_rs_%s.png'%condition)

    detection_plots_params.append({'fn':fn, 'slice_def':slice_def,
                                   'mask': (parcellation.data != 0),
                                   'output_fig_fn':fig_fn})


    perf_norm = plot_maps(detection_plots_params, anat_file,
                          {'axial':axial_slice*3},
                          fig_dir, orientation=orientation,
                          crop_extension=None, plot_anat=True)

    palette_fig_fn = op.join(fig_dir, 'real_data_detection_%s_palette.png' \
                             %condition)
    plot_palette(cmap, perf_norm, 45)
    plt.savefig(palette_fig_fn, dpi=fig_dpi)
    autocrop(palette_fig_fn)


    #JDE Contrast
    fn = op.join(jde_output_dir, 'jde_mcmc_nrl_contrasts.nii.gz')

    slice_def = {'axial':axial_slice, 'contrast':coi}
    fig_fn = op.join(fig_dir, 'real_data_jde_mcmc_con_%s.png' %coi)
    detection_plots_params.append({'fn':fn, 'slice_def':slice_def,
                                   'mask': parcellation.data,
                                   'output_fig_fn':fig_fn})

    #GLM hcano
    fn = op.join(glm_hcano_rs_output_dir, 'glm_hcano_rs_con_effect_%s.nii.gz'%coi)

    slice_def = {'axial':axial_slice}

    fig_fn = op.join(fig_dir, 'real_data_glm_hcano_rs_con_%s.png' %coi)

    detection_plots_params.append({'fn':fn, 'slice_def':slice_def,
                                   'mask': (parcellation.data != 0),
                                   'output_fig_fn':fig_fn})


    perf_norm = plot_maps(detection_plots_params, anat_file,
                          {'axial':axial_slice*3},
                          fig_dir, orientation=orientation,
                          crop_extension=None, plot_anat=True)

    palette_fig_fn = op.join(fig_dir, 'real_data_detection_con_%s_palette.png'
                             %coi)
    plot_palette(cmap, perf_norm, 45)
    plt.savefig(palette_fig_fn, dpi=fig_dpi)
    autocrop(palette_fig_fn)
Example #27
#
"""
Compute the mean of BOLD signal within parcels.

This is an example of several operations for xndarray:
- explosion of data according to a parcellation mask
- mean over voxel
- merge of several xndarray objects
"""
import os.path as op
from pyhrf import get_data_file_name, get_tmp_path
from pyhrf.ndarray import xndarray, merge

func_data = xndarray.load(get_data_file_name("subj0_bold_session0.nii.gz"))
parcellation = xndarray.load(get_data_file_name("subj0_parcellation.nii.gz"))
parcel_fdata = func_data.explode(parcellation)
parcel_means = dict((parcel_id, d.copy().fill(d.mean("position"))) for parcel_id, d in parcel_fdata.items())
parcel_means = merge(parcel_means, parcellation, axis="position")
output_fn = op.join(get_tmp_path(), "./subj0_bold_parcel_means.nii")
print "File saved to:", output_fn
parcel_means.save(output_fn)
# TODO test full script
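For comparison, the same per-parcel averaging can be written with plain NumPy on raw arrays (a sketch with synthetic data); the xndarray version above additionally carries axis names and NIfTI meta-data through the operations.

import numpy as np

# Synthetic 4D functional data (x, y, z, time) and a 3D parcel label volume.
func = np.random.randn(4, 4, 3, 10)
parcels = np.random.randint(0, 3, size=(4, 4, 3))   # 0 = background

parcel_mean_img = np.zeros_like(func)
for pid in np.unique(parcels):
    if pid == 0:
        continue
    voxels = parcels == pid
    # Mean time course over the parcel, broadcast back to every voxel in it.
    parcel_mean_img[voxels, :] = func[voxels, :].mean(axis=0)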
Example #28
# -*- coding: utf-8 -*-
#
"""
Compute the mean of BOLD signal within parcels.

This is an example of several operations for xndarray:
- explosion of data according to a parcellation mask
- mean over voxel
- merge of several xndarray objects
"""
import os.path as op
from pyhrf import get_data_file_name, get_tmp_path
from pyhrf.ndarray import xndarray, merge

func_data = xndarray.load(get_data_file_name('subj0_bold_session0.nii.gz'))
parcellation = xndarray.load(get_data_file_name('subj0_parcellation.nii.gz'))
parcel_fdata = func_data.explode(parcellation)
parcel_means = dict((parcel_id, d.copy().fill(d.mean('position')))
                    for parcel_id, d in parcel_fdata.items())
parcel_means = merge(parcel_means, parcellation, axis='position')
output_fn = op.join(get_tmp_path(), './subj0_bold_parcel_means.nii')
print 'File saved to:', output_fn
parcel_means.save(output_fn)
#TODO test full script
Example #29
File: _io.py Project: ainafp/pyhrf
def cread_volume(fileName):
    from pyhrf.ndarray import xndarray
    return xndarray.load(fileName)
Example #30
def cread_volume(fileName):
    from pyhrf.ndarray import xndarray
    return xndarray.load(fileName)
Example #31
def make_parcellation(subject, dest_dir="parcellation", roi_mask_file=None):
    """
    Perform a functional parcellation from input fmri data
    
    Return: parcellation file name (str)
    """
    # Loading names for folders and files
    # - T maps (input)
    # func_files = glob(op.join(op.join(op.join('./', subject), \
    #                    't_maps'), 'BOLD*nii'))
    # func_files = glob(op.join('./', subject, 'ASLf', 'spm_analysis', \
    #                            'Tmaps*img'))
    func_files = glob(op.join("./", subject, "ASLf", "spm_analysis", "spmT*img"))
    print "Tmap files: ", func_files

    # - Mask (input)
    # spm_mask_file = op.join(spm_maps_dir, 'mask.img')
    mask_dir = op.join("./", subject, "preprocessed_data")
    if not op.exists(mask_dir):
        os.makedirs(mask_dir)
    mask_file = op.join(mask_dir, "mask.nii")
    mask = op.join(mask_dir, "rcut_tissue_mask.nii")
    volume = op.join("./", subject, "ASLf", "funct", "coregister", "mean" + subject + "_ASLf_correctionT1_0001.nii")
    make_mask(mask, volume, mask_file)

    # - parcellation (output)
    parcellation_dir = op.join("./", subject, dest_dir)
    if not op.exists(parcellation_dir):
        os.makedirs(parcellation_dir)
    pfile = op.join(parcellation_dir, "parcellation_func.nii")

    # Parcellation
    from pyhrf.parcellation import make_parcellation_from_files

    # make_parcellation_from_files(func_files, mask_file, pfile,
    #                             nparcels=200, method='ward_and_gkm')

    # Masking with a ROI so we just consider parcels inside
    # a certain area of the brain
    # if roi_mask_file is not None:
    if 0:  # for ip in np.array([11, 51, 131, 194]):
        # ip = 200

        # print 'Masking parcellation with roi_mask_file: ', roi_mask_file
        print "Masking ROI: ", ip
        pfile_masked = op.join(parcellation_dir, "parcellation_func_masked_roi" + str(ip) + ".nii")

        from pyhrf.ndarray import xndarray

        parcellation = xndarray.load(pfile)
        # m = xndarray.load(roi_mask_file)
        # parcels_to_keep = np.unique(parcellation.data * m.data)
        masked_parcellation = xndarray.xndarray_like(parcellation)
        # for ip in parcels_to_keep:
        #    masked_parcellation.data[np.where(parcellation.data==ip)] = ip

        masked_parcellation.data[np.where(parcellation.data == ip)] = ip
        masked_parcellation.save(pfile_masked)

    from pyhrf.ndarray import xndarray

    for tmap in func_files:
        func_file_i = xndarray.load(tmap)
        func_data = func_file_i.data
        # func_file_i.data[np.where(func_data<0.1*func_data.max())] = 0

        parcellation = xndarray.load(pfile)
        print func_data.max()
        print func_data.argmax()
        # ip = parcellation.data[func_data.argmax()]
        ip = parcellation.data[np.unravel_index(func_data.argmax(), parcellation.data.shape)]
        print ip.shape

        masked_parcellation = xndarray.xndarray_like(parcellation)
        print masked_parcellation.data.shape
        print parcellation.data.shape
        masked_parcellation.data[np.where(parcellation.data == ip)] = ip
        masked_parcellation.save(tmap[:-4] + "_parcelmax.nii")

    return pfile
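The np.unravel_index call in make_parcellation converts the flat argmax index into 3D voxel coordinates; a tiny self-contained illustration:

import numpy as np

data = np.zeros((3, 4, 5))
data[2, 1, 3] = 7.0                      # put the maximum at a known voxel

flat_idx = data.argmax()                 # index into the flattened array
coords = np.unravel_index(flat_idx, data.shape)
assert coords == (2, 1, 3)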
Example #32
plt.rc('xtick', labelsize=15)
plt.rc('ytick', labelsize=15)

fig = plt.figure(figsize=(20, 9))
gs = gridspec.GridSpec(2, 2)

# Parcellation image
parcels_img = load_img(PARCELLATION_MASK)
parcels_img_data = np.array(parcels_img.get_data())

# Anatomical image
ANAT_IMAGE = glob.glob(os.path.join(INPUT_FOLDER, 'bold.nii')).pop()
anat_img = load_img(ANAT_IMAGE)

# Estimated HRF (3)
hrf_s = xndarray.load(os.path.join(PYHRF_OUTPUT, 'jde_vem_hrf.nii'))
time_axis, canonical_hrf = getCanoHRF(duration=HRF_DURATION, dt=DT)

# Plot Canonical HRF (3)
ax_hrf = plt.subplot(gs[1, 0])
ax_hrf.plot(time_axis, canonical_hrf, ":", label="Canonical")
ax_hrf.set_xlim([0, np.max(time_axis)])
ax_hrf.set_ylabel("amplitude", size=15)
ax_hrf.set_xlabel("time (s)", size=15)

# Plot PPM (2)
ax_ppm = plt.subplot(gs[0, 1])
ppm_img = load_img(os.path.join(PYHRF_OUTPUT, ppm_nii))
try:
    plot_stat_map(stat_map_img=ppm_img,
                  threshold=PPM_THRESHOLD,
Example #33
def cread_volume(fileName):
    return xndarray.load(fileName)
Example #34
def make_parcellation(subject, dest_dir='parcellation', roi_mask_file=None):
    """
    Perform a functional parcellation from input fmri data
    
    Return: parcellation file name (str)
    """
    # Loading names for folders and files
    # - T maps (input)
    #func_files = glob(op.join(op.join(op.join('./', subject), \
    #                    't_maps'), 'BOLD*nii'))
    #func_files = glob(op.join('./', subject, 'ASLf', 'spm_analysis', \
    #                            'Tmaps*img'))
    func_files = glob(op.join('./', subject, 'ASLf', 'spm_analysis', \
                                'spmT*img'))
    print 'Tmap files: ', func_files 

    # - Mask (input)
    #spm_mask_file = op.join(spm_maps_dir, 'mask.img')
    mask_dir = op.join('./', subject, 'preprocessed_data')
    if not op.exists(mask_dir): os.makedirs(mask_dir)
    mask_file = op.join(mask_dir, 'mask.nii')
    mask = op.join(mask_dir, 'rcut_tissue_mask.nii')
    volume = op.join('./', subject, 'ASLf', 'funct', 'coregister', \
                     'mean' + subject + '_ASLf_correctionT1_0001.nii')
    make_mask(mask, volume, mask_file)

    # - parcellation (output)
    parcellation_dir = op.join('./', subject, dest_dir)
    if not op.exists(parcellation_dir): os.makedirs(parcellation_dir)
    pfile = op.join(parcellation_dir, 'parcellation_func.nii')

    # Parcellation
    from pyhrf.parcellation import make_parcellation_from_files 
    #make_parcellation_from_files(func_files, mask_file, pfile, 
    #                             nparcels=200, method='ward_and_gkm')

    # Masking with a ROI so we just consider parcels inside 
    # a certain area of the brain
    #if roi_mask_file is not None:   
    if 0: #for ip in np.array([11, 51, 131, 194]):
        #ip = 200

        #print 'Masking parcellation with roi_mask_file: ', roi_mask_file
        print 'Masking ROI: ', ip
        pfile_masked = op.join(parcellation_dir, 'parcellation_func_masked_roi' + str(ip) + '.nii')

        from pyhrf.ndarray import xndarray
        parcellation = xndarray.load(pfile)
        #m = xndarray.load(roi_mask_file)
        #parcels_to_keep = np.unique(parcellation.data * m.data)
        masked_parcellation = xndarray.xndarray_like(parcellation)
        #for ip in parcels_to_keep:
        #    masked_parcellation.data[np.where(parcellation.data==ip)] = ip
        
        masked_parcellation.data[np.where(parcellation.data==ip)] = ip
        masked_parcellation.save(pfile_masked)

    from pyhrf.ndarray import xndarray
    for tmap in func_files:
        func_file_i = xndarray.load(tmap)
        func_data = func_file_i.data
        #func_file_i.data[np.where(func_data<0.1*func_data.max())] = 0

        parcellation = xndarray.load(pfile)
        print func_data.max()
        print func_data.argmax()
        #ip = parcellation.data[func_data.argmax()]
        ip = parcellation.data[np.unravel_index(func_data.argmax(), parcellation.data.shape)]
        print ip.shape

        masked_parcellation = xndarray.xndarray_like(parcellation)
        print masked_parcellation.data.shape
        print parcellation.data.shape
        masked_parcellation.data[np.where(parcellation.data==ip)] = ip
        masked_parcellation.save(tmap[:-4] + '_parcelmax.nii')

    return pfile