Example #1
    def test_gls_recursive(self):
        cmd = ["pyhrf_gls", "-r", self.tmp_dir]
        output = check_output(cmd)

        pyhrf.verbose(1, "output:")
        pyhrf.verbose(1, output)
        expected_output = """%s:
%s/subject1:
%s/subject1/fmri:
paradigm.csv

%s/subject1/fmri/analysis:
analysis_result_1.nii
analysis_result_2.csv
analysis_summary.txt

%s/subject1/fmri/run1:
bold_scan_[1...3].nii

%s/subject1/fmri/run2:
bold_scan_[1...3].nii

%s/subject1/t1mri:
anatomy.{hdr,img}

""" % (
            (self.tmp_dir,) * 7
        )
        if output != expected_output:
            raise Exception(
                "Output of command %s is not as expected.\n"
                "Output is:\n%sExpected:\n%s"
                % (" ".join(cmd), output, expected_output)
            )
Example #2
File: stats.py Project: Solvi/pyhrf
    def __init__(self, sampled_variables, nb_its_max, obs_pace=1, burnin=.3,
                 sample_hist_pace=-1, obs_hist_pace=-1):

        self.variables = {}
        self.sampled_variables = sampled_variables

        for v in sampled_variables:
            self.set_variable(v.name, v)

        def get_fraction_or_nb(nb, tot):
            # Interpret nb as a fraction of tot if 0 < nb < 1,
            # otherwise as an absolute number of iterations.
            if 0. < nb < 1.:
                return int(round(tot * nb))
            else:
                return nb

        self.nb_its_max = nb_its_max
        self.burnin = get_fraction_or_nb(burnin, nb_its_max)
        self.smpl_hist_pace = get_fraction_or_nb(sample_hist_pace, nb_its_max)
        self.obs_hist_pace = get_fraction_or_nb(obs_hist_pace, nb_its_max)
        self.tracked_quantities = {}

        pyhrf.verbose(1, 'GibbsSampler init. Burnin: %d, nb_its_max: %d, '\
                      'smpl_hist_pace: %d, obs_hist_pace: %d,'\
                      %(self.burnin, self.nb_its_max, self.smpl_hist_pace,
                        self.obs_hist_pace))
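
The get_fraction_or_nb helper lets burnin, sample_hist_pace and obs_hist_pace be given either as a fraction of nb_its_max (values strictly between 0 and 1) or as an absolute iteration count. A minimal standalone sketch of that convention (names are illustrative, not part of pyhrf):

def fraction_or_count(nb, total):
    # Values strictly between 0 and 1 are fractions of 'total';
    # anything else is taken as an absolute count.
    if 0. < nb < 1.:
        return int(round(total * nb))
    return nb

print(fraction_or_count(.3, 1000))   # 300 burn-in iterations
print(fraction_or_count(50, 1000))   # 50, already an absolute count
print(fraction_or_count(-1, 1000))   # -1, sentinel meaning "disabled"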
Example #3
    def computeComponentsApost(self, variables, j, XhtQXh):
        sIMixtP = variables[self.samplerEngine.I_MIXT_PARAM]
        var = sIMixtP.getCurrentVars()
        mean = sIMixtP.getCurrentMeans()
        rb = variables[self.samplerEngine.I_NOISE_VAR].currentValue
        nrls = self.currentValue
        
        gTQgjrb = XhtQXh[:, j] / rb   # of size nbVox (one value per voxel)

        ej = self.varYtilde + repmat(nrls[j, :], self.ny, 1) * \
            self.varXh[:, :, j].swapaxes(0, 1)
        numpy.divide(diag(dot(self.varXhtQ[:, j, :], ej)), rb,
                     self.varXjhtQjeji)

        for c in xrange(self.nbClasses):  # class: 0 (inactive) or 1 (active)
            self.varClassApost[c,j,:] = 1./(1./var[c,j] + gTQgjrb)
            numpy.sqrt(self.varClassApost[c,j,:], self.sigClassApost[c,j,:])
            if c > 0: # assume 0 stands for inactivating class
                numpy.multiply(self.varClassApost[c,j,:],
                               add(mean[c,j]/var[c,j], self.varXjhtQjeji),
                               self.meanClassApost[c,j,:])
            else:
                multiply(self.varClassApost[c,j,:], self.varXjhtQjeji,
                         self.meanClassApost[c,j,:])
                
            pyhrf.verbose(5, 'meanClassApost %d cond %d :'%(c,j))
            pyhrf.verbose.printNdarray(5, self.meanClassApost[c,j,:])
Example #4
File: jde.py Project: Solvi/pyhrf
    def packSamplerInput(self, roiData):

        try:
            shrf = self.sampler.getVariable('hrf')
        except KeyError:
            shrf = self.sampler.getVariable('brf')
            
        hrfDuration = shrf.duration
        zc = shrf.zc

        simu = None

        if simu is not None and shrf.sampleFlag == 0:
            hrfDuration = (len(simu.hrf.get_hrf(0, 0)) - 1) * simu.hrf.dt
            pyhrf.verbose(6, 'Found simulation data and hrf is '
                          'not sampled, setting hrfDuration to: '
                          + str(hrfDuration))

        pyhrf.verbose(2, 'building BOLDSamplerInput ...')

        if simu is None or shrf.sampleFlag:
            dt = self.dt if (self.dt is not None and self.dt != 0.) \
                else -self.dtMin
        elif simu is not None and shrf.sampleFlag == 0:
            dt = simu.hrf.dt

        samplerInput = self.sampler.inputClass(roiData, dt=dt,
                                               typeLFD=self.driftLfdType,
                                               paramLFD=self.driftLfdParam,
                                               hrfZc=zc,
                                               hrfDuration=hrfDuration)
        return samplerInput
Example #5
    def _compute_graph(self):
        if self.data_type != 'volume':
            raise Exception('Can only compute graph for volume data')
        pyhrf.verbose(6, 'FmriData._compute_graph() ...')
        to_discard = [self.backgroundLabel]
        self._graph = parcels_to_graphs(self.roiMask, kerMask3D_6n,
                                        toDiscard=to_discard)
Example #6
File: drift.py Project: Solvi/pyhrf
    def checkAndSetInitValue(self, variables):
        smplVarDrift = variables[self.samplerEngine.I_ETA]
        smplVarDrift.checkAndSetInitValue(variables)
        varDrift = smplVarDrift.currentValue

        if self.useTrueValue:
            if self.trueValue is not None:
                self.currentValue = self.trueValue
            else:
                raise Exception('A true value is needed for drift init '
                                'but none is defined')

        if self.currentValue is None:
            pyhrf.verbose(1, "Initialisation of drift from the data")
            # Least-squares fit of the drift coefficients:
            # currentValue = (P^t P)^-1 P^t Y
            ptp = numpy.dot(self.P.transpose(), self.P)
            invptp = numpy.linalg.inv(ptp)
            invptppt = numpy.dot(invptp, self.P.transpose())
            self.currentValue = numpy.dot(invptppt, self.dataInput.varMBY)

        self.updateNorm()
        self.matPl = dot(self.P, self.currentValue)
        self.ones_Q_J = np.ones((self.dimDrift, self.nbVox))
        self.ones_Q = np.ones(self.dimDrift)
Example #7
def remote_copy(files, host, user, path, transfer_tool='ssh'):
    if transfer_tool == 'paramiko':
        import paramiko
        pyhrf.verbose(1, 'Copying files to remote destination %s@%s:%s ...'
                      % (user, host, path))
        ssh = paramiko.SSHClient()
        known_hosts_file = os.path.join("~", ".ssh", "known_hosts")
        ssh.load_host_keys(os.path.expanduser(known_hosts_file))
        ssh.connect(host, username=user)
        sftp = ssh.open_sftp()
        for f in files:
            remotepath = op.join(path,op.basename(f))
            pyhrf.verbose(2, f + ' -> ' + remotepath + ' ...')
            flocal = open(f, 'rb')
            remote_file = sftp.file(remotepath, "wb")
            remote_file.set_pipelined(True)
            remote_file.write(flocal.read())
            flocal.close()
            remote_file.close()
        sftp.close()
        ssh.close()
    else:
        sfiles = ' '.join(['"%s"' % f for f in files])

        scp_cmd = 'scp -C %s "%s@%s:%s"' % (sfiles, user, host, path)
        pyhrf.verbose(1, 'Data files transfer with scp ...')
        pyhrf.verbose(2, scp_cmd)
        if os.system(scp_cmd) != 0:
            raise Exception('Error during scp transfer: %s' % scp_cmd)

    pyhrf.verbose(1, 'Copy done!')

    return [op.join(path,op.basename(f)) for f in files]
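
A hypothetical call, assuming SSH keys are already set up for the target account (host, user and paths below are placeholders):

files = ['./results/result_0001.pck', './results/result_0002.pck']
copied = remote_copy(files, 'cluster.example.org', 'jdoe',
                     '/home/jdoe/pyhrf_results', transfer_tool='paramiko')
# remote_copy returns the remote paths of the copied files:
# ['/home/jdoe/pyhrf_results/result_0001.pck', ...]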
Example #8
    def sampleNextInternal(self, variables):
        # Draw the uniform and Gaussian samples needed for this iteration,
        # then update NRLs and labels voxel by voxel (serial scheme).
        sIMixtP = variables[self.samplerEngine.I_MIXT_PARAM_NRLS_BAR]
        varCI = sIMixtP.currentValue[sIMixtP.I_VAR_CI]
        varCA = sIMixtP.currentValue[sIMixtP.I_VAR_CA]
        meanCA = sIMixtP.currentValue[sIMixtP.I_MEAN_CA]

        self.labelsSamples = rand(self.nbConditions, self.nbVox)
        self.nrlsSamples = randn(self.nbConditions, self.nbVox)

        if self.imm:
            #self.sampleNrlsParallel(varXh, rb, h, varLambda, varCI,
            #                        varCA, meanCA, gTQg, variables)
            raise NotImplementedError("IMM with drift sampling is not available")
        else: #smm
            self.sampleNrlsSerial(varCI, varCA, meanCA, variables)
            #self.computeVarYTildeOpt(varXh)
            #matPl = self.samplerEngine.getVariable('drift').matPl
            #self.varYbar = self.varYtilde - matPl

        if (self.currentValue >= 1000).any() and \
                pyhrf.__usemode__ == pyhrf.DEVEL:
            pyhrf.verbose(2, "Weird NRL values detected! %d/%d"
                          % ((self.currentValue >= 1000).sum(),
                             self.nbVox * self.nbConditions))
            #pyhrf.verbose.set_verbosity(6)

        if pyhrf.verbose.verbosity >= 4:
            self.reportDetection()

        self.printState(4)
        self.iteration += 1 #TODO : factorize !!
Example #9
    def from_py_object(self, label, obj, parent=None):

        pyhrf.verbose(6, "UiNode.from_py_object(label=%s,obj=%s) ..." % (label, str(obj)))

        if isinstance(obj, Initable):
            n = obj.to_ui_node(label, parent)
        else:
            if UiNode._pyobj_has_leaf_type(obj):
                if isinstance(obj, np.ndarray):
                    dt = str(obj.dtype.name)
                    sh = str(obj.shape)
                    n = UiNode(label, attributes={"type": "ndarray", "dtype": dt, "shape": sh})
                    s = " ".join(str(e) for e in obj.ravel())
                    n.add_child(UiNode(s))
                elif obj is None:
                    n = UiNode(label, attributes={"type": "None"})
                    n.add_child(UiNode("None"))
                else:
                    n = UiNode(label, attributes={"type": obj.__class__})
                    n.add_child(UiNode(str(obj)))
            elif isinstance(obj, list):
                n = UiNode(label, attributes={"type": "list"})
                for i, sub_val in enumerate(obj):
                    n.add_child(UiNode.from_py_object("item%d" % i, sub_val))
            elif isinstance(obj, (dict, OrderedDict)):
                t = "dict" if obj.__class__ == dict else "odict"
                n = UiNode(label, attributes={"type": t})
                for k, v in obj.iteritems():
                    n.add_child(UiNode.from_py_object(k, v))
            else:
                raise Exception(
                    "In UiNode.from_py_object, unsupported object: " "%s (type: %s)" % (str(obj), str(type(obj)))
                )
        return n
Example #10
    def analyse_roi(self, atomData):
        """
        Launch the JDE Gibbs Sampler on a parcel-specific data set *atomData*
        Args:
            - atomData (pyhrf.core.FmriData): parcel-specific data
        Returns:
            JDE sampler object
        """
        #print 'atomData:', atomData

        if self.copy_sampler:
            sampler = copyModule.deepcopy(self.sampler)
        else:
            sampler = self.sampler
        sInput = self.packSamplerInput(atomData)
        sampler.linkToData(sInput)
        #if self.parameters[self.P_RANDOM_SEED] is not None:
        #    np.random.seed(self.parameters[self.P_RANDOM_SEED])
        # #HACK:
        # if len(self.roi_ids) > 0:
        #     if atomData.get_roi_id() not in self.roi_ids:
        #         return None

        pyhrf.verbose(1, 'Treating region %d' %(atomData.get_roi_id()))
        sampler.runSampling()
        pyhrf.verbose(1, 'Cleaning memory ...')
        sampler.dataInput.cleanMem()
        return sampler
Example #11
def project_fmri(input_mesh, data_file, output_tex_file,
                 output_kernels_file=None, data_resolution=None,
                 geod_decay=5., norm_decay=2., kernel_size=7,
                 tex_bin_threshold=None):
    """Project volumic fMRI data onto a cortical mesh, building the
    projection kernels on the fly if no kernel file is provided."""

    if output_kernels_file is None:
        tmp_dir = tempfile.mkdtemp(prefix='pyhrf_surf_proj',
                                   dir=pyhrf.cfg['global']['tmp_path'])

        kernels_file = op.join(tmp_dir, add_suffix(op.basename(data_file),
                                                   '_kernels'))
        tmp_kernels_file = True
    else:
        kernels_file = output_kernels_file
        tmp_kernels_file = False

    if data_resolution is not None:
        resolution = data_resolution
    else:
        resolution = read_spatial_resolution(data_file)

    pyhrf.verbose(1,'Data resolution: %s' %resolution)
    pyhrf.verbose(2,'Projection parameters:')
    pyhrf.verbose(2,'   - geodesic decay: %f mm' %geod_decay)
    pyhrf.verbose(2,'   - normal decay: %f mm' %norm_decay)
    pyhrf.verbose(2,'   - kernel size: %f voxels' %kernel_size)
    
    create_projection_kernels(input_mesh, kernels_file, resolution, 
                              geod_decay, norm_decay, kernel_size)
    
    project_fmri_from_kernels(input_mesh, kernels_file, data_file,
                              output_tex_file, tex_bin_threshold)

    if tmp_kernels_file:
        os.remove(kernels_file)
Example #12
    def test_ward_spatial_real_data(self):
        from pyhrf.glm import glm_nipy_from_files
        #pyhrf.verbose.verbosity = 2

        fn = 'subj0_parcellation.nii.gz'
        mask_file = pyhrf.get_data_file_name(fn)

        bold = 'subj0_bold_session0.nii.gz'
        bold_file = pyhrf.get_data_file_name(bold)

        paradigm_csv_file = pyhrf.get_data_file_name('paradigm_loc_av.csv')
        output_dir = self.tmp_dir
        output_file = op.join(output_dir,
                              'parcellation_output_test_real_data.nii')

        tr = 2.4
        bet = glm_nipy_from_files(bold_file, tr,
                                paradigm_csv_file, output_dir,
                                mask_file, session=0,
                                contrasts=None,
                                hrf_model='Canonical',
                                drift_model='Cosine', hfcut=128,
                                residuals_model='spherical',
                                fit_method='ols', fir_delays=[0])[0]

        pyhrf.verbose(2, 'betas_files: %s' %' '.join(bet))

        cmd = 'pyhrf_parcellate_glm -m %s %s -o %s -v %d -n %d ' \
            '-t ward_spatial' % (mask_file, ' '.join(bet), output_file,
                                 pyhrf.verbose.verbosity, 10)

        pyhrf.verbose(1, 'cmd: %s' % cmd)
        if os.system(cmd) != 0:
            raise Exception('"' + cmd + '" did not execute correctly')
Example #13
    def checkAndSetInitHabit(self, variables):

        # Init habituation speed factors and time-varying NRLs
        if self.habits is None:  # no habituation speed specified
            if not self.sampleHabitFlag:
                if self.dataInput.simulData is not None:
                    # Warning: this is problematic if the model is run on
                    # data simulated with the stationary model; in that case,
                    # force this branch to be skipped and use null
                    # habituation factors.
                    # Use simulated data for HABITUATION sampling:
                    print "load Habituation from simulData", \
                        self.dataInput.simulData.habitspeed.data
                    self.habits = self.dataInput.simulData.habitspeed.data
                    self.timeNrls = self.dataInput.simulData.nrls.timeNrls
                    self.Gamma = self.setupGamma()
                else:  # otherwise, use zeros only (stationary model)
                    self.habits = numpy.zeros((self.nbConditions, self.nbVox),
                                              dtype=float)
                    self.setupTimeNrls()
            else:
                pyhrf.verbose(2, "Uniform set up of habituation factors")
                habitCurrent = numpy.zeros((self.nbConditions, self.nbVox), dtype=float)
                for nc in xrange(self.nbConditions):
                    habitCurrent[nc,self.voxIdx[1][nc]] = numpy.random.rand(self.cardClass[1][nc])
                self.habits = habitCurrent

        if self.outputRatio:
            self.ratio = zeros((self.nbConditions, self.nbVox, 2), dtype=float)
            self.ratiocourbe = zeros((self.nbConditions, self.nbVox, 100, 5),
                                     dtype=float)
            self.compteur = numpy.zeros((self.nbConditions, self.nbVox),
                                        dtype=float)

        self.setupTimeNrls()
        pyhrf.verbose(4, 'initial habituation')
        pyhrf.verbose.printNdarray(5, self.habits)
Example #14
def create_hrf_from_territories(hrf_territories, primary_hrfs):

    pyhrf.verbose(1,'create_hrf_from_territories ...')
    pyhrf.verbose(1,' inputs: hrf_territories %s, primary_hrfs (%d,%d)' \
                      %(str(hrf_territories.shape), len(primary_hrfs),
                        len(primary_hrfs[0][0])))
    assert hrf_territories.ndim == 1
    hrfs = np.zeros((hrf_territories.size,primary_hrfs[0][0].size))
    territories = np.unique(hrf_territories)
    territories.sort()
    if territories.min() == 1:
        territories = territories - 1

    assert territories.min() == 0
    assert territories.max() < len(primary_hrfs)
    for territory in territories:
        # TODO: test consistency of HRF lengths
        m = np.where(hrf_territories == territory)[0]
        hrfs[m, :] = primary_hrfs[territory][1]
    return hrfs.transpose()
Example #15
def create_asl_from_stim_induced(bold_stim_induced, perf_stim_induced,
                                 ctrl_tag_mat, dsf,
                                 perf_baseline, noise, drift=None, outliers=None):
    """
    Downsample stim_induced signal according to downsampling factor 'dsf' and
    add noise and drift (nuisance signals) which has to be at downsampled
    temporal resolution.
    """
    bold = bold_stim_induced[0:-1:dsf,:].copy()
    perf = np.dot(ctrl_tag_mat, (perf_stim_induced[0:-1:dsf,:].copy() + \
                                 perf_baseline))

    pyhrf.verbose(3, 'create_asl_from_stim_induced ...')
    pyhrf.verbose(3, 'bold shape: %s, perf shape: %s, noise shape: %s, '
                  'drift shape: %s'
                  % (str(bold.shape), str(perf.shape), str(noise.shape),
                     str(drift.shape) if drift is not None else 'None'))

    asl = bold + perf
    if drift is not None:
        asl += drift
    if outliers is not None:
        asl += outliers
    asl += noise

    return asl
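
The downsampling itself is just strided slicing: a signal simulated at high temporal resolution keeps every dsf-th sample (the last time point is excluded by the -1 bound). A standalone illustration of the slice used above:

import numpy as np

dsf = 3                                        # downsampling factor
stim_induced = np.arange(12).reshape(12, 1)    # (time, voxel), high resolution
downsampled = stim_induced[0:-1:dsf, :]        # keep every dsf-th sample
print(downsampled.ravel())                     # [0 3 6 9]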
Example #16
def create_polynomial_drift(bold_shape, tr, drift_order, drift_var):
    # Draw random polynomial coefficients with variance drift_var and
    # expand them on a polynomial basis (one drift column per voxel).
    p = buildPolyMat(drift_order, bold_shape[0], tr)
    nvox = bold_shape[1]
    coeff = drift_var**.5 * randn(p.shape[1], nvox)
    drift = np.dot(p, coeff)
    pyhrf.verbose(3, 'Drift shape: %s' % str(drift.shape))
    return drift
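
A self-contained version of the same recipe, with a plain Vandermonde matrix standing in for pyhrf's buildPolyMat (an assumption; the actual basis and its scaling differ):

import numpy as np

def polynomial_drift(n_scans, n_vox, order, var):
    t = np.linspace(0., 1., n_scans)
    # Stand-in basis, not pyhrf's buildPolyMat:
    p = np.vander(t, order + 1, increasing=True)   # (n_scans, order+1)
    coeff = var**.5 * np.random.randn(p.shape[1], n_vox)
    return np.dot(p, coeff)                        # (n_scans, n_vox)

drift = polynomial_drift(n_scans=100, n_vox=5, order=4, var=10.)
print(drift.shape)   # (100, 5)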
Example #17
    def updateObsersables(self):
        GibbsSamplerVariable.updateObsersables(self)
        sHrf = self.samplerEngine.getVariable('hrf')
        sScale = self.samplerEngine.getVariable('scale')

        if sHrf.sampleFlag and np.allclose(sHrf.normalise,0.) and \
                not sScale.sampleFlag and self.sampleFlag:
            pyhrf.verbose(6, 'Normalizing posterior mean of mixture '
                          'parameters at each iteration ...')
            # Undo the contribution of the unscaled current value:
            self.cumul -= self.currentValue
            self.cumul3 -= (self.currentValue - self.mean)**2

            # Accumulate scaled quantities instead:
            cur_m_CA = self.currentValue[self.I_MEAN_CA]
            cur_v_CA = self.currentValue[self.I_VAR_CA]
            cur_v_CI = self.currentValue[self.I_VAR_CI]
            self.cumul[self.I_MEAN_CA] += cur_m_CA * sHrf.norm
            self.cumul[self.I_VAR_CA] += cur_v_CA * sHrf.norm**2
            self.cumul[self.I_VAR_CI] += cur_v_CI * sHrf.norm**2

            self.mean = self.cumul / self.nbItObservables

            self.cumul3[self.I_MEAN_CA] +=  (cur_m_CA * sHrf.norm - self.mean[self.I_MEAN_CA])**2
            self.cumul3[self.I_VAR_CA] +=  (cur_v_CA * sHrf.norm**2 - self.mean[self.I_VAR_CA])**2
            self.cumul3[self.I_VAR_CI] +=  (cur_v_CI * sHrf.norm**2 - self.mean[self.I_VAR_CI])**2

            self.error = self.cumul3 / self.nbItObservables
Example #18
    def sampleNrlsSerial(self, rb, h, varCI, varCA, meanCA,
                         gTg, variables):

        pyhrf.verbose(3, 'Sampling Nrls (serial, spatial prior) ...')
        sIMixtP = variables[self.samplerEngine.I_MIXT_PARAM]
        var = sIMixtP.getCurrentVars()
        mean = sIMixtP.getCurrentMeans()
        rb = variables[self.samplerEngine.I_NOISE_VAR].currentValue

        # Add one dimension to be consistent with habituation model
        varXh = variables[self.samplerEngine.I_HRF].varXh
        varXht = varXh.transpose()
        nrls = self.currentValue

        neighbours = self.dataInput.neighboursIndexes

        beta = self.samplerEngine.getVariable('beta').currentValue
        voxOrder = permutation(self.nbVox)

        sampleSmmNrl2(voxOrder.astype(np.int32), rb.astype(np.float64),
                      neighbours.astype(np.int32), self.varYbar,
                      self.labels, np.array([varXh], dtype=np.float64),
                      self.currentValue,
                      self.nrlsSamples.astype(np.float64),
                      self.labelsSamples.astype(np.float64),
                      np.array([varXht], dtype=np.float64),
                      gTg.astype(np.float64),
                      beta.astype(np.float64), mean.astype(np.float64),
                      var.astype(np.float64), self.meanClassApost,
                      self.varClassApost, self.nbClasses,
                      self.sampleLabelsFlag+0, self.iteration,
                      self.nbConditions)

        self.countLabels(self.labels, self.voxIdx, self.cardClass)
Example #19
    def __init__(self, label, parent=None, attributes=None):

        super(UiNode, self).__init__()

        pyhrf.verbose(6, 'Create new UiNode(%s,%s,%s)' \
                        %(label, str(parent), str(attributes)))

        self._label = label
        self._children = []
        self._parent = parent
        if attributes is None:
            attributes = {}

        # Check input attributes (has to be a dict of strings):
        if not isinstance(attributes, dict):
            raise Exception('Wrong type "%s" for attributes, has to be a dict'\
                            %str(type(attributes)))

        for k,v in attributes.iteritems():
            if not isinstance(k, str):
                raise Exception('Wrong type for attribute "%s" '\
                                '(has to be string)' %str(k))
            if k not in ['init_obj', 'type'] and not isinstance(v, str):
                # init_obj can be a method/function (see Initable class)
                raise Exception('Wrong type for value of attribute "%s" (%s) '\
                                '(has to be string)' %(str(k),str(v)))

        self._attributes = attributes

        if parent is not None:
            parent.add_child(self)
Example #20
File: noise.py Project: Solvi/pyhrf
    def linkToData(self, dataInput):
        self.dataInput = dataInput
        self.nbConditions = self.dataInput.nbConditions
        self.nbVox = self.dataInput.nbVoxels
        self.ny = self.dataInput.ny
        self.nbColX = self.dataInput.nbColX

        # Do some allocations :
        self.beta = np.zeros(self.nbVox, dtype=float)
        self.mXhQXh = np.zeros((self.nbConditions,self.nbConditions),
                               dtype=float)

        if self.dataInput.simulData is not None:
            if isinstance(self.dataInput.simulData, dict):
                if 'v_gnoise' in self.dataInput.simulData:
                    self.trueValue = self.dataInput.simulData['v_gnoise']
                    pyhrf.verbose(3, 'True noise variance = %1.3f'
                                  % self.trueValue)
            elif isinstance(dataInput.simulData, list):
                sd = dataInput.simulData[0]
                if isinstance(sd, dict):
                    self.trueValue = sd['noise'].var(0).astype(np.float64)
                else:
                    self.trueValue = sd.noise.variance
            else:
                self.trueValue = self.dataInput.simulData.noise.variance
Example #21
def split_big_parcels(parcel_file, output_file, max_size=400):
    """Split every parcel bigger than max_size voxels into smaller parcels."""
    print 'split_big_parcels ...'
    roiMask, roiHeader = read_volume(parcel_file)
    roiIds = np.unique(roiMask)
    background = roiIds.min()
    labels = roiMask[np.where(roiMask>background)].astype(int)
    if (np.bincount(labels) <= max_size).all():
        pyhrf.verbose(1, 'no parcel to split')
        return

    graphs = parcels_to_graphs(roiMask, kerMask3D_6n)
    for roiId in roiIds:
        if roiId != background:
            roi_size = (roiMask==roiId).sum()
            if roi_size > max_size:
                print 'roi %d, size = %d' %(roiId, roi_size)
                nparcels = int(np.ceil(roi_size*1./max_size))
                print 'split into %d parcels ...' %(nparcels)
                split_parcel(labels, graphs, roiId, nparcels, inplace=True,
                             verbosity=1)

    final_roi_mask = np.zeros_like(roiMask)
    final_roi_mask[np.where(roiMask>background)] = labels
    #print np.bincount(labels)
    assert (np.bincount(labels) <= max_size).all()
    write_volume(final_roi_mask, output_file, roiHeader)
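
Both the early exit and the final assertion rely on np.bincount, which returns the size of every parcel in one call (index = parcel id). A toy check:

import numpy as np

labels = np.array([1, 1, 1, 2, 2, 3])   # parcel label of each foreground voxel
sizes = np.bincount(labels)             # [0, 3, 2, 1]
print((sizes <= 2).all())               # False: parcel 1 exceeds a max_size of 2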
Example #22
File: graph.py Project: Solvi/pyhrf
def graph_from_mesh(polygonList):
    """Return the list of neighbour indexes for each position, given a list
    of polygons. Each polygon is a triplet of position indexes, assumed to
    be contiguous and zero-based.

    """

    indexSet = set()
    for l in polygonList:
        indexSet.update(l)
    nbPositions = len(indexSet)

    pyhrf.verbose(6, 'indexSet:')
    pyhrf.verbose.printDict(6, indexSet)

    neighbourSets = {}
    for triangle in polygonList:
        for idx in triangle:
            if idx not in neighbourSets:
                neighbourSets[idx] = set()
            neighbourSets[idx].update(triangle)

    # idx<->set --> idx<->list
    neighbourLists = np.empty(nbPositions, dtype=object)
    for idx in indexSet:
        neighbourSets[idx].remove(idx)
        neighbourLists[idx] = np.array(list(neighbourSets[idx]),dtype=int)

    return neighbourLists
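
Since graph_from_mesh only needs the polygon list, it can be exercised on a toy mesh; here two triangles sharing the edge (1, 2), with contiguous vertex indexes 0..3 (element order inside each neighbour list may vary):

triangles = [(0, 1, 2), (1, 2, 3)]
neighbours = graph_from_mesh(triangles)
# neighbours[0] -> [1, 2]      (vertex 0 belongs to the first triangle only)
# neighbours[1] -> [0, 2, 3]   (vertex 1 is shared by both triangles)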
Example #23
    def computeWithJeffreyPriors(self, j, cardCDj):
        if pyhrf.verbose.verbosity >= 3:
            print 'cond %d - card CD = %d' %(j,cardCDj)
            print 'cond %d - cur mean CD = %f' %(j,self.currentValue[self.I_MEAN_CD,j])
            if cardCDj > 0:
                print 'cond %d - nrl CD: %f(v%f)[%f,%f]' %(j,self.nrlCD[j].mean(),
                                                           self.nrlCD[j].var(),
                                                           self.nrlCD[j].min(),
                                                           self.nrlCD[j].max())

        if cardCDj > 1:
            nrlC2Centered = self.nrlCD[j] - self.currentValue[self.I_MEAN_CD,j]
            nu2j = dot(nrlC2Centered, nrlC2Centered)
            varCDj = 1.0 / random.gamma(0.5 * (cardCDj + 1) - 1, 2. / nu2j)

            eta2j = mean(self.nrlCD[j])
            meanCDj = random.normal(eta2j, (varCDj / cardCDj)**0.5)

            if pyhrf.verbose.verbosity >= 3:
                print 'varCD ~ InvGamma(%f, nu2j/2=%f)' %(0.5*(cardCDj+1)-1,
                                                          nu2j/2.)
                print ' -> mean =', (nu2j/2.)/(0.5*(cardCDj+1)-1)
        else:
            pyhrf.verbose(1, 'Warning: cardCD <= 1!')
            varCDj = 1.0 / random.gamma(.5, 2.)
            if cardCDj == 0 :
                meanCDj = random.normal(5.0, varCDj**0.5)
            else:
                meanCDj = random.normal(self.nrlCD[j], varCDj**0.5)

        if pyhrf.verbose.verbosity >= 3:
            print 'Sampled components - cond', j
            print 'mean CD =', meanCDj, 'var CD =', varCDj

        return meanCDj, varCDj
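
Stripped of the sampler plumbing, the update above samples the class variance from an inverse-gamma distribution whose scale is the centred sum of squares, then the class mean from a normal shrunk by the class cardinality. A standalone sketch under those assumptions:

import numpy as np

nrl = np.random.randn(50) * 2. + 5.   # toy NRLs of one class, one condition
mu_cur = 5.                           # current value of the class mean

n = nrl.size
nu2 = np.dot(nrl - mu_cur, nrl - mu_cur)
# var ~ InvGamma(0.5*(n+1) - 1, nu2/2), sampled via 1/Gamma(shape, scale):
var = 1.0 / np.random.gamma(0.5 * (n + 1) - 1, 2. / nu2)
# mean ~ Normal(empirical mean, var/n):
mean = np.random.normal(nrl.mean(), (var / n)**0.5)
print(mean, var)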
Example #24
    def linkToData(self, data):

        self.M = data.nbConditions
        dp = data.paradigm
        self.condition_names = dp.stimOnsets.keys()
        self.onsets = dp.stimOnsets.values()
        self.LengthOnsets = [dp.stimDurations[n] for n in self.condition_names]

        self.nbVoxels = data.bold.shape[1]
        if not self.avg_bold:
            self.bold = data.bold
        else:
            self.bold = data.bold.mean(1)[:,np.newaxis]
            pyhrf.verbose(2, 'BOLD is averaged -> shape: %s' \
                              %str(self.bold.shape))

        self.sscans = data.sessionsScans
        self.nbSessions = data.nbSessions
        self.I = self.nbSessions
        self.ImagesNb = sum([len(ss) for ss in self.sscans])
        self.TR = data.tr

        self.Ni = array([len(ss) for ss in self.sscans])
        self.OnsetList = [[o[i] for o in self.onsets] \
                              for i in xrange(self.nbSessions)]
        self.Qi = zeros(self.nbSessions, dtype=int) + 2

        self.history = defaultdict(init_dict)
Example #25
    def split(self, dump_sub_results=None, make_sub_outputs=None,
              output_dir=None, output_file_list=None):

        if dump_sub_results is None:
            dump_sub_results = (self.result_dump_file is not None)
        if make_sub_outputs is None:
            make_sub_outputs = self.make_outputs

        if output_dir is None:
            output_dir = self.output_dir

        sub_treatments = [FMRITreatment(d, deepcopy(self.analyser),
                                        make_outputs=make_sub_outputs,
                                        output_dir=output_dir) \
                              for d in self.analyser.split_data(self.data)]

        if output_dir is not None:
            pyhrf.verbose(1, 'Dump sub treatments in: %s ...' %output_dir)
            size_of = lambda t: t.data.get_nb_vox_in_mask()

            for it, sub_t in enumerate(sorted(sub_treatments, key=size_of,
                                              reverse=True)):
                if dump_sub_results:
                    sub_t.result_dump_file = op.join(output_dir,
                                                     'result_%04d.pck' % it)
                fn = op.join(output_dir, 'treatment_%04d.pck' % it)
                fout = open(fn, 'wb')
                cPickle.dump(sub_t, fout)
                fout.close()
                if output_file_list is not None:
                    output_file_list.append(fn)

        return sub_treatments
Example #26
    def sampleNrlsSerialWithRelVar(self, rb, h, gTg, variables, w, t1, t2):

        pyhrf.verbose(3, 'Sampling Nrls (serial, spatial prior) ...')
        sIMixtP = variables[self.samplerEngine.I_MIXT_PARAM]
        var = sIMixtP.getCurrentVars()
        mean = sIMixtP.getCurrentMeans()
        rb = variables[self.samplerEngine.I_NOISE_VAR].currentValue
        y = self.dataInput.varMBY
        matPl = self.samplerEngine.getVariable('drift').matPl
        sampleWFlag = variables[self.samplerEngine.I_W].sampleFlag

        # Add one dimension to be consistent with habituation model
        varXh = variables[self.samplerEngine.I_HRF].varXh
        varXht = varXh.transpose()
        nrls = self.currentValue

        neighbours = self.dataInput.neighboursIndexes

        beta = self.samplerEngine.getVariable('beta').currentValue
        voxOrder = permutation(self.nbVox)


        cardClassCA = np.zeros(self.nbConditions, dtype=int)
        for i in range(self.nbConditions):
            cardClassCA[i] = self.cardClass[self.L_CA, i]

        sampleSmmNrl2WithRelVar_NEW(voxOrder.astype(np.int32), rb.astype(np.float64),
                      neighbours.astype(np.int32), self.varYbar , y.astype(np.float64), matPl.astype(np.float64),
                      self.labels, np.array([varXh], dtype=np.float64),
                      self.currentValue,
                      self.nrlsSamples.astype(np.float64),
                      self.labelsSamples.astype(np.float64),
                      np.array([varXht], dtype=np.float64),
                      gTg.astype(np.float64),
                      beta.astype(np.float64), mean.astype(np.float64),
                      var.astype(np.float64), self.meanClassApost,
                      self.varClassApost, w.astype(np.int32), t1, t2,
                      cardClassCA.astype(np.int32),
                      self.nbClasses,
                      self.sampleLabelsFlag+0, self.iteration,
                      self.nbConditions,sampleWFlag)

        self.countLabels(self.labels, self.voxIdx, self.cardClass)
Example #27
File: io.py Project: philouc/pyhrf
def sub_sample_vol(image_file, dest_file, dsf, interpolation="continuous", verb_lvl=0):

    from nipy.labs.datasets.volumes.volume_img import VolumeImg  # ,CompositionError

    pyhrf.verbose(verb_lvl, "Subsampling at dsf=%d, %s -> %s" % (dsf, image_file, dest_file))

    interp = interpolation
    data_src, src_meta = read_volume(image_file)
    affine = src_meta[0]
    daffine = dsf * affine

    original_dtype = data_src.dtype
    if np.issubdtype(data_src.dtype, np.integer):
        # Convert to float to avoid "array type 5 not supported" on int arrays
        data_src = np.asfarray(data_src)

    if data_src.ndim == 4:
        ref_vol = data_src[:, :, :, 0]
    else:
        ref_vol = data_src

    img_src = VolumeImg(ref_vol, affine, "mine")
    img_dest = img_src.as_volume_img(daffine, interpolation=interpolation)

    # setup dest geometry:
    dest_header = src_meta[1].copy()

    dpixdim = np.array(src_meta[1]["pixdim"])
    dpixdim[1:4] *= dsf

    dest_header["pixdim"] = list(dpixdim)
    sh = ref_vol[::dsf, ::dsf, ::dsf, ...].shape
    dest_meta = (daffine, dest_header)

    # do the resampling:
    if data_src.ndim == 3:
        vol = img_dest.get_data()[: sh[0], : sh[1], : sh[2]]
        if ref_vol.dtype == np.int32 or np.allclose(np.round(ref_vol), ref_vol):  # if input is int
            vol = np.round(vol).astype(np.int32)
        write_volume(vol, dest_file, dest_meta)

    elif data_src.ndim == 4:
        imgs = [VolumeImg(i, affine, "mine")
                for i in np.rollaxis(data_src, 3, 0)]
        dvols = [i.as_volume_img(daffine, interpolation=interp).get_data()
                 for i in imgs]
        dvols = np.array(dvols)
        sub_vols = np.rollaxis(dvols[:, : sh[0], : sh[1], : sh[2]], 0, 4)
        write_volume(sub_vols, dest_file, dest_meta)

    else:
        raise Exception("Nb of dims (%d) not handled. Only 3D or 4D" % data_src.ndim)
Example #28
    def manageMappingInit(self, shape, axes_names):
        tans = self.dataInput.voxelMapping.getTargetAxesNames()
        i = axes_names.index("voxel")
        axes_names = axes_names[:i] + tans + axes_names[i + 1:]
        shape = shape[:i] + self.dataInput.finalShape + shape[i + 1:]
        pyhrf.verbose(5, "manageMappingInit returns:")
        pyhrf.verbose(5, " -> sh: %s, axes_names: %s" % (str(shape),
                                                         str(axes_names)))
        return shape, axes_names
Example #29
    def to_ui_node(self, label, parent=None):
        pyhrf.verbose(6, "Initable.to_ui_node(label=%s) ..." % label)
        n = UiNode(label, parent, {"init_obj": self._init_obj, "type": "Initable"})

        for pname, pval in self._init_parameters.iteritems():
            pyhrf.verbose(6, "pname: %s, pval: %s" % (str(pname), str(pval)))
            n.add_child(UiNode.from_py_object(pname, pval))
        return n
Example #30
def remote_mkdir(host, user, path):
    import paramiko
    pyhrf.verbose(1, 'Make remote dir %s@%s:%s ...' % (user, host, path))
    ssh = paramiko.SSHClient()
    known_hosts_file = os.path.join("~", ".ssh", "known_hosts")
    ssh.load_host_keys(os.path.expanduser(known_hosts_file))
    ssh.connect(host, username=user)
    sftp = ssh.open_sftp()
    sftp.mkdir(path)
    sftp.close()
    ssh.close()