コード例 #1
0
ファイル: parcellation.py プロジェクト: rcherbonnier/pyhrf
    def resolve(self):
        """Run the ant-based partitioning loop until the territories are
        balanced or a forced stop is triggered.

        Each iteration lets every ant act once, in random order.  A
        progress report (territory sizes, conquest speed and expected
        remaining duration) is emitted every 100000 iterations.
        """
        start = time()
        self.time = 0
        while not (self.balanced() or self.force_end()):
            # Visit all ants once per iteration, in random order.
            for ant_idx in permutation(self.nb_ants):
                self.ants[ant_idx].action(self.time)
            self.time += 1
            if self.time % 100000 != 0:
                continue
            # Periodic progress report.
            self.verbose(1, 'Time: %d' % self.time)

            self.verbose(1, 'Nb of uncontrolled sites: %d' %
                         self.uncontrolled)
            self.verbose(1, 'Territories: %s'
                         % ','.join(str(a.area_size) for a in self.ants))
            if self.uncontrolled > 0:
                # Conquest speed since the last report, extrapolated to
                # an expected remaining duration.
                delta_t = time() - self.tmark
                delta_site = self.prev_uncontrolled - self.uncontrolled
                self.verbose(1, '%d sites taken in %s'
                             % (delta_site, format_duration(delta_t)))
                self.total_time += delta_t
                speed = (self.nb_sites - self.uncontrolled) / self.total_time
                self.verbose(1, '%s expected for complete control'
                             % (format_duration(self.uncontrolled / speed)))

                self.prev_uncontrolled = self.uncontrolled
                self.tmark = time()

        self.verbose(1, 'Balloon partitioning done, took %s, %d iterations'
                     % (format_duration(time() - start), self.time))
コード例 #2
0
    def resolve(self):
        """Run the ant competition loop until the partition is balanced
        or a forced stop condition is met.

        Every iteration lets each ant act once (visited in random order);
        a progress report is emitted every 100000 iterations.
        """
        t0 = time()
        self.time = 0
        while not self.balanced() and not self.force_end():
            # Each ant acts once per iteration, in random order.
            for a_id in permutation(self.nb_ants):
                if self.ants[a_id].action(self.time):
                    continue
            self.time += 1
            if self.time % 100000 == 0:
                # Periodic progress report.
                self.verbose(1, 'Time: %d' % self.time)

                self.verbose(1, 'Nb of uncontrolled sites: %d' %
                             self.uncontrolled)
                sizes = [str(a.area_size) for a in self.ants]
                self.verbose(1, 'Territories: %s' % ','.join(sizes))
                if self.uncontrolled > 0:
                    # Estimate conquest speed since the last report and
                    # extrapolate the remaining duration.
                    delta_t = time() - self.tmark
                    duration = format_duration(delta_t)
                    delta_site = self.prev_uncontrolled - self.uncontrolled
                    self.verbose(1, '%d sites taken in %s'
                                 % (delta_site, duration))
                    self.total_time += delta_t
                    speed = (self.nb_sites - self.uncontrolled) / \
                        self.total_time
                    expected_duration = self.uncontrolled / speed
                    self.verbose(1, '%s expected for complete control'
                                 % (format_duration(expected_duration)))

                    self.prev_uncontrolled = self.uncontrolled
                    self.tmark = time()

        self.verbose(1, 'Balloon partitioning done, took %s, %d iterations'
                     % (format_duration(time() - t0), self.time))
コード例 #3
0
ファイル: core.py プロジェクト: thomas-vincent/pyhrf
    def execute(self):
        """Lazily load the fMRI data and analyser, run the estimation and
        return its result.

        Timing information is reported through pyhrf.verbose.
        """
        # Fetch data / analyser from their UI holders only when not
        # already provided.
        if self.data is None:
            self.data = self.data_ui.get_fmri_data()
        if self.analyser is None:
            self.analyser = self.analyser_ui.get_analyser()

        # Long data summary only when verbosity is at least 2.
        use_long = pyhrf.verbose.verbosity >= 2
        pyhrf.verbose(2, self.data.get_summary(long=use_long))

        pyhrf.verbose(1, 'All data loaded !')
        pyhrf.verbose(1, 'running estimation ...')
        # TODO : print summary of analyser setup.
        pyhrf.verbose(1, 'Estimation start date is : %s'
                      % time.strftime('%c'))

        t_start = time.time()
        result = self.analyser.analyse(self.data)

        pyhrf.verbose(1, 'Estimation done, total time : %s'
                      % format_duration(time.time() - t_start))
        pyhrf.verbose(1, 'End date is : ' + time.strftime('%c'))

        return result
コード例 #4
0
def make_parcellation_from_files(betaFiles,
                                 maskFile,
                                 outFile,
                                 nparcels,
                                 method,
                                 dry=False,
                                 spatial_weight=10.):
    """Compute a parcellation of the mask into `nparcels` parcels driven
    by the given beta maps and save it to `outFile`.

    Parameters:
        betaFiles: list of paths to beta (effect) images; sorted before use.
        maskFile: path to the mask image.
        outFile: path of the output parcellation image.
        nparcels: requested number of parcels.
        method: parcellation method name, forwarded to fixed_parcellation.
        dry: if True, only log the setup and do nothing.
        spatial_weight: spatial regularization weight (mu).

    Returns the object produced by fixed_parcellation, or None when the
    mask file is missing or `dry` is True.
    """
    if not op.exists(maskFile):
        print 'Error, file %s not found' % maskFile
        return

    betaFiles = sorted(betaFiles)
    for b in betaFiles:
        if not op.exists(b):
            raise Exception('Error, file %s not found' % b)

    logger.info('Mask image: %s', op.basename(maskFile))
    logger.info('Betas: %s ... %s', op.basename(betaFiles[0]),
                op.basename(betaFiles[-1]))
    logger.info("Method: %s, nb parcels: %d", method, nparcels)
    logger.info('Spatial weight: %f', spatial_weight)

    if not dry:
        logger.info('Running parcellation ... ')
        logger.info('Start date is: %s', strftime('%c', localtime()))
        t0 = time()
        v = logger.getEffectiveLevel() <= logging.INFO
        lpa = fixed_parcellation(maskFile,
                                 betaFiles,
                                 nparcels,
                                 nn=6,
                                 method=method,
                                 fullpath=outFile,
                                 verbose=v,
                                 mu=spatial_weight)

        from pyhrf.ndarray import xndarray
        c = xndarray.load(outFile)
        # Shift labels so that the background label is 0 when the
        # parcellation uses -1 as its minimum label.
        if c.min() == -1:
            c.data += 1

        for i in np.unique(c.data):
            # Remove parcels with fewer than 2 voxels by relabelling
            # them as background. BUGFIX: the previous test
            # `len(c.data == 1) < 2` compared the total array length
            # (and tested the wrong label), so undersized parcels were
            # never removed.
            if i != 0 and (c.data == i).sum() < 2:
                c.data[np.where(c.data == i)] = 0

        c.save(outFile)

        logger.info('Parcellation complete, took %s',
                    format_duration(time() - t0))
        return lpa
    else:
        logger.info('Dry run.')
コード例 #5
0
ファイル: treatment.py プロジェクト: zddzxxsmile/pyhrf
 def execute(self):
     """Run the analyser on the loaded data and return the analysis
     result, logging timing information around the call."""
     logger.debug('Input data description:')
     logger.debug(self.data.getSummary(long=True))
     logger.info('All data loaded !')
     logger.info('running estimation ...')
     # TODO : print summary of analyser setup.
     logger.info('Estimation start date is : %s', time.strftime('%c'))
     t_start = time.time()
     analysis_result = self.analyser.analyse(self.data, self.output_dir)
     logger.info('Estimation done, total time : %s',
                 format_duration(time.time() - t_start))
     logger.info('End date is : %s', time.strftime('%c'))
     return analysis_result
コード例 #6
0
ファイル: treatment.py プロジェクト: pyhrf/pyhrf
 def execute(self):
     """Run the analyser on self.data and return the analysis result.

     Logs a long data summary at debug level and timing information at
     info level.
     """
     logger.debug('Input data description:')
     logger.debug(self.data.getSummary(long=True))
     logger.info('All data loaded !')
     logger.info('running estimation ...')
     # TODO : print summary of analyser setup.
     logger.info('Estimation start date is : %s', time.strftime('%c'))
     tIni = time.time()  # wall-clock start, for duration reporting
     result = self.analyser.analyse(self.data, self.output_dir)
     logger.info('Estimation done, total time : %s',
                 format_duration(time.time() - tIni))
     logger.info('End date is : %s', time.strftime('%c'))
     return result
コード例 #7
0
ファイル: treatment.py プロジェクト: zddzxxsmile/pyhrf
    def output(self, result, dump_result=True, outputs=True):
        """Optionally pickle *result*, then write the analyser outputs
        to self.output_dir.

        Returns the value produced by analyser.outputResults, or None
        when outputs are disabled or the analyser implements no output
        function.
        """
        if dump_result and self.result_dump_file is not None:
            self.pickle_result(result)

        if not outputs:
            return None

        logger.info('Output of results to %s ...', self.output_dir)
        t_start = time.time()
        try:
            written = self.analyser.outputResults(result, self.output_dir)
        except NotImplementedError:
            logger.error('No output function defined')
        else:
            logger.info('Creation of outputs took : %s',
                        format_duration(time.time() - t_start))
            return written
コード例 #8
0
ファイル: treatment.py プロジェクト: pmesejo/pyhrf
    def output(self, result, dump_result=True, outputs=True):
        """Dump *result* to a pickle file (when enabled) and write the
        analyser outputs to self.output_dir.

        Returns the value of analyser.outputResults, or None when
        outputs are disabled or no output function is implemented.
        """
        if dump_result and self.result_dump_file is not None:
            self.pickle_result(result)

        if outputs:
            pyhrf.verbose(1, 'Output of results to %s ...' %self.output_dir)
            try:
                tIni = time.time()
                r = self.analyser.outputResults(result, self.output_dir)
                pyhrf.verbose(1,'Creation of outputs took : %s'
                              %format_duration(time.time()-tIni))
                return r
            except NotImplementedError :
                # The analyser does not implement outputResults.
                pyhrf.verbose(1,'No output function defined')
コード例 #9
0
    def run(self):
        """
        Launch the voxel-wise HRF estimation.

        Each voxel (point of interest, POI) is processed independently:
        its data is read, the working matrices are (re)initialized, the
        low-frequency drift basis and precomputed inverses are built,
        then the EM solver is run and its results stored in the matrices
        created by InitStorageMat.
        """
        logger.info('Starting voxel-wise HRF estimation ...')
        logger.info('nvox=%d, ncond=%d, nscans=%d', self.nbVoxels, self.M,
                    self.ImagesNb)
        # initialization of the matrices that will store all voxel results
        logger.info("Init storage ...")
        self.InitStorageMat()
        logger.info("Compute onset matrix ...")
        self.Compute_onset_matrix3()

        # per-voxel count of EM iterations needed to reach the stop
        # criterion (summarized at the end)
        self.stop_iterations = np.zeros(self.bold.shape[1], dtype=int)

        # voxelwise analysis. This loop handles the currently analyzed voxels.
        for POI in xrange(self.bold.shape[1]):  # POI = point of interest
            t0 = time()
            logger.info("Point %s / %s", str(POI), str(self.nbVoxels))
            self.ReadPointOfInterestData(POI)

            # initialize with zeros or ones all matrix and vector used in
            # the class
            logger.info("Init matrix and vectors ...")
            self.InitMatrixAndVectors(POI)

            # compute onset matrix

            # compute low frequency basis
            logger.info('build low freq mat ...')
            self.buildLowFreqMat()

            # precompute useful data
            logger.info('Compute inv R ...')
            self.Compute_INV_R_and_R_and_DET_R()

            # solve find response functions and hyperparameters
            logger.info('EM solver ...')
            self.EM_solver(POI)

            # store current results in matrices initialized in 'InitStoringMat'
            logger.info('Store res ...')
            self.StoreRes(POI)
            logger.info("Done in %s", format_duration(time() - t0))

        self.clean_memory()

        logger.info('Nb of iterations to reach stop crit: %s',
                    array_summary(self.stop_iterations))
コード例 #10
0
ファイル: rfir.py プロジェクト: ainafp/pyhrf
    def run(self):
        """
        Launch the analysis: voxel-wise RFIR HRF estimation.

        After global initialization (result storage and onset matrix),
        every voxel is analyzed independently with the EM solver and its
        per-voxel results are stored.
        """
        logger.info('Starting voxel-wise HRF estimation ...')
        logger.info('nvox=%d, ncond=%d, nscans=%d', self.nbVoxels, self.M,
                    self.ImagesNb)
        # initialization of the matrices that will store all voxel results
        logger.info("Init storage ...")
        self.InitStorageMat()
        logger.info("Compute onset matrix ...")
        self.Compute_onset_matrix3()

        # number of EM iterations per voxel until the stop criterion
        self.stop_iterations = np.zeros(self.bold.shape[1], dtype=int)

        # voxelwise analysis. This loop handles the currently analyzed voxels.
        for POI in xrange(self.bold.shape[1]):  # POI = point of interest
            t0 = time()
            logger.info("Point %s / %s", str(POI), str(self.nbVoxels))
            self.ReadPointOfInterestData(POI)

            # initialize with zeros or ones all matrix and vector used in
            # the class
            logger.info("Init matrix and vectors ...")
            self.InitMatrixAndVectors(POI)

            # compute onset matrix

            # compute low frequency basis
            logger.info('build low freq mat ...')
            self.buildLowFreqMat()

            # precompute useful data
            logger.info('Compute inv R ...')
            self.Compute_INV_R_and_R_and_DET_R()

            # solve find response functions and hyperparameters
            logger.info('EM solver ...')
            self.EM_solver(POI)

            # store current results in matrices initialized in 'InitStoringMat'
            logger.info('Store res ...')
            self.StoreRes(POI)
            logger.info("Done in %s", format_duration(time() - t0))

        self.clean_memory()

        logger.info('Nb of iterations to reach stop crit: %s',
                    array_summary(self.stop_iterations))
コード例 #11
0
ファイル: parcellation.py プロジェクト: Solvi/pyhrf
def make_parcellation_from_files(betaFiles, maskFile, outFile, nparcels,
                                 method, dry=False, spatial_weight=10.):
    """Compute a parcellation of the mask into `nparcels` parcels driven
    by the given beta maps and save it to `outFile`.

    Parameters:
        betaFiles: list of paths to beta (effect) images; sorted before use.
        maskFile: path to the mask image.
        outFile: path of the output parcellation image.
        nparcels: requested number of parcels.
        method: parcellation method name, forwarded to fixed_parcellation.
        dry: if True, only report the setup and do nothing.
        spatial_weight: spatial regularization weight (mu).

    Returns the object produced by fixed_parcellation, or None when an
    input file is missing or `dry` is True.
    """
    if not op.exists(maskFile):
        print 'Error, file %s not found' %maskFile
        return

    betaFiles = sorted(betaFiles)
    for b in betaFiles:
        if not op.exists(b):
            print 'Error, file %s not found' %b
            return

    pyhrf.verbose(1, 'Mask image: ' + op.basename(maskFile))
    pyhrf.verbose(1, 'Betas: ' + op.basename(betaFiles[0]) + ' ... ' + \
                      op.basename(betaFiles[-1]))
    pyhrf.verbose(1, "Method: %s, nb parcels: %d" %(method, nparcels))
    pyhrf.verbose(1, 'Spatial weight: %f' %spatial_weight)

    if not dry:
        pyhrf.verbose(1, 'Running parcellation ... ')
        pyhrf.verbose(1, 'Start date is: %s' %strftime('%c',localtime()))
        t0 = time()
        v = pyhrf.verbose.verbosity
        lpa = fixed_parcellation(maskFile, betaFiles, nparcels, nn=6,
                                 method=method, fullpath=outFile, verbose=v,
                                 mu=spatial_weight)

        from pyhrf.ndarray import xndarray
        c = xndarray.load(outFile)
        # Shift labels so that the background label is 0 when the
        # parcellation uses -1 as its minimum label.
        if c.min() == -1:
            c.data += 1

        for i in np.unique(c.data):
            # Remove parcels with fewer than 2 voxels by relabelling
            # them as background. BUGFIX: the previous test
            # `len(c.data==1) < 2` compared the total array length (and
            # tested the wrong label), so undersized parcels were never
            # removed.
            if i != 0 and (c.data == i).sum() < 2:
                c.data[np.where(c.data == i)] = 0

        c.save(outFile)

        pyhrf.verbose(1, 'Parcellation complete, took %s' \
                          %format_duration(time() - t0))
        return lpa
    else:
        pyhrf.verbose(1, 'Dry run.')
コード例 #12
0
ファイル: parcellation.py プロジェクト: rcherbonnier/pyhrf
def make_parcellation_from_files(betaFiles, maskFile, outFile, nparcels,
                                 method, dry=False, spatial_weight=10.):
    """Compute a parcellation of the mask into `nparcels` parcels driven
    by the given beta maps and save it to `outFile`.

    Parameters:
        betaFiles: list of paths to beta (effect) images; sorted before use.
        maskFile: path to the mask image.
        outFile: path of the output parcellation image.
        nparcels: requested number of parcels.
        method: parcellation method name, forwarded to fixed_parcellation.
        dry: if True, only log the setup and do nothing.
        spatial_weight: spatial regularization weight (mu).

    Returns the object produced by fixed_parcellation, or None when the
    mask file is missing or `dry` is True.
    """
    if not op.exists(maskFile):
        print 'Error, file %s not found' % maskFile
        return

    betaFiles = sorted(betaFiles)
    for b in betaFiles:
        if not op.exists(b):
            raise Exception('Error, file %s not found' % b)

    logger.info('Mask image: %s', op.basename(maskFile))
    logger.info('Betas: %s ... %s', op.basename(betaFiles[0]),
                op.basename(betaFiles[-1]))
    logger.info("Method: %s, nb parcels: %d", method, nparcels)
    logger.info('Spatial weight: %f', spatial_weight)

    if not dry:
        logger.info('Running parcellation ... ')
        logger.info('Start date is: %s', strftime('%c', localtime()))
        t0 = time()
        v = logger.getEffectiveLevel() <= logging.INFO
        lpa = fixed_parcellation(maskFile, betaFiles, nparcels, nn=6,
                                 method=method, fullpath=outFile, verbose=v,
                                 mu=spatial_weight)

        from pyhrf.ndarray import xndarray
        c = xndarray.load(outFile)
        # Shift labels so that the background label is 0 when the
        # parcellation uses -1 as its minimum label.
        if c.min() == -1:
            c.data += 1

        for i in np.unique(c.data):
            # Remove parcels with fewer than 2 voxels by relabelling
            # them as background. BUGFIX: the previous test
            # `len(c.data == 1) < 2` compared the total array length
            # (and tested the wrong label), so undersized parcels were
            # never removed.
            if i != 0 and (c.data == i).sum() < 2:
                c.data[np.where(c.data == i)] = 0

        c.save(outFile)

        logger.info(
            'Parcellation complete, took %s', format_duration(time() - t0))
        return lpa
    else:
        logger.info('Dry run.')
コード例 #13
0
ファイル: treatment.py プロジェクト: pmesejo/pyhrf
    def execute(self):
        """Run the analyser on self.data and return the analysis result.

        A long or short input-data summary is printed depending on the
        current verbosity level; timing information is reported around
        the analyser call.
        """
        # Verbosity >= 2: long data summary; otherwise short summary.
        if pyhrf.verbose.verbosity >= 2:
            pyhrf.verbose(2, 'Input data description:')
            pyhrf.verbose(2, self.data.getSummary(long=True))
        else:
            pyhrf.verbose(1, 'Input data description:')
            pyhrf.verbose(1, self.data.getSummary(long=False))

        pyhrf.verbose(1,'All data loaded !')
        pyhrf.verbose(1,'running estimation ...')
        #TODO : print summary of analyser setup.
        pyhrf.verbose(1,'Estimation start date is : %s'
                      %time.strftime('%c'))
        tIni = time.time()  # wall-clock start, for duration reporting
        result = self.analyser.analyse(self.data, self.output_dir)

        pyhrf.verbose(1,'Estimation done, total time : %s'
                      %format_duration(time.time()-tIni))
        pyhrf.verbose(1,'End date is : '+time.strftime('%c'))

        return result
コード例 #14
0
ファイル: treatment.py プロジェクト: pmesejo/pyhrf
def run_pyhrf_cmd_treatment(cfg_cmd, exec_cmd, default_cfg_file,
                            default_profile_file, label_for_cluster):
    """Command-line entry point running a pyhrf treatment.

    The treatment is loaded either from a pickled dump (-t) or from an
    XML configuration file (-c, defaulting to `default_cfg_file`), and
    then executed in one of three modes: parallel (-x), on a single
    pickled ROI (-r), or sequentially on all ROIs, with optional
    cProfile profiling (-p).

    Parameters
    ----------
    cfg_cmd: name of the companion command that generates a config file
        (shown in the error message when the config file is missing).
    exec_cmd: name of this execution command (not referenced in this
        function body).
    default_cfg_file: XML configuration file used when no other input
        is given.
    default_profile_file: destination file for cProfile statistics.
    label_for_cluster: cluster submission label (not referenced in this
        function body).
    """

    usage = 'usage: %%prog [options]'

    description = 'Manage a joint detection-estimation treatment of fMRI data.' \
                'This command runs the treatment defined in an xml '\
                'parameter file. See pyhrf_jde_buildcfg command to build a'\
                'template of such a file. If no xml file found, then runs a '\
                'default example analysis.'

    # Command-line interface definition.
    parser = OptionParser(usage=usage, description=description)

    parser.add_option('-c','--input-cfg-file', metavar='XMLFILE', dest='cfgFile',
                    default=default_cfg_file,
                    help='Configuration file: XML file containing parameters'\
                    ' defining input data and analysis to perform.')

    parser.add_option('-r','--roi-data', metavar='PICKLEFILE', dest='roidata',
                    default=None, help='Input fMRI ROI data. The data '\
                    'definition part in the config file is ignored.')

    parser.add_option('-t','--treatment_pck',
                      metavar='PICKLEFILE', dest='treatment_pck',
                      default=None, help='Input treatment as a pickle dump.' \
                          'The XML cfg file is ignored')

    parser.add_option('-s','--stop-on-error', dest='stop_on_error',
                      action='store_true',
                    default=False, help='For debug: do not continue if error' \
                          ' during one ROI analysis')


    parser.add_option('-v','--verbose',dest='verbose',metavar='INTEGER',
                    type='int',default=0,
                    help=dictToString(pyhrf.verboseLevels))

    parser.add_option('-p','--profile',action='store_true', default=False,
                    help='Enable profiling of treatment. Store profile data in '\
                        '%s. NOTE: not avalaible in parallel mode.'\
                    %default_profile_file)

    parallel_choices = ['LAN','local','cluster']
    parser.add_option('-x','--parallel', choices=parallel_choices,
                    help='Parallel processing. Choices are %s'\
                        %string.join(parallel_choices,', '))


    (options,args) = parser.parse_args()

    pyhrf.verbose.set_verbosity(options.verbose)

    t0 = time.time()

    # Load the treatment: a pickled dump takes precedence over the XML
    # configuration file.
    if options.treatment_pck is not None:
        f = open(options.treatment_pck)
        treatment = cPickle.load(f)
        f.close()
    else:
        if not os.path.exists(options.cfgFile):
            print 'Error: could not find default configuration file "%s"\n'\
                'Consider running "%s" to generate it.' \
                %(options.cfgFile, cfg_cmd)
            sys.exit(1)
        else:
            pyhrf.verbose(1, 'Loading configuration from: "%s" ...' \
                              %options.cfgFile)
            f = open(options.cfgFile, 'r')
            sXml = string.join(f.readlines())
            f.close()
            treatment = xmlio.from_xml(sXml)
            if 0:  # debug: re-serialize to check the XML round-trip
                sXml = xmlio.to_xml(treatment)
                f = './treatment_cmd.xml'
                fOut = open(f,'w')
                fOut.write(sXml)
                fOut.close()
            #f = open(fOut, 'w')
            #cPickle.dump(treatment, f)
            #f.close()


    treatment.analyser.set_pass_errors(not options.stop_on_error)

    if options.parallel is not None:
        # Parallel execution is fully delegated to the treatment.
        # tmpDir = tempfile.mkdtemp(prefix='pyhrf',
        #                           dir=pyhrf.cfg['global']['tmp_path'])
        # pyhrf.verbose(1, 'Tmpdir: %s' %tmpDir)

        treatment.run(parallel=options.parallel)

    else:
        if options.roidata is not None:
            # Single-ROI mode: analyse one pickled ROI and dump the
            # result next to the input file.
            #treatment.set_roidata(options.roidata)
            pyhrf.verbose(1, 'Loading ROI data from: "%s" ...' \
                              %options.roidata)

            roidata = cPickle.load(open(options.roidata))
            roidata.verbosity = pyhrf.verbose.verbosity
            if pyhrf.verbose > 1:
                print roidata.getSummary()
            #TODO: enable profiling
            pyhrf.verbose(1, 'Launching analysis ...')
            if options.profile:
                cProfile.runctx("result = treatment.analyser(roidata)",
                                globals(),
                                {'treatment':treatment,'roidata': roidata},
                                default_profile_file)
            else:
                result = treatment.analyser(roidata)
            outPath = op.dirname(op.abspath(options.roidata))
            fOut = op.join(outPath,"result_%04d.pck" %roidata.get_roi_id())
            pyhrf.verbose(1, 'Dumping results to %s ...' %fOut)
            f = open(fOut, 'w')
            cPickle.dump(result, f)
            f.close()
        else:
            # Full sequential run over all ROIs.
            pyhrf.verbose(1, 'ROI data is none')
            if options.profile:
                cProfile.runctx("treatment.run()", globals(),
                                {'treatment':treatment}, default_profile_file)
            else:
                #print 'treatment:', treatment
                treatment.run()

    pyhrf.verbose(1, 'Estimation done, took %s' %format_duration(time.time() - t0))
コード例 #15
0
ファイル: rfir.py プロジェクト: pmesejo/pyhrf
    def run(self):
        """
        Launch the analysis: voxel-wise HRF estimation.

        After global initialization (result storage and onset matrix),
        each voxel (point of interest, POI) is analyzed independently
        with the EM solver and its results are stored.
        """
        pyhrf.verbose(1, 'Starting voxel-wise HRF estimation ...')
        pyhrf.verbose(1, 'nvox=%d, ncond=%d, nscans=%d' \
                          %(self.nbVoxels,self.M,self.ImagesNb))
        # initialization of the matrices that will store all voxel results
        pyhrf.verbose(3,"Init storage ...")
        self.InitStorageMat()
        pyhrf.verbose(3,"Compute onset matrix ...")
        self.Compute_onset_matrix3()

        # number of EM iterations per voxel until the stop criterion
        self.stop_iterations = np.zeros(self.bold.shape[1], dtype=int)

        # voxelwise analysis. This loop handles the currently analyzed voxels.
        for POI in xrange(self.bold.shape[1]):  # POI = point of interest
            t0 = time()
            pyhrf.verbose(3,"Point "+str(POI)+" / "+str(self.nbVoxels))
            self.ReadPointOfInterestData(POI)
            #print "Signal in point " , POI , "read"

# NOTE(review): commented-out debug code comparing successive design
# matrices; kept for reference.
#             if hasattr(self, 'X'):
#                 prevX = copyModule.deepcopy(self.X)
#             else:
#                 prevX = None

            #initialize with zeros or ones all matrix and vector used in
            #the class
            pyhrf.verbose(4, "Init matrix and vectors ...")
            self.InitMatrixAndVectors(POI)
            #print "Matrix and vectors initialized"

            #compute onset matrix

#             print "Onset matrix computed"
#             if prevX is not None:
#                 for i,x in enumerate(self.X):
#                     print 'sess %d ->' %i, (prevX[i]!=x).any()
#                     if 0:
#                         for m in xrange(prevX[i].shape[0]):
#                             for n in xrange(prevX[i].shape[1]):
#                                 for k in xrange(prevX[i].shape[2]):
#                                     print int(prevX[i][m,n,k]),'',
#                                 print ''
#                             print ''
#                             print ''

#                         for m in xrange(x.shape[0]):
#                             for n in xrange(x.shape[1]):
#                                 for k in xrange(x.shape[2]):
#                                     print int(x[m,n,k]),
#                                 print ''
#                             print ''
#                             print ''

            #compute low frequency basis
            pyhrf.verbose(4, 'build low freq mat ...')
            self.buildLowFreqMat()
            #print "Low frequency matrix generated"

            #precompute useful data
            pyhrf.verbose(4, 'Compute inv R ...')
            self.Compute_INV_R_and_R_and_DET_R()
            #print "R^{-1} and det(R) precomputed"

            #solve find response functions and hyperparameters
            pyhrf.verbose(4, 'EM solver ...')
            self.EM_solver(POI)

            #store current results in matrices initialized in 'InitStoringMat'
            pyhrf.verbose(4, 'Store res ...')
            self.StoreRes(POI)
            pyhrf.verbose(3,"Done in %s" %format_duration(time() - t0) )

        self.clean_memory()

        pyhrf.verbose(1, 'Nb of iterations to reach stop crit: %s' \
                      %array_summary(self.stop_iterations))
コード例 #16
0
    def analyse_roi(self, roiData):
        # roiData is of type FmriRoiData, see pyhrf.core.FmriRoiData
        # roiData.bold : numpy array of shape
        # BOLD has shape (nscans, nvoxels)
        # roiData.graph #list of neighbours
        n_scan_allsession, nvox = roiData.bold.shape
        n_scan = n_scan_allsession / self.n_session
        data0 = roiData.bold.reshape(self.n_session, n_scan, nvox)
        data = np.zeros_like(data0)
        for s in xrange(self.n_session):
            data_mean = np.mean(data0[s, :, :])
            data_range = (np.max(data0[s, :, :]) - np.min(data0[s, :, :]))
            data[s, :, :] = (data0[s, :, :] - data_mean) * 100 / data_range
        Onsets = roiData.paradigm.get_joined_onsets_dim()
        durations = roiData.paradigm.get_joined_durations_dim()
        TR = roiData.tr
        beta = self.beta
        scale = 1  # roiData.nbVoxels
        #nvox = roiData.get_nb_vox_in_mask()
        if self.scale:
            scale = nvox
        rid = roiData.get_roi_id()
        logger.info("JDE VEM - roi %d, nvox=%d, nconds=%d, nItMax=%d", rid,
                    nvox, len(Onsets), self.nItMax)

        #self.contrasts.pop('dummy_example', None)
        cNames = roiData.paradigm.get_stimulus_names()
        graph = roiData.get_graph()
        idx_tag1 = roiData.get_extra_data('asl_first_tag_scan_idx', 0)

        t_start = time()

        logger.info("fast VEM with drift estimation and a constraint")
        try:
            simu = roiData.simulation[0]
        except:
            try:
                simu = roiData.simulation
            except:
                simu = None

        if self.physio:
            NbIter, brls, estimated_brf, prls, estimated_prf, labels, \
            noiseVar, mu_Ma, sigma_Ma, mu_Mc, sigma_Mc, Beta, L, PL, alpha,\
            Sigma_brls, Sigma_prls, Sigma_brf, Sigma_prf, rerror, \
            CONTRAST_A, CONTRASTVAR_A, CONTRAST_C, CONTRASTVAR_C, \
            cA, cH, cC, cG, cZ, cAH, cCG, cTime, FE = Main_vbjde_physio(
                                       graph, data, Onsets, durations, self.hrfDuration,
                                       self.nbClasses, TR, beta, self.dt, scale=scale,
                                       estimateSigmaG=self.estimateSigmaG,
                                       sigmaH=self.sigmaH, sigmaG=self.sigmaG,
                                       gamma_h=self.gammaH, gamma_g=self.gammaG,
                                       NitMax=self.nItMax, NitMin=self.nItMin,
                                       estimateSigmaH=self.estimateSigmaH,
                                       estimateBeta=self.estimateBeta, PLOT=self.PLOT,
                                       contrasts=self.contrasts,
                                       computeContrast=self.computeContrast,
                                       idx_first_tag=idx_tag1,
                                       simulation=simu, sigmaMu=self.sigmaMu,
                                       estimateH=self.estimateH,
                                       estimateG=self.estimateG,
                                       estimateA=self.estimateA,
                                       estimateC=self.estimateC,
                                       estimateNoise=self.estimateNoise,
                                       estimateMP=self.estimateMixtParam,
                                       estimateZ=self.estimateLabels,
                                       estimateLA=self.estimateLA,
                                       constraint=self.constrained,
                                       positivity=self.positivity,
                                       use_hyperprior=self.use_hyperprior,
                                       phy_params=self.phy_params,
                                       prior=self.prior, zc=self.zc)

        # Plot analysis duration
        self.analysis_duration = time() - t_start
        logger.info('JDE VEM analysis took: %s',
                    format_duration(self.analysis_duration))

        # OUTPUTS: Pack all outputs within a dict
        logger.info("Preparing outputs... ")
        outputs = {}
        brf_time = np.arange(len(estimated_brf)) * self.dt
        outputs['brf'] = xndarray(estimated_brf,
                                  axes_names=['time'],
                                  axes_domains={'time': brf_time},
                                  value_label="BRF")
        #logger.info("BRF prepared ")
        domCondition = {'condition': cNames}
        outputs['brls'] = xndarray(brls.T,
                                   value_label="BRLs",
                                   axes_names=['condition', 'voxel'],
                                   axes_domains=domCondition)
        #logger.info("BRLs prepared ")
        prf_time = np.arange(len(estimated_prf)) * self.dt
        outputs['prf'] = xndarray(estimated_prf,
                                  axes_names=['time'],
                                  axes_domains={'time': prf_time},
                                  value_label="PRF")
        #logger.info("PRF prepared ")
        outputs['prls'] = xndarray(prls.T,
                                   value_label="PRLs",
                                   axes_names=['condition', 'voxel'],
                                   axes_domains=domCondition)
        #logger.info("PRLs prepared ")

        outputs['Sigma_brf'] = xndarray(Sigma_brf, value_label="Sigma_BRF")
        #logger.info("Sigma_BRF prepared ")
        outputs['Sigma_prf'] = xndarray(Sigma_prf, value_label="Sigma_PRF")
        #logger.info("Sigma_PRF prepared ")

        ad = {'condition': cNames, 'condition2': Onsets.keys()}
        outputs['Sigma_brls'] = xndarray(
            Sigma_brls,
            value_label="Sigma_BRLs",
            axes_names=['condition', 'condition2', 'voxel'],
            axes_domains=ad)
        #logger.info("Sigma_a prepared ")
        outputs['Sigma_prls'] = xndarray(
            Sigma_prls,
            value_label="Sigma_PRLs",
            axes_names=['condition', 'condition2', 'voxel'],
            axes_domains=ad)
        #logger.info("Sigma_c prepared ")
        outputs['NbIter'] = xndarray(np.array([NbIter]), value_label="NbIter")
        outputs['beta'] = xndarray(Beta,
                                   value_label="beta",
                                   axes_names=['condition'],
                                   axes_domains=domCondition)

        #logger.info("perfusion baseline prepared ")
        outputs['alpha'] = xndarray(alpha,
                                    value_label="Perf_baseline",
                                    axes_names=['voxel'])

        #logger.info("Beta prepared ")
        nbc, nbv = len(cNames), brls.shape[0]
        repeatedBeta = np.repeat(Beta, nbv).reshape(nbc, nbv)
        outputs['beta_mapped'] = xndarray(repeatedBeta,
                                          value_label="beta",
                                          axes_names=['condition', 'voxel'],
                                          axes_domains=domCondition)

        repeated_brf = np.repeat(estimated_brf, nbv).reshape(-1, nbv)
        outputs["brf_mapped"] = xndarray(repeated_brf,
                                         value_label="BRFs",
                                         axes_names=["time", "voxel"],
                                         axes_domains={"time": brf_time})

        repeated_prf = np.repeat(estimated_prf, nbv).reshape(-1, nbv)
        outputs["prf_mapped"] = xndarray(repeated_prf,
                                         value_label="PRFs",
                                         axes_names=["time", "voxel"],
                                         axes_domains={"time": prf_time})

        #logger.info("beta mapped prepared ")
        outputs['roi_mask'] = xndarray(np.zeros(nbv) + roiData.get_roi_id(),
                                       value_label="ROI",
                                       axes_names=['voxel'])

        #logger.info("ROI mask prepared ")
        mixtpB = np.zeros((roiData.nbConditions, self.nbClasses, 2))
        mixtpB[:, :, 0] = mu_Ma
        mixtpB[:, :, 1] = sigma_Ma**2
        mixtpP = np.zeros((roiData.nbConditions, self.nbClasses, 2))
        mixtpP[:, :, 0] = mu_Mc
        mixtpP[:, :, 1] = sigma_Mc**2
        an = ['condition', 'Act_class', 'component']
        ad = {
            'Act_class': ['inactiv', 'activ'],
            'condition': cNames,
            'component': ['mean', 'var']
        }
        outputs['mixt_pB'] = xndarray(mixtpB, axes_names=an, axes_domains=ad)
        outputs['mixt_pP'] = xndarray(mixtpP, axes_names=an, axes_domains=ad)
        #logger.info("Mixture parameters prepared ")
        an = ['condition', 'Act_class', 'voxel']
        ad = {'Act_class': ['inactiv', 'activ'], 'condition': cNames}
        #logger.info("mixt params prepared ")
        outputs['labels'] = xndarray(labels,
                                     value_label="Labels",
                                     axes_names=an,
                                     axes_domains=ad)
        #logger.info("labels prepared ")
        outputs['noiseVar'] = xndarray(noiseVar,
                                       value_label="noiseVar",
                                       axes_names=['voxel'])
        #logger.info("noise variance prepared ")
        if self.estimateLA:
            outputs['drift_coeff'] = xndarray(L,
                                              value_label="Drift",
                                              axes_names=['coeff', 'voxel'])
            outputs['drift'] = xndarray(PL,
                                        value_label="Delta BOLD",
                                        axes_names=['time', 'voxel'])
            logger.info("drift prepared ")
        logger.info("outputs prepared ")

        if (len(self.contrasts) > 0) and self.computeContrast:
            #keys = list((self.contrasts[nc]) for nc in self.contrasts)
            domContrast = {'contrast': self.contrasts.keys()}
            outputs['contrastsA'] = xndarray(CONTRAST_A,
                                             value_label="Contrast_A",
                                             axes_names=['voxel', 'contrast'],
                                             axes_domains=domContrast)
            outputs['contrastsC'] = xndarray(CONTRAST_C,
                                             value_label="Contrast_C",
                                             axes_names=['voxel', 'contrast'],
                                             axes_domains=domContrast)
            c = xndarray(CONTRASTVAR_A,
                         value_label="Contrasts_Variance_A",
                         axes_names=['voxel', 'contrast'],
                         axes_domains=domContrast)
            outputs['contrasts_variance_a'] = c
            outputs['ncontrasts_a'] = xndarray(
                CONTRAST_A / CONTRASTVAR_A**.5,
                value_label="Normalized Contrast A",
                axes_names=['voxel', 'contrast'],
                axes_domains=domContrast)
            c = xndarray(CONTRASTVAR_C,
                         value_label="Contrasts_Variance_C",
                         axes_names=['voxel', 'contrast'],
                         axes_domains=domContrast)
            outputs['contrasts_variance_c'] = c
            outputs['ncontrasts_c'] = xndarray(
                CONTRAST_C / CONTRASTVAR_C**.5,
                value_label="Normalized Contrast C",
                axes_names=['voxel', 'contrast'],
                axes_domains=domContrast)

        #######################################################################
        # CONVERGENCE
        if 1:
            cTimeMean = cTime[-1] / np.float(NbIter)
            logger.info("Saving convergence... ")

            axes_names = ['duration']
            ax = (np.arange(self.nItMax) + 1) * cTimeMean
            ax[:len(cTime)] = cTime
            ad = {'duration': ax}

            outName = 'convergence_Labels'
            c = np.zeros(self.nItMax)  # -.001 #
            c[:len(cZ)] = cZ
            outputs[outName] = xndarray(c,
                                        axes_names=axes_names,
                                        axes_domains=ad,
                                        value_label='Conv_Criterion_Z')
            outName = 'convergence_BRF'
            #ad = {'Conv_Criterion':np.arange(len(cH))}
            c = np.zeros(self.nItMax)  # -.001 #
            c[:len(cH)] = cH
            outputs[outName] = xndarray(c,
                                        axes_names=axes_names,
                                        axes_domains=ad,
                                        value_label='Conv_Criterion_H')
            outName = 'convergence_BRL'
            c = np.zeros(self.nItMax)  # -.001 #
            c[:len(cA)] = cA
            #ad = {'Conv_Criterion':np.arange(len(cA))}
            outputs[outName] = xndarray(c,
                                        axes_names=axes_names,
                                        axes_domains=ad,
                                        value_label='Conv_Criterion_A')
            outName = 'convergence_PRF'
            #ad = {'Conv_Criterion':np.arange(len(cH))}
            c = np.zeros(self.nItMax)  # -.001 #
            c[:len(cG)] = cG
            outputs[outName] = xndarray(c,
                                        axes_names=axes_names,
                                        axes_domains=ad,
                                        value_label='Conv_Criterion_G')
            outName = 'convergence_PRL'
            c = np.zeros(self.nItMax)  # -.001 #
            c[:len(cC)] = cC
            #ad = {'Conv_Criterion':np.arange(len(cA))}
            outputs[outName] = xndarray(c,
                                        axes_names=axes_names,
                                        axes_domains=ad,
                                        value_label='Conv_Criterion_C')
            outName = 'convergence_FE'
            c = np.zeros(self.nItMax)  # -.001 #
            c[:len(FE)] = FE
            outputs[outName] = xndarray(c,
                                        axes_names=axes_names,
                                        axes_domains=ad,
                                        value_label='Conv_Criterion_FE')
            logger.info("Convergence saved ")

        #######################################################################
        # SIMULATION
        if self.simulation is not None and 0:
            logger.info("Prepare parameters to compare if simulation")
            M = labels.shape[0]
            K = labels.shape[1]
            J = labels.shape[2]

            true_labels = np.zeros((M, J))
            for m in xrange(0, M):
                true_labels[
                    m, :] = roiData.simulation[0]['labels'][m].flatten()

            newlabels = np.reshape(labels[:, 1, :], (M, J))
            #true_labels = roiData.simulation[0]['labels']
            #newlabels = labels

            se = []
            sp = []
            size = np.prod(labels.shape)
            for i in xrange(0, 2):  # (0, M):
                se0, sp0, auc = roc_curve(newlabels[i, :].tolist(),
                                          true_labels[i, :].tolist())
                se.append(se0)
                sp.append(sp0)
                size = min(size, len(sp0))
            SE = np.zeros((M, size), dtype=float)
            SP = np.zeros((M, size), dtype=float)
            for i in xrange(0, 2):  # M):
                tmp = np.array(se[i])
                SE[i, :] = tmp[0:size]
                tmp = np.array(sp[i])
                SP[i, :] = tmp[0:size]
            sensData, specData = SE, SP
            axes_names = ['1-specificity', 'condition']
            outName = 'ROC_audio'
            #ad = {'1-specificity': specData[0], 'condition': cNames}
            outputs[outName] = xndarray(
                sensData,
                axes_names=axes_names,
                #axes_domains=ad,
                value_label='sensitivity')

            m = specData[0].min()
            import matplotlib.font_manager as fm
            import matplotlib.pyplot as plt
            plt.figure(200)
            plt.plot(sensData[0],
                     specData[0],
                     '--',
                     color='k',
                     linewidth=2.0,
                     label='m=1')
            plt.hold(True)
            plt.plot(sensData[1],
                     specData[1],
                     color='k',
                     linewidth=2.0,
                     label='m=2')
            # legend(('audio','video'))
            plt.xticks(color='k', size=14, fontweight='bold')
            plt.yticks(color='k', size=14, fontweight='bold')
            #xlabel('1 - Specificity',fontsize=16,fontweight='bold')
            # ylabel('Sensitivity',fontsize=16,fontweight='bold')
            prop = fm.FontProperties(size=14, weight='bold')
            plt.legend(loc=1, prop=prop)
            plt.axis([0., 1., m, 1.02])

            true_labels = roiData.simulation[0]['labels']
            true_brls = roiData.simulation[0]['nrls']
            true_prls = roiData.simulation[0]['prls']
            true_brf = roiData.simulation[0]['hrf'][:, 0]
            true_prf = roiData.simulation[0]['prf'][:, 0]
            true_drift = roiData.simulation[0]['drift']
            true_noise = roiData.simulation[0]['noise']

            if simu is not None:
                logger.info("Check parameters errors")
                self.finalizeEstimation(true_labels, newlabels, nvox, true_brf,
                                        estimated_brf, true_prf, estimated_prf,
                                        true_brls, brls.T, true_prls, prls.T,
                                        true_drift, PL, L, true_noise,
                                        noiseVar)

        # END SIMULATION
        #######################################################################
        d = {'parcel_size': np.array([nvox])}
        outputs['analysis_duration'] = xndarray(np.array(
            [self.analysis_duration]),
                                                axes_names=['parcel_size'],
                                                axes_domains=d)
        """outputs['rerror'] = xndarray(np.array(  rerror),
                                                axes_names=['parcel_size'])"""
        return outputs
コード例 #17
0
ファイル: jde_vem_analysis.py プロジェクト: zddzxxsmile/pyhrf
def main():
    """Run the JDE VEM analysis when calling the script.

    Reads the module-level ``config`` dict, loads the BOLD data and
    paradigm, runs the JDE VEM treatment (optionally in parallel), and
    optionally saves a JSON snapshot of the processing configuration
    with all paths canonicalized.

    Exits with status 1 if the output directory cannot be created.
    """

    start_time = time.time()

    # Create the output directory up front so we fail fast on bad paths.
    if not os.path.isdir(config["output_dir"]):
        try:
            os.makedirs(config["output_dir"])
        except OSError as e:
            print("Output directory could not be created.\n"
                  "Error was: {}".format(e.strerror))
            sys.exit(1)

    bold_data = FmriData.from_vol_files(
        mask_file=config["parcels_file"], paradigm_csv_file=config["onsets_file"],
        bold_files=config["bold_data_file"], tr=config["tr"]
    )

    compute_contrasts, contrasts_def = load_contrasts_definitions(config["def_contrasts_file"])

    jde_vem_analyser = JDEVEMAnalyser(
        hrfDuration=config["hrf_duration"], sigmaH=config["sigma_h"], fast=True,
        computeContrast=compute_contrasts, nbClasses=2, PLOT=False,
        nItMax=config["nb_iter_max"], nItMin=config["nb_iter_min"], scale=False,
        beta=config["beta"], estimateSigmaH=True, estimateHRF=config["estimate_hrf"],
        TrueHrfFlag=False, HrfFilename='hrf.nii', estimateDrifts=True,
        hyper_prior_sigma_H=config["hrf_hyperprior"], dt=config["dt"], estimateBeta=True,
        contrasts=contrasts_def, simulation=False, estimateLabels=True,
        LabelsFilename=None, MFapprox=False, estimateMixtParam=True,
        constrained=False, InitVar=0.5, InitMean=2.0, MiniVemFlag=False, NbItMiniVem=5,
        zero_constraint=config["zero_constraint"], drifts_type=config["drifts_type"]
    )

    processing_jde_vem = FMRITreatment(
        fmri_data=bold_data, analyser=jde_vem_analyser,
        output_dir=config["output_dir"], make_outputs=True
    )

    if not config["parallel"]:
        processing_jde_vem.run()
    else:
        processing_jde_vem.run(parallel="local")

    if config["save_processing_config"]:
        # Canonicalize all paths in a COPY of the configuration.
        # dict(config) is only a shallow copy, so the nested list of BOLD
        # files must be rebuilt as a new list -- assigning into the old
        # list element-by-element would mutate the module-level ``config``.
        config_save = dict(config)
        config_save["bold_data_file"] = [
            os.path.abspath(bold_file)
            for bold_file in config_save["bold_data_file"]
        ]
        config_save["parcels_file"] = os.path.abspath(config_save["parcels_file"])
        config_save["onsets_file"] = os.path.abspath(config_save["onsets_file"])
        if config_save["def_contrasts_file"]:
            config_save["def_contrasts_file"] = os.path.abspath(config_save["def_contrasts_file"])
        config_save["output_dir"] = os.path.abspath(config_save["output_dir"])
        # Timestamped filename; spaces replaced to keep it shell-friendly.
        config_save_filename = "{}_processing.json".format(
            datetime.datetime.today()
        ).replace(" ", "_")
        config_save_path = os.path.join(config["output_dir"], config_save_filename)
        with open(config_save_path, 'w') as json_file:
            json.dump(config_save, json_file, sort_keys=True, indent=4)

    print("")
    print("Total computation took: {} seconds".format(format_duration(time.time() - start_time)))
コード例 #18
0
ファイル: vb_jde_analyser.py プロジェクト: ainafp/pyhrf
    def analyse_roi(self, roiData):
        """Run the fast VB-JDE estimation on one region of interest.

        Dispatches to one of the ``Main_vbjde_*`` solvers depending on
        ``self.fast`` / ``self.constrained`` / ``self.estimateDrifts``,
        then packs the estimates (HRF, NRLs, labels, noise variance,
        drifts, contrasts, convergence criteria) into ``xndarray``
        outputs keyed by name.

        Parameters
        ----------
        roiData :
            FmriRoiData-like object (see pyhrf.core.FmriRoiData);
            provides ``bold`` (nscans, nvoxels), the paradigm onsets,
            the repetition time and the voxel neighbourhood graph.

        Returns
        -------
        None
            NOTE(review): the prepared ``outputs`` dict is built but the
            final ``return`` discards it -- confirm this is intended.
        """
        #roiData is of type FmriRoiData, see pyhrf.core.FmriRoiData
        # roiData.bold : numpy array of shape
        ## BOLD has shape (nscans, nvoxels)

        #roiData.graph #list of neighbours
        data = roiData.bold
        Onsets = roiData.get_joined_onsets()
        TR = roiData.tr
        #K = 2 #number of classes
        beta = self.beta
        scale = 1#roiData.nbVoxels
        nvox = roiData.get_nb_vox_in_mask()
        if self.scale:
            scale = nvox
        rid = roiData.get_roi_id()
        logger.info("JDE VEM - roi %d, nvox=%d, nconds=%d, nItMax=%d", rid,
                    nvox, len(Onsets), self.nItMax)

        # Drop the placeholder contrast inserted by the default config.
        self.contrasts.pop('dummy_example', None)
        cNames = roiData.paradigm.get_stimulus_names()
        graph = roiData.get_graph()

        t_start = time()

        # --- Estimation: pick the solver variant --------------------------
        if self.fast:
            if not self.constrained:
                logger.info("fast VEM with drift estimation")

                NbIter, nrls, estimated_hrf, \
                labels, noiseVar, mu_k, sigma_k, \
                Beta, L, PL, CONTRAST, CONTRASTVAR, \
                cA,cH,cZ,cAH,cTime,cTimeMean, Sigma_nrls, \
                StimuIndSignal = Main_vbjde_Extension_stable(graph,data,Onsets, \
                                        self.hrfDuration, self.nbClasses,TR,
                                        beta,self.dt,scale,self.estimateSigmaH,
                                        self.sigmaH,self.nItMax, self.nItMin,
                                        self.estimateBeta,self.PLOT,
                                        self.contrasts,self.computeContrast,
                                        self.hyper_prior_sigma_H,self.estimateHRF,
                                        self.TrueHrfFlag, self.HrfFilename,
                                        self.estimateLabels,self.LabelsFilename,
                                        self.MFapprox,self.InitVar,self.InitMean,
                                        self.MiniVemFlag,self.NbItMiniVem)
            else:
                logger.info("fast VEM with drift estimation and a constraint")

                NbIter, nrls, estimated_hrf, \
                labels, noiseVar, mu_k, sigma_k, \
                Beta, L, PL, CONTRAST, CONTRASTVAR, \
                cA,cH,cZ,cAH,cTime,cTimeMean, \
                Sigma_nrls, StimuIndSignal,\
                FreeEnergy = Main_vbjde_Extension_constrained(graph,data,Onsets, \
                                        self.hrfDuration, self.nbClasses,TR,
                                        beta,self.dt,scale,self.estimateSigmaH,
                                        self.sigmaH,self.nItMax, self.nItMin,
                                        self.estimateBeta,self.PLOT,
                                        self.contrasts,self.computeContrast,
                                        self.hyper_prior_sigma_H,self.estimateHRF,
                                        self.TrueHrfFlag, self.HrfFilename,
                                        self.estimateLabels,self.LabelsFilename,
                                        self.MFapprox,self.InitVar,self.InitMean,
                                        self.MiniVemFlag,self.NbItMiniVem)
        else:
            # if not self.fast
            if self.estimateDrifts:
                logger.info("not fast VEM")
                logger.info("NOT WORKING")
                nrls, estimated_hrf, \
                labels, noiseVar, mu_k, \
                sigma_k, Beta, L, \
                PL = Main_vbjde_Python_constrained(graph,data,Onsets,
                                       self.hrfDuration,self.nbClasses,
                                       TR,beta,self.dt,scale,
                                       self.estimateSigmaH,self.sigmaH,
                                       self.nItMax,self.nItMin,
                                       self.estimateBeta,self.PLOT)

        # Plot analysis duration
        self.analysis_duration = time() - t_start
        logger.info('JDE VEM analysis took: %s',
                    format_duration(self.analysis_duration))


        # --- Output packing (only available for the fast solvers) --------
        if self.fast:
            ### OUTPUTS: Pack all outputs within a dict
            outputs = {}
            hrf_time = np.arange(len(estimated_hrf)) * self.dt

            axes_names = ['iteration']
            """axes_domains = {'iteration':np.arange(FreeEnergy.shape[0])}
            outputs['FreeEnergy'] = xndarray(FreeEnergy,
                                        axes_names=axes_names,
                                        axes_domains=axes_domains)
            """
            outputs['hrf'] = xndarray(estimated_hrf, axes_names=['time'],
                                axes_domains={'time':hrf_time},
                                value_label="HRF")

            domCondition = {'condition':cNames}
            outputs['nrls'] = xndarray(nrls.transpose(),value_label="NRLs",
                                    axes_names=['condition','voxel'],
                                    axes_domains=domCondition)

            ad = {'condition':cNames,'condition2':Onsets.keys()}

            outputs['Sigma_nrls'] = xndarray(Sigma_nrls,value_label="Sigma_NRLs",
                                            axes_names=['condition','condition2','voxel'],
                                            axes_domains=ad)

            outputs['NbIter'] = xndarray(np.array([NbIter]),value_label="NbIter")

            outputs['beta'] = xndarray(Beta,value_label="beta",
                                    axes_names=['condition'],
                                    axes_domains=domCondition)

            # Replicate the per-condition beta over voxels for mapping.
            nbc, nbv = len(cNames), nrls.shape[0]
            repeatedBeta = np.repeat(Beta, nbv).reshape(nbc, nbv)
            outputs['beta_mapped'] = xndarray(repeatedBeta,value_label="beta",
                                            axes_names=['condition','voxel'],
                                            axes_domains=domCondition)

            outputs['roi_mask'] = xndarray(np.zeros(nbv)+roiData.get_roi_id(),
                                        value_label="ROI",
                                        axes_names=['voxel'])

            h = estimated_hrf
            nrls = nrls.transpose()

            nvox = nrls.shape[1]
            nbconds = nrls.shape[0]
            ah = np.zeros((h.shape[0], nvox, nbconds))

            # Gaussian mixture parameters per condition and class:
            # component 0 = mean, component 1 = variance (sigma**2).
            mixtp = np.zeros((roiData.nbConditions, self.nbClasses, 2))
            mixtp[:, :, 0] = mu_k
            mixtp[:, :, 1] = sigma_k**2

            an = ['condition','Act_class','component']
            ad = {'Act_class':['inactiv','activ'],
                'condition': cNames,
                'component':['mean','var']}
            outputs['mixt_p'] = xndarray(mixtp, axes_names=an, axes_domains=ad)

            ad = {'class' : ['inactiv','activ'],
                'condition': cNames,
                }
            outputs['labels'] = xndarray(labels,value_label="Labels",
                                    axes_names=['condition','class','voxel'],
                                    axes_domains=ad)
            outputs['noiseVar'] = xndarray(noiseVar,value_label="noiseVar",
                                        axes_names=['voxel'])
            if self.estimateDrifts:
                outputs['drift_coeff'] = xndarray(L,value_label="Drift",
                                axes_names=['coeff','voxel'])
                outputs['drift'] = xndarray(PL,value_label="Delta BOLD",
                            axes_names=['time','voxel'])
            if (len(self.contrasts) >0) and self.computeContrast:
                #keys = list((self.contrasts[nc]) for nc in self.contrasts)
                domContrast = {'contrast':self.contrasts.keys()}
                outputs['contrasts'] = xndarray(CONTRAST, value_label="Contrast",
                                            axes_names=['voxel','contrast'],
                                            axes_domains=domContrast)
                #print 'contrast output:'
                #print outputs['contrasts'].descrip()

                c = xndarray(CONTRASTVAR, value_label="Contrasts_Variance",
                        axes_names=['voxel','contrast'],
                        axes_domains=domContrast)
                outputs['contrasts_variance'] = c

                # Contrast normalized by its standard deviation.
                outputs['ncontrasts'] = xndarray(CONTRAST/CONTRASTVAR**.5,
                                            value_label="Normalized Contrast",
                                            axes_names=['voxel','contrast'],
                                            axes_domains=domContrast)

            ################################################################################
            # CONVERGENCE
            # Convergence criteria are padded with zeros up to nItMax and
            # indexed by cumulative iteration time.

            axes_names = ['duration']
            outName = 'Convergence_Labels'
            ax = np.arange(self.nItMax)*cTimeMean
            ax[:len(cTime)] = cTime
            ad = {'duration':ax}
            c = np.zeros(self.nItMax) #-.001 #
            c[:len(cZ)] = cZ
            outputs[outName] = xndarray(c, axes_names=axes_names,
                                        axes_domains=ad,
                                        value_label='Conv_Criterion_Z')
            outName = 'Convergence_HRF'
            #ad = {'Conv_Criterion':np.arange(len(cH))}
            c = np.zeros(self.nItMax) #-.001 #
            c[:len(cH)] = cH
            outputs[outName] = xndarray(c, axes_names=axes_names,
                                        axes_domains=ad,
                                        value_label='Conv_Criterion_H')
            outName = 'Convergence_NRL'
            c = np.zeros(self.nItMax)# -.001 #
            c[:len(cA)] = cA
            #ad = {'Conv_Criterion':np.arange(len(cA))}
            outputs[outName] = xndarray(c, axes_names=axes_names,
                                        axes_domains=ad,
                                        value_label='Conv_Criterion_A')

        ################################################################################
        # SIMULATION
        # When ground truth is available, compute ROC curves of the
        # estimated activation labels against the simulated ones.

        if self.simulation and self.fast:
            from pyhrf.stats import compute_roc_labels
            labels_vem_audio = roiData.simulation['labels'][0]
            labels_vem_video = roiData.simulation['labels'][1]

            M = labels.shape[0]
            K = labels.shape[1]
            J = labels.shape[2]
            true_labels = np.zeros((K,J))
            true_labels[0,:] = reshape(labels_vem_audio,(J))
            true_labels[1,:] = reshape(labels_vem_video,(J))
            newlabels = np.reshape(labels[:,1,:],(M,J))
            se = []
            sp = []
            size = prod(labels.shape)

            # Truncate all curves to the shortest one so they stack.
            for i in xrange(0,M):
                se0,sp0, auc = roc_curve(newlabels[i,:].tolist(),
                                         true_labels[i,:].tolist())
                se.append(se0)
                sp.append(sp0)
                size = min(size,len(sp0))
            SE = np.zeros((M,size),dtype=float)
            SP = np.zeros((M,size),dtype=float)
            for i in xrange(0,M):
                tmp = np.array(se[i])
                SE[i,:] = tmp[0:size]
                tmp = np.array(sp[i])
                SP[i,:] = tmp[0:size]

            sensData, specData = SE, SP
            axes_names = ['condition','1-specificity']
            outName = 'ROC_audio'
            ad = {'1-specificity':specData[0],'condition':cNames}
            outputs[outName] = xndarray(sensData, axes_names=axes_names,
                                      axes_domains=ad,
                                      value_label='sensitivity')

            # Plot the ROC curves (pylab-style global figure/plot calls).
            m = specData[0].min()
            import matplotlib.font_manager as fm
            figure(200)
            plot(sensData[0],specData[0],'--',color='k',linewidth=2.0,label='m=1')
            hold(True)
            plot(sensData[1],specData[1],color='k',linewidth=2.0,label='m=2')
            #legend(('audio','video'))
            xticks(color = 'k', size = 14,fontweight='bold')
            yticks(color = 'k', size = 14,fontweight='bold')
            #xlabel('1 - Specificity',fontsize=16,fontweight='bold')
            #ylabel('Sensitivity',fontsize=16,fontweight='bold')
            prop = fm.FontProperties(size=14,weight='bold')
            legend(loc=1,prop=prop)
            axis([0., 1., m, 1.02])


            from pyhrf.stats import compute_roc_labels
            # Simulation data can be either an object with attributes or
            # a dict with 'labels'/'nrls' keys.
            if hasattr(roiData.simulation, 'nrls'):
                true_labels = roiData.simulation.nrls.labels
                true_nrls = roiData.simulation.nrls.data
            elif isinstance(roiData.simulation, dict) and \
                    roiData.simulation.has_key('labels') and \
                    roiData.simulation.has_key('nrls') :
                true_labels = roiData.simulation['labels']
                true_nrls = roiData.simulation['nrls']
            else:
                raise Exception('Simulation can not be retrieved from %s' \
                                    %str(roiData.simulation))

            domCondition = {'condition':cNames}
            outputs['Truenrls'] = xndarray(true_nrls,value_label="True_nrls",
                                         axes_names=['condition','voxel'],
                                         axes_domains=domCondition)
            M = labels.shape[0]
            K = labels.shape[1]
            J = labels.shape[2]

            newlabels = np.reshape(labels[:,1,:],(M,J))

            # NOTE(review): this second ROC pass appends to the se/sp
            # lists from the first pass and its SE/SP arrays are never
            # stored in outputs -- confirm this is intentional.
            for i in xrange(0,M):
                se0,sp0, auc = roc_curve(newlabels[i,:].tolist(),
                                         true_labels[i,:].tolist())
                se.append(se0)
                sp.append(sp0)
                size = min(size,len(sp0))
            SE = np.zeros((M,size),dtype=float)
            SP = np.zeros((M,size),dtype=float)
            for i in xrange(0,M):
                tmp = np.array(se[i])
                SE[i,:] = tmp[0:size]
                tmp = np.array(sp[i])
                SP[i,:] = tmp[0:size]

        # END SIMULATION
        ##########################################################################
        if self.fast:
            d = {'parcel_size':np.array([nvox])}
            outputs['analysis_duration'] = xndarray(np.array([self.analysis_duration]),
                                                axes_names=['parcel_size'],
                                                axes_domains=d)

        return #outputs  NOTE(review): 'outputs' is built above but discarded here -- confirm intended
コード例 #19
0
    def analyse_roi(self, roiData):
        # roiData is of type FmriRoiData, see pyhrf.core.FmriRoiData
        # roiData.bold : numpy array of shape
        # BOLD has shape (nscans, nvoxels)
        # roiData.graph #list of neighbours
        n_scan_allsession, nvox = roiData.bold.shape
        n_scan = n_scan_allsession / self.n_session
        data0 = roiData.bold.reshape(self.n_session, n_scan, nvox)
        data = np.zeros_like(data0)
        for s in xrange(self.n_session):
            data_mean = np.mean(data0[s, :, :])
            data_range = (np.max(data0[s, :, :]) - np.min(data0[s, :, :]))
            data[s, :, :] = (data0[s, :, :] - data_mean) * 100 / data_range
        Onsets = roiData.paradigm.get_joined_onsets_dim()
        durations = roiData.paradigm.get_joined_durations_dim()
        TR = roiData.tr
        beta = self.beta
        scale = 1                   # roiData.nbVoxels
        #nvox = roiData.get_nb_vox_in_mask()
        if self.scale:
            scale = nvox
        rid = roiData.get_roi_id()
        logger.info("JDE VEM - roi %d, nvox=%d, nconds=%d, nItMax=%d", rid,
                    nvox, len(Onsets), self.nItMax)

        #self.contrasts.pop('dummy_example', None)
        cNames = roiData.paradigm.get_stimulus_names()
        graph = roiData.get_graph()
        idx_tag1 = roiData.get_extra_data('asl_first_tag_scan_idx', 0)

        t_start = time()

        logger.info("fast VEM with drift estimation and a constraint")
        try:
            simu = roiData.simulation[0]
        except:
            try:
                simu = roiData.simulation
            except:
                simu = None

        if self.physio:
            NbIter, brls, estimated_brf, prls, estimated_prf, labels, \
            noiseVar, mu_Ma, sigma_Ma, mu_Mc, sigma_Mc, Beta, L, PL, alpha,\
            Sigma_brls, Sigma_prls, Sigma_brf, Sigma_prf, rerror, \
            CONTRAST_A, CONTRASTVAR_A, CONTRAST_C, CONTRASTVAR_C, \
            cA, cH, cC, cG, cZ, cAH, cCG, cTime, FE = Main_vbjde_physio(
                                       graph, data, Onsets, durations, self.hrfDuration,
                                       self.nbClasses, TR, beta, self.dt, scale=scale,
                                       estimateSigmaG=self.estimateSigmaG,
                                       sigmaH=self.sigmaH, sigmaG=self.sigmaG,
                                       gamma_h=self.gammaH, gamma_g=self.gammaG,
                                       NitMax=self.nItMax, NitMin=self.nItMin,
                                       estimateSigmaH=self.estimateSigmaH,
                                       estimateBeta=self.estimateBeta, PLOT=self.PLOT,
                                       contrasts=self.contrasts,
                                       computeContrast=self.computeContrast,
                                       idx_first_tag=idx_tag1,
                                       simulation=simu, sigmaMu=self.sigmaMu,
                                       estimateH=self.estimateH,
                                       estimateG=self.estimateG,
                                       estimateA=self.estimateA,
                                       estimateC=self.estimateC,
                                       estimateNoise=self.estimateNoise,
                                       estimateMP=self.estimateMixtParam,
                                       estimateZ=self.estimateLabels,
                                       estimateLA=self.estimateLA,
                                       constraint=self.constrained,
                                       positivity=self.positivity,
                                       use_hyperprior=self.use_hyperprior,
                                       phy_params=self.phy_params,
                                       prior=self.prior, zc=self.zc)

        # Plot analysis duration
        self.analysis_duration = time() - t_start
        logger.info('JDE VEM analysis took: %s',
                    format_duration(self.analysis_duration))

        # OUTPUTS: Pack all outputs within a dict
        logger.info("Preparing outputs... ")
        outputs = {}
        brf_time = np.arange(len(estimated_brf)) * self.dt
        outputs['brf'] = xndarray(estimated_brf, axes_names=['time'],
                                  axes_domains={'time': brf_time},
                                  value_label="BRF")
        #logger.info("BRF prepared ")
        domCondition = {'condition': cNames}
        outputs['brls'] = xndarray(brls.T, value_label="BRLs",
                                   axes_names=['condition', 'voxel'],
                                   axes_domains=domCondition)
        #logger.info("BRLs prepared ")
        prf_time = np.arange(len(estimated_prf)) * self.dt
        outputs['prf'] = xndarray(estimated_prf, axes_names=['time'],
                                  axes_domains={'time': prf_time},
                                  value_label="PRF")
        #logger.info("PRF prepared ")
        outputs['prls'] = xndarray(prls.T, value_label="PRLs",
                                   axes_names=['condition', 'voxel'],
                                   axes_domains=domCondition)
        #logger.info("PRLs prepared ")

        outputs['Sigma_brf'] = xndarray(Sigma_brf, value_label="Sigma_BRF")
        #logger.info("Sigma_BRF prepared ")
        outputs['Sigma_prf'] = xndarray(Sigma_prf, value_label="Sigma_PRF")
        #logger.info("Sigma_PRF prepared ")

        ad = {'condition': cNames, 'condition2': Onsets.keys()}
        outputs['Sigma_brls'] = xndarray(Sigma_brls, value_label="Sigma_BRLs",
                                         axes_names=['condition', 'condition2',
                                                     'voxel'],
                                         axes_domains=ad)
        #logger.info("Sigma_a prepared ")
        outputs['Sigma_prls'] = xndarray(Sigma_prls, value_label="Sigma_PRLs",
                                         axes_names=['condition', 'condition2',
                                                     'voxel'],
                                         axes_domains=ad)
        #logger.info("Sigma_c prepared ")
        outputs['NbIter'] = xndarray(np.array([NbIter]), value_label="NbIter")
        outputs['beta'] = xndarray(Beta, value_label="beta",
                                   axes_names=['condition'],
                                   axes_domains=domCondition)

        #logger.info("perfusion baseline prepared ")
        outputs['alpha'] = xndarray(alpha, value_label="Perf_baseline",
                                          axes_names=['voxel'])

        #logger.info("Beta prepared ")
        nbc, nbv = len(cNames), brls.shape[0]
        repeatedBeta = np.repeat(Beta, nbv).reshape(nbc, nbv)
        outputs['beta_mapped'] = xndarray(repeatedBeta, value_label="beta",
                                          axes_names=['condition', 'voxel'],
                                          axes_domains=domCondition)

        repeated_brf = np.repeat(estimated_brf, nbv).reshape(-1, nbv)
        outputs["brf_mapped"] = xndarray(repeated_brf, value_label="BRFs",
                                         axes_names=["time", "voxel"],
                                         axes_domains={"time": brf_time})

        repeated_prf = np.repeat(estimated_prf, nbv).reshape(-1, nbv)
        outputs["prf_mapped"] = xndarray(repeated_prf, value_label="PRFs",
                                         axes_names=["time", "voxel"],
                                         axes_domains={"time": prf_time})

        #logger.info("beta mapped prepared ")
        outputs['roi_mask'] = xndarray(np.zeros(nbv) + roiData.get_roi_id(),
                                       value_label="ROI",
                                       axes_names=['voxel'])

        #logger.info("ROI mask prepared ")
        mixtpB = np.zeros((roiData.nbConditions, self.nbClasses, 2))
        mixtpB[:, :, 0] = mu_Ma
        mixtpB[:, :, 1] = sigma_Ma ** 2
        mixtpP = np.zeros((roiData.nbConditions, self.nbClasses, 2))
        mixtpP[:, :, 0] = mu_Mc
        mixtpP[:, :, 1] = sigma_Mc ** 2
        an = ['condition', 'Act_class', 'component']
        ad = {'Act_class': ['inactiv', 'activ'],
              'condition': cNames,
              'component': ['mean', 'var']}
        outputs['mixt_pB'] = xndarray(mixtpB, axes_names=an, axes_domains=ad)
        outputs['mixt_pP'] = xndarray(mixtpP, axes_names=an, axes_domains=ad)
        #logger.info("Mixture parameters prepared ")
        an = ['condition', 'Act_class', 'voxel']
        ad = {'Act_class': ['inactiv', 'activ'],
              'condition': cNames}
        #logger.info("mixt params prepared ")
        outputs['labels'] = xndarray(labels, value_label="Labels",
                                     axes_names=an, axes_domains=ad)
        #logger.info("labels prepared ")
        outputs['noiseVar'] = xndarray(noiseVar, value_label="noiseVar",
                                       axes_names=['voxel'])
        #logger.info("noise variance prepared ")
        if self.estimateLA:
            outputs['drift_coeff'] = xndarray(L, value_label="Drift",
                                              axes_names=['coeff', 'voxel'])
            outputs['drift'] = xndarray(PL, value_label="Delta BOLD",
                                        axes_names=['time', 'voxel'])
            logger.info("drift prepared ")
        logger.info("outputs prepared ")

        if (len(self.contrasts) >0) and self.computeContrast:
            #keys = list((self.contrasts[nc]) for nc in self.contrasts)
            domContrast = {'contrast':self.contrasts.keys()}
            outputs['contrastsA'] = xndarray(CONTRAST_A, value_label="Contrast_A",
                                            axes_names=['voxel','contrast'],
                                            axes_domains=domContrast)
            outputs['contrastsC'] = xndarray(CONTRAST_C, value_label="Contrast_C",
                                            axes_names=['voxel','contrast'],
                                            axes_domains=domContrast)
            c = xndarray(CONTRASTVAR_A, value_label="Contrasts_Variance_A",
                         axes_names=['voxel','contrast'],
                         axes_domains=domContrast)
            outputs['contrasts_variance_a'] = c
            outputs['ncontrasts_a'] = xndarray(CONTRAST_A/CONTRASTVAR_A**.5,
                                             value_label="Normalized Contrast A",
                                             axes_names=['voxel','contrast'],
                                             axes_domains=domContrast)
            c = xndarray(CONTRASTVAR_C, value_label="Contrasts_Variance_C",
                         axes_names=['voxel','contrast'],
                         axes_domains=domContrast)
            outputs['contrasts_variance_c'] = c
            outputs['ncontrasts_c'] = xndarray(CONTRAST_C/CONTRASTVAR_C**.5,
                                             value_label="Normalized Contrast C",
                                             axes_names=['voxel','contrast'],
                                             axes_domains=domContrast)

        #######################################################################
        # CONVERGENCE
        if 1:
            cTimeMean = cTime[-1] / np.float(NbIter)
            logger.info("Saving convergence... ")

            axes_names = ['duration']
            ax = (np.arange(self.nItMax) + 1) * cTimeMean
            ax[:len(cTime)] = cTime
            ad = {'duration': ax}

            outName = 'convergence_Labels'
            c = np.zeros(self.nItMax)  # -.001 #
            c[:len(cZ)] = cZ
            outputs[outName] = xndarray(c, axes_names=axes_names,
                                        axes_domains=ad,
                                        value_label='Conv_Criterion_Z')
            outName = 'convergence_BRF'
            #ad = {'Conv_Criterion':np.arange(len(cH))}
            c = np.zeros(self.nItMax)   # -.001 #
            c[:len(cH)] = cH
            outputs[outName] = xndarray(c, axes_names=axes_names,
                                        axes_domains=ad,
                                        value_label='Conv_Criterion_H')
            outName = 'convergence_BRL'
            c = np.zeros(self.nItMax)  # -.001 #
            c[:len(cA)] = cA
            #ad = {'Conv_Criterion':np.arange(len(cA))}
            outputs[outName] = xndarray(c, axes_names=axes_names,
                                        axes_domains=ad,
                                        value_label='Conv_Criterion_A')
            outName = 'convergence_PRF'
            #ad = {'Conv_Criterion':np.arange(len(cH))}
            c = np.zeros(self.nItMax)   # -.001 #
            c[:len(cG)] = cG
            outputs[outName] = xndarray(c, axes_names=axes_names,
                                        axes_domains=ad,
                                        value_label='Conv_Criterion_G')
            outName = 'convergence_PRL'
            c = np.zeros(self.nItMax)  # -.001 #
            c[:len(cC)] = cC
            #ad = {'Conv_Criterion':np.arange(len(cA))}
            outputs[outName] = xndarray(c, axes_names=axes_names,
                                        axes_domains=ad,
                                        value_label='Conv_Criterion_C')
            outName = 'convergence_FE'
            c = np.zeros(self.nItMax)  # -.001 #
            c[:len(FE)] = FE
            outputs[outName] = xndarray(c, axes_names=axes_names,
                                        axes_domains=ad,
                                        value_label='Conv_Criterion_FE')
            logger.info("Convergence saved ")

        #######################################################################
        # SIMULATION
        if self.simulation is not None and 0:
            logger.info("Prepare parameters to compare if simulation")
            M = labels.shape[0]
            K = labels.shape[1]
            J = labels.shape[2]

            true_labels = np.zeros((M, J))
            for m in xrange(0,M):
                true_labels[m, :] = roiData.simulation[0]['labels'][m].flatten()

            newlabels = np.reshape(labels[:, 1, :], (M, J))
            #true_labels = roiData.simulation[0]['labels']
            #newlabels = labels

            se = []
            sp = []
            size = np.prod(labels.shape)
            for i in xrange(0, 2):  # (0, M):
                se0, sp0, auc = roc_curve(newlabels[i, :].tolist(),
                                          true_labels[i, :].tolist())
                se.append(se0)
                sp.append(sp0)
                size = min(size, len(sp0))
            SE = np.zeros((M, size), dtype=float)
            SP = np.zeros((M, size), dtype=float)
            for i in xrange(0, 2):  # M):
                tmp = np.array(se[i])
                SE[i, :] = tmp[0:size]
                tmp = np.array(sp[i])
                SP[i, :] = tmp[0:size]
            sensData, specData = SE, SP
            axes_names = ['1-specificity', 'condition']
            outName = 'ROC_audio'
            #ad = {'1-specificity': specData[0], 'condition': cNames}
            outputs[outName] = xndarray(sensData, axes_names=axes_names,
                                        #axes_domains=ad,
                                        value_label='sensitivity')

            m = specData[0].min()
            import matplotlib.font_manager as fm
            import matplotlib.pyplot as plt
            plt.figure(200)
            plt.plot(sensData[0], specData[0], '--', color='k', linewidth=2.0,
                     label='m=1')
            plt.hold(True)
            plt.plot(sensData[1], specData[1], color='k', linewidth=2.0,
                     label='m=2')
            # legend(('audio','video'))
            plt.xticks(color='k', size=14, fontweight='bold')
            plt.yticks(color='k', size=14, fontweight='bold')
            #xlabel('1 - Specificity',fontsize=16,fontweight='bold')
            # ylabel('Sensitivity',fontsize=16,fontweight='bold')
            prop = fm.FontProperties(size=14, weight='bold')
            plt.legend(loc=1, prop=prop)
            plt.axis([0., 1., m, 1.02])

            true_labels = roiData.simulation[0]['labels']
            true_brls = roiData.simulation[0]['nrls']
            true_prls = roiData.simulation[0]['prls']
            true_brf = roiData.simulation[0]['hrf'][:, 0]
            true_prf = roiData.simulation[0]['prf'][:, 0]
            true_drift = roiData.simulation[0]['drift']
            true_noise = roiData.simulation[0]['noise']

            if simu is not None:
                logger.info("Check parameters errors")
                self.finalizeEstimation(true_labels, newlabels, nvox,
                                        true_brf, estimated_brf,
                                        true_prf, estimated_prf,
                                        true_brls, brls.T,
                                        true_prls, prls.T,
                                        true_drift, PL, L,
                                        true_noise, noiseVar)

        # END SIMULATION
        #######################################################################
        d = {'parcel_size': np.array([nvox])}
        outputs['analysis_duration'] = xndarray(np.array(
                                                [self.analysis_duration]),
                                                axes_names=['parcel_size'],
                                                axes_domains=d)
        """outputs['rerror'] = xndarray(np.array(  rerror),
                                                axes_names=['parcel_size'])"""
        return outputs
Code example #20
0
    def analyse_roi(self, roiData):
        """Run the JDE VEM analysis on a single ROI and pack the results.

        Runs either the fast VEM solver (``jde_vem_bold``) or, when
        ``self.fast`` is False and ``self.estimateDrifts`` is True, the
        (flagged as non-working) Python constrained solver, then wraps all
        estimated quantities (HRF, NRLs, labels, noise, contrasts, ...)
        into a dict of ``xndarray`` objects keyed by output name.

        Parameters
        ----------
        roiData : FmriRoiData (see pyhrf.core.FmriRoiData)
            ROI data; ``roiData.bold`` is a numpy array of shape
            (nscans, nvoxels), ``roiData.get_graph()`` the voxel
            neighbourhood graph.

        Returns
        -------
        dict of str -> xndarray
            All analysis outputs.

        NOTE(review): if ``self.fast`` is False, ``outputs`` is never
        assigned, so the final ``return outputs`` raises NameError —
        confirm whether the non-fast path is still supported.
        NOTE(review): this code is Python-2 only (``xrange``,
        ``dict.has_key``, ``dict.keys()`` used as a sequence).
        """
        #roiData is of type FmriRoiData, see pyhrf.core.FmriRoiData
        # roiData.bold : numpy array of shape
        ## BOLD has shape (nscans, nvoxels)

        #roiData.graph #list of neighbours
        data = roiData.bold
        Onsets = roiData.get_joined_onsets()
        durations = roiData.get_joined_durations()
        TR = roiData.tr
        #K = 2 #number of classes
        scale = 1#roiData.nbVoxels
        nvox = roiData.get_nb_vox_in_mask()
        if self.scale:
            scale = nvox
        rid = roiData.get_roi_id()
        logger.info("JDE VEM - roi %d, nvox=%d, nconds=%d, nItMax=%d", rid,
                    nvox, len(Onsets), self.nItMax)

        # Drop the placeholder contrast that may come from default config.
        self.contrasts.pop('dummy_example', None)
        cNames = roiData.paradigm.get_stimulus_names()
        graph = roiData.get_graph()

        t_start = time()

        if self.fast:
            logger.info("fast VEM with drift estimation"+
                        ("and a constraint"*self.constrained))
            # Large flat return tuple from the solver; underscores discard
            # unused slots. Order must match jde_vem_bold's return statement.
            (nb_iter, nrls_mean, hrf_mean, hrf_covar, labels_proba, noise_var,
             nrls_class_mean, nrls_class_var, beta, drift_coeffs, drift,
             contrasts_mean, contrasts_var, _, _, nrls_covar, _, density_ratio,
             density_ratio_cano, density_ratio_diff, density_ratio_prod,
             ppm_a_nrl, ppm_g_nrl, ppm_a_contrasts, ppm_g_contrasts,
             variation_coeff, free_energy, free_energy_crit, beta_list,
             delay_of_response, delay_of_undershoot, dispersion_of_response,
             dispersion_of_undershoot, ratio_resp_under, delay) = jde_vem_bold(
                 graph, data, Onsets, durations, self.hrfDuration, self.nbClasses,
                 TR, self.beta, self.dt, self.estimateSigmaH, self.sigmaH, self.nItMax,
                 self.nItMin, self.estimateBeta, self.contrasts,
                 self.computeContrast, self.hyper_prior_sigma_H, self.estimateHRF,
                 constrained=self.constrained, zero_constraint=self.zero_constraint,
                 drifts_type=self.drifts_type
             )
        else:
            # if not self.fast
            if self.estimateDrifts:
                logger.info("not fast VEM")
                logger.info("NOT WORKING")
                # NOTE(review): ``beta`` is never assigned in this method
                # before this call (only ``self.beta`` exists) — this branch
                # raises NameError; presumably should be ``self.beta``.
                nrls_mean, hrf_mean, \
                labels_proba, noise_var, nrls_class_mean, \
                nrls_class_var, beta, drift_coeffs, \
                drift = Main_vbjde_Python_constrained(graph,data,Onsets,
                                       self.hrfDuration,self.nbClasses,
                                       TR,beta,self.dt,scale,
                                       self.estimateSigmaH,self.sigmaH,
                                       self.nItMax,self.nItMin,
                                       self.estimateBeta,self.PLOT)

        # Plot analysis duration
        self.analysis_duration = time() - t_start
        logger.info('JDE VEM analysis took: %s',
                    format_duration(self.analysis_duration))


        if self.fast:
            ### OUTPUTS: Pack all outputs within a dict
            outputs = {}
            # Time axis of the estimated HRF, sampled at dt.
            hrf_time = np.arange(len(hrf_mean)) * self.dt

            axes_names = ['iteration']
            # Dead code kept from a previous version (FreeEnergy output).
            """axes_domains = {'iteration':np.arange(FreeEnergy.shape[0])}
            outputs['FreeEnergy'] = xndarray(FreeEnergy,
                                        axes_names=axes_names,
                                        axes_domains=axes_domains)
            """
            outputs['hrf'] = xndarray(hrf_mean, axes_names=['time'],
                                      axes_domains={'time':hrf_time},
                                      value_label="HRF")

            domCondition = {'condition': cNames}
            # NRLs come back as (voxel, condition); transpose to
            # (condition, voxel) for the output convention.
            outputs['nrls'] = xndarray(nrls_mean.transpose(), value_label="nrls",
                                       axes_names=['condition','voxel'],
                                       axes_domains=domCondition)

            ad = {'condition': cNames,'condition2': Onsets.keys()}

            outputs['Sigma_nrls'] = xndarray(nrls_covar, value_label="Sigma_NRLs",
                                             axes_names=['condition', 'condition2', 'voxel'],
                                             axes_domains=ad)

            outputs['nb_iter'] = xndarray(np.array([nb_iter]), value_label="nb_iter")

            outputs['beta'] = xndarray(beta, value_label="beta",
                                       axes_names=['condition'],
                                       axes_domains=domCondition)

            # Broadcast per-condition / per-time quantities over voxels so
            # they can be written out as volume maps.
            nbc, nbv = len(cNames), nrls_mean.shape[0]
            repeatedBeta = np.repeat(beta, nbv).reshape(nbc, nbv)
            outputs['beta_mapped'] = xndarray(repeatedBeta, value_label="beta",
                                              axes_names=['condition', 'voxel'],
                                              axes_domains=domCondition)

            repeated_hrf = np.repeat(hrf_mean, nbv).reshape(-1, nbv)
            outputs["hrf_mapped"] = xndarray(repeated_hrf, value_label="HRFs",
                                             axes_names=["time", "voxel"],
                                             axes_domains={"time": hrf_time})

            # Only the diagonal (per-sample variance) of the HRF covariance
            # is mapped.
            repeated_hrf_covar = np.repeat(np.diag(hrf_covar), nbv).reshape(-1, nbv)
            outputs["hrf_variance_mapped"] = xndarray(repeated_hrf_covar,
                                                      value_label="HRFs covariance",
                                                      axes_names=["time", "voxel"],
                                                      axes_domains={"time": hrf_time})

            outputs['roi_mask'] = xndarray(np.zeros(nbv)+roiData.get_roi_id(),
                                           value_label="ROI",
                                           axes_names=['voxel'])

            # Scalar diagnostics broadcast to per-voxel maps (same value at
            # every voxel of the parcel).
            outputs["density_ratio"] = xndarray(np.zeros(nbv)+density_ratio,
                                                value_label="Density Ratio to zero",
                                                axes_names=["voxel"])

            outputs["density_ratio_cano"] = xndarray(np.zeros(nbv)+density_ratio_cano,
                                                     value_label="Density Ratio to canonical",
                                                     axes_names=["voxel"])

            outputs["density_ratio_diff"] = xndarray(np.zeros(nbv)+density_ratio_diff,
                                                     value_label="Density Ratio to canonical",
                                                     axes_names=["voxel"])

            outputs["density_ratio_prod"] = xndarray(np.zeros(nbv)+density_ratio_prod,
                                                     value_label="Density Ratio to canonical",
                                                     axes_names=["voxel"])

            outputs["variation_coeff"] = xndarray(np.zeros(nbv)+variation_coeff,
                                                  value_label="Coefficient of variation of the HRF",
                                                  axes_names=["voxel"])
            # Pad the free-energy trace to nItMax, mark the padding as NaN,
            # then broadcast over voxels.
            free_energy = np.concatenate((np.asarray(free_energy), np.zeros((self.nItMax - len(free_energy)))))
            free_energy[free_energy == 0.] = np.nan
            free_energy = np.repeat(free_energy, nbv).reshape(-1, nbv)
            outputs["free_energy"] = xndarray(free_energy,
                                              value_label="free energy",
                                              axes_names=["time", "voxel"])

            if self.estimateHRF:
                # Scalar physiological-model fit parameters of the estimated
                # HRF, each written out as a constant per-voxel map with
                # NIfTI metadata.
                fitting_parameters = {
                    "hrf_fit_delay_of_response":  delay_of_response,
                    "hrf_fit_delay_of_undershoot":  delay_of_undershoot,
                    "hrf_fit_dispersion_of_response":  dispersion_of_response,
                    "hrf_fit_dispersion_of_undershoot":  dispersion_of_undershoot,
                    "hrf_fit_ratio_response_undershoot": ratio_resp_under,
                    "hrf_fit_delay": delay,
                }
                affine = np.eye(4)
                for param_name in fitting_parameters:
                    header = nibabel.Nifti1Header()
                    # e.g. "hrf_fit_delay_of_response" -> "Delay of response"
                    description = param_name[8:].replace("_", " ").capitalize()
                    outputs[param_name] = xndarray(
                        np.zeros(nbv)+fitting_parameters[param_name],
                        value_label=description + " of the fitted estimated HRF",
                        axes_names=["voxel"], meta_data=(affine, header)
                    )
                    outputs[param_name].meta_data[1]["descrip"] = description

            h = hrf_mean
            nrls_mean = nrls_mean.transpose()

            nvox = nrls_mean.shape[1]
            nbconds = nrls_mean.shape[0]
            # NOTE(review): ``ah`` is allocated but never used below.
            ah = np.zeros((h.shape[0], nvox, nbconds))

            # Gaussian mixture parameters per condition/class: mean and
            # std (sqrt of variance) stacked on the last axis.
            mixtp = np.zeros((roiData.nbConditions, self.nbClasses, 2))
            mixtp[:, :, 0] = nrls_class_mean
            mixtp[:, :, 1] = np.sqrt(nrls_class_var)

            an = ['condition', 'Act_class', 'component']
            ad = {'Act_class': ['inactiv', 'activ'],
                  'condition': cNames,
                  'component': ['mean', 'var']}
            outputs['mixt_p'] = xndarray(mixtp, axes_names=an, axes_domains=ad)

            ad = {'class': ['inactiv', 'activ'],
                  'condition': cNames}
            outputs['labels'] = xndarray(labels_proba, value_label="Labels",
                                         axes_names=['condition', 'class', 'voxel'],
                                         axes_domains=ad)
            outputs['noise_var'] = xndarray(noise_var,value_label="noise_var",
                                           axes_names=['voxel'])
            if self.estimateDrifts and self.output_drifts:
                outputs['drift_coeff'] = xndarray(drift_coeffs, value_label="Drift",
                                                  axes_names=['coeff', 'voxel'])
                outputs['drift'] = xndarray(drift, value_label="Delta BOLD",
                                            axes_names=['time', 'voxel'])

            # Posterior probability maps (PPM) for each condition, with
            # either alpha or gamma threshold fixed.
            affine = np.eye(4)
            for condition_nb, condition_name in enumerate(cNames):
                header = nibabel.Nifti1Header()
                outputs["ppm_a_nrl_"+condition_name] = xndarray(ppm_a_nrl[:, condition_nb],
                                                value_label="PPM NRL alpha fixed",
                                                axes_names=["voxel"],
                                                meta_data=(affine, header))
                outputs["ppm_a_nrl_"+condition_name].meta_data[1]["descrip"] = condition_name


                outputs["ppm_g_nrl_"+condition_name] = xndarray(ppm_g_nrl[:, condition_nb],
                                                                value_label="PPM NRL gamma fixed",
                                                                axes_names=["voxel"],
                                                                meta_data=(affine, header))
                outputs["ppm_g_nrl_"+condition_name].meta_data[1]["descrip"] = condition_name

            if (len(self.contrasts) > 0) and self.computeContrast:
                #keys = list((self.contrasts[nc]) for nc in self.contrasts)
                domContrast = {'contrast': self.contrasts.keys()}
                outputs['contrasts'] = xndarray(contrasts_mean, value_label="Contrast",
                                                axes_names=['voxel', 'contrast'],
                                                axes_domains=domContrast)
                #print 'contrast output:'
                #print outputs['contrasts'].descrip()

                c = xndarray(contrasts_var, value_label="Contrasts_Variance",
                             axes_names=['voxel', 'contrast'],
                             axes_domains=domContrast)
                outputs['contrasts_variance'] = c

                # Contrast divided by its standard deviation (z-like map).
                outputs['ncontrasts'] = xndarray(contrasts_mean/contrasts_var**.5,
                                                 value_label="Normalized Contrast",
                                                 axes_names=['voxel', 'contrast'],
                                                 axes_domains=domContrast)

                for i, contrast in enumerate(self.contrasts.keys()):
                    header = nibabel.Nifti1Header()
                    outputs["ppm_a_"+contrast] = xndarray(ppm_a_contrasts[:, i],
                                                          value_label="PPM Contrasts alpha fixed",
                                                          axes_names=["voxel"],
                                                          meta_data=(affine, header))
                    outputs["ppm_a_"+contrast].meta_data[1]["descrip"] = contrast

                    outputs["ppm_g_"+contrast] = xndarray(ppm_g_contrasts[:, i],
                                                          value_label="PPM Contrasts gamma fixed",
                                                          axes_names=["voxel"],
                                                          meta_data=(affine, header))
                    outputs["ppm_g_"+contrast].meta_data[1]["descrip"] = contrast


        ################################################################################
        # SIMULATION
        # When ground truth is available, compute ROC curves of the
        # estimated labels against the simulated ones and plot them.

        if self.simulation and self.fast:

            labels_vem_audio = roiData.simulation[0]['labels'][0]
            labels_vem_video = roiData.simulation[0]['labels'][1]

            M = labels_proba.shape[0]
            K = labels_proba.shape[1]
            J = labels_proba.shape[2]
            true_labels = np.zeros((K,J))
            true_labels[0,:] = np.reshape(labels_vem_audio,(J))
            true_labels[1,:] = np.reshape(labels_vem_video,(J))
            # Posterior probability of the "active" class per condition/voxel.
            newlabels = np.reshape(labels_proba[:,1,:],(M,J))
            se = []
            sp = []
            size = np.prod(labels_proba.shape)

            for i in xrange(0,M):
                se0,sp0, auc = roc_curve(newlabels[i,:].tolist(),
                                         true_labels[i,:].tolist())
                se.append(se0)
                sp.append(sp0)
                # Track the shortest curve so all rows can share one length.
                size = min(size,len(sp0))
            SE = np.zeros((M,size),dtype=float)
            SP = np.zeros((M,size),dtype=float)
            for i in xrange(0,M):
                tmp = np.array(se[i])
                SE[i,:] = tmp[0:size]
                tmp = np.array(sp[i])
                SP[i,:] = tmp[0:size]

            sensData, specData = SE, SP
            axes_names = ['condition','1-specificity']
            outName = 'ROC_audio'
            ad = {'1-specificity':specData[0],'condition':cNames}
            outputs[outName] = xndarray(sensData, axes_names=axes_names,
                                      axes_domains=ad,
                                      value_label='sensitivity')

            m = specData[0].min()
            import matplotlib.font_manager as fm
            # NOTE(review): figure/plot/hold/xticks/... are called bare —
            # presumably a ``pylab`` star-import elsewhere in the file;
            # ``hold`` was removed in modern matplotlib. Verify.
            figure(200)
            plot(sensData[0],specData[0],'--',color='k',linewidth=2.0,label='m=1')
            hold(True)
            plot(sensData[1],specData[1],color='k',linewidth=2.0,label='m=2')
            #legend(('audio','video'))
            xticks(color = 'k', size = 14,fontweight='bold')
            yticks(color = 'k', size = 14,fontweight='bold')
            #xlabel('1 - Specificity',fontsize=16,fontweight='bold')
            #ylabel('Sensitivity',fontsize=16,fontweight='bold')
            prop = fm.FontProperties(size=14,weight='bold')
            legend(loc=1,prop=prop)
            axis([0., 1., m, 1.02])


            # Retrieve ground-truth NRLs; the simulation container can be
            # either an object with a .nrls attribute or a plain dict.
            # NOTE(review): above, simulation is indexed as
            # roiData.simulation[0] (a list) — these accesses look
            # inconsistent; confirm the expected container type.
            if hasattr(roiData.simulation, 'nrls'):
                true_labels = roiData.simulation.nrls.labels
                true_nrls = roiData.simulation.nrls.data
            elif isinstance(roiData.simulation, dict) and \
                    roiData.simulation.has_key('labels') and \
                    roiData.simulation.has_key('nrls') :
                true_labels = roiData.simulation['labels']
                true_nrls = roiData.simulation['nrls']
            else:
                raise Exception('Simulation can not be retrieved from %s' \
                                    %str(roiData.simulation))

            domCondition = {'condition':cNames}
            outputs['Truenrls'] = xndarray(true_nrls,value_label="True_nrls",
                                         axes_names=['condition','voxel'],
                                         axes_domains=domCondition)
            M = labels_proba.shape[0]
            K = labels_proba.shape[1]
            J = labels_proba.shape[2]

            newlabels = np.reshape(labels_proba[:,1,:],(M,J))

            # Second ROC pass against the retrieved ground truth.
            # NOTE(review): appends to the ``se``/``sp`` lists from the
            # first pass, then indexes se[i]/sp[i] for i < M — this reuses
            # the first pass's curves, and the results are discarded.
            for i in xrange(0,M):
                se0,sp0, auc = roc_curve(newlabels[i,:].tolist(),
                                         true_labels[i,:].tolist())
                se.append(se0)
                sp.append(sp0)
                size = min(size,len(sp0))
            SE = np.zeros((M,size),dtype=float)
            SP = np.zeros((M,size),dtype=float)
            for i in xrange(0,M):
                tmp = np.array(se[i])
                SE[i,:] = tmp[0:size]
                tmp = np.array(sp[i])
                SP[i,:] = tmp[0:size]

        # END SIMULATION
        ##########################################################################
        if self.fast:
            d = {'parcel_size': np.array([nvox])}
            outputs['analysis_duration'] = xndarray(np.array([self.analysis_duration]),
                                                    axes_names=['parcel_size'],
                                                    axes_domains=d)

        return outputs
Code example #21
0
File: vem_tools.py  Project: ainafp/pyhrf
def MiniVEM_CompMod(Thrf, TR, dt, beta, Y, K, gamma, gradientStep, MaxItGrad, D, M, N, J, S, maxNeighbours, neighboursIndexes, XX, X, R, Det_invR, Gamma, Det_Gamma, p_Wtilde, scale, Q_barnCond, XGamma, tau1, tau2, Nit, sigmaH, estimateHRF):
    """Grid-search the VEM initialisation that maximises the free energy.

    For every combination of candidate initial mixture mean (``IM_val``),
    initial mixture variance (``IV_val``) and HRF hyper-parameter weight
    (``gammah_val``), a short VEM loop of ``Nit + 1`` iterations is run and
    the resulting free energy is recorded.  The combination with the highest
    free energy is returned so the full VEM can start from it.

    Returns
    -------
    tuple
        ``(InitVar, InitMean, Initgamma_h)`` — the best-scoring
        initialisation triple.
    """

    # print 'InitVar =',InitVar,',    InitMean =',InitMean,',     gamma_h
    # =',gamma_h

    # Remember the caller-provided sigmaH so each grid point restarts from it.
    Init_sigmaH = sigmaH

    # Candidate grids for initial mixture mean / variance and gamma_h weight.
    IM_val = np.array([-5., 5.])
    IV_val = np.array([0.008, 0.016, 0.032, 0.064, 0.128, 0.256, 0.512])
    #IV_val = np.array([0.01,0.05,0.1,0.5])
    gammah_val = np.array([1000])
    # Total number of initialisations tested (size of the cartesian grid).
    MiniVemStep = IM_val.shape[0] * IV_val.shape[0] * gammah_val.shape[0]

    # One [InitVar, InitMean, Gh] entry per tested grid point, aligned with FE.
    Init_mixt_p_gammah = []

    logger.info("Number of tested initialisation is %s", MiniVemStep)

    t1_MiniVEM = time.time()
    FE = []
    for Gh in gammah_val:
        for InitVar in IV_val:
            for InitMean in IM_val:
                Init_mixt_p_gammah += [[InitVar, InitMean, Gh]]
                sigmaH = Init_sigmaH
                sigma_epsilone = np.ones(J)
                # The two `if 0:` branches below are alternative (disabled)
                # label initialisation strategies kept for reference.
                if 0:
                    logger.info(
                        "Labels are initialized by setting active probabilities to zeros ...")
                    q_Z = np.ones((M, K, J), dtype=np.float64)
                    q_Z[:, 1, :] = 0
                if 0:
                    logger.info("Labels are initialized randomly ...")
                    q_Z = np.zeros((M, K, J), dtype=np.float64)
                    # NOTE: Python 2 integer division — voxels per class.
                    nbVoxInClass = J / K
                    for j in xrange(M):
                        if J % 2 == 0:
                            l = []
                        else:
                            l = [0]
                        for c in xrange(K):
                            l += [c] * nbVoxInClass
                        q_Z[j, 0, :] = np.random.permutation(l)
                        q_Z[j, 1, :] = 1. - q_Z[j, 0, :]
                # Active strategy: every voxel starts fully "active" (class 1).
                if 1:
                    logger.info(
                        "Labels are initialized by setting active probabilities to ones ...")
                    q_Z = np.zeros((M, K, J), dtype=np.float64)
                    q_Z[:, 1, :] = 1

                # HRF initialised from the canonical shape, truncated to D samples.
                # TT,m_h = getCanoHRF(Thrf-dt,dt) #TODO: check
                TT, m_h = getCanoHRF(Thrf, dt)  # TODO: check
                m_h = m_h[:D]
                m_H = np.array(m_h).astype(np.float64)
                if estimateHRF:
                    Sigma_H = np.ones((D, D), dtype=np.float64)
                else:
                    # Fixed HRF: no posterior covariance.
                    Sigma_H = np.zeros((D, D), dtype=np.float64)

                Beta = beta * np.ones((M), dtype=np.float64)
                # Order-4 polynomial drift regressors and their LS fit to Y.
                P = PolyMat(N, 4, TR)
                L = polyFit(Y, TR, 4, P)
                PL = np.dot(P, L)
                y_tilde = Y - PL  # drift-corrected data
                Ndrift = L.shape[0]

                gamma_h = Gh
                # Mixture parameters: class 0 = inactive, class 1 = active.
                sigma_M = np.ones((M, K), dtype=np.float64)
                sigma_M[:, 0] = 0.1
                sigma_M[:, 1] = 1.0
                mu_M = np.zeros((M, K), dtype=np.float64)
                for k in xrange(1, K):
                    mu_M[:, k] = InitMean
                Sigma_A = np.zeros((M, M, J), np.float64)
                for j in xrange(0, J):
                    Sigma_A[:, :, j] = 0.01 * np.identity(M)
                # Draw initial NRLs from the label-weighted mixture.
                m_A = np.zeros((J, M), dtype=np.float64)
                for j in xrange(0, J):
                    for m in xrange(0, M):
                        for k in xrange(0, K):
                            m_A[j, m] += np.random.normal(
                                mu_M[m, k], np.sqrt(sigma_M[m, k])) * q_Z[m, k, j]

                # Truncated VEM loop: E-steps (A, H, Z) then M-steps
                # (sigmaH, mixture, drift, beta, noise).
                for ni in xrange(0, Nit + 1):
                    logger.info("------------------------------ Iteration n° " +
                                str(ni + 1) + " ------------------------------")
                    UtilsC.expectation_A(q_Z, mu_M, sigma_M, PL, sigma_epsilone, Gamma,
                                         Sigma_H, Y, y_tilde, m_A, m_H, Sigma_A, XX.astype(int32), J, D, M, N, K)
                    # Flush denormal-magnitude values of m_A to exactly zero.
                    val = np.reshape(m_A, (M * J))
                    val[np.where((val <= 1e-50) & (val > 0.0))] = 0.0
                    val[np.where((val >= -1e-50) & (val < 0.0))] = 0.0
                    m_A = np.reshape(val, (J, M))

                    if estimateHRF:
                        UtilsC.expectation_H(XGamma, Q_barnCond, sigma_epsilone, Gamma, R, Sigma_H, Y, y_tilde, m_A, m_H, Sigma_A, XX.astype(
                            int32), J, D, M, N, scale, sigmaH)
                        # Pin HRF endpoints to zero.
                        m_H[0] = 0
                        m_H[-1] = 0

                    UtilsC.expectation_Z_ParsiMod_3(
                        Sigma_A, m_A, sigma_M, Beta, p_Wtilde, mu_M, q_Z, neighboursIndexes.astype(int32), M, J, K, maxNeighbours)
                    # Flush denormal-magnitude positive probabilities to zero.
                    val = np.reshape(q_Z, (M * K * J))
                    val[np.where((val <= 1e-50) & (val > 0.0))] = 0.0
                    q_Z = np.reshape(val, (M, K, J))

                    if estimateHRF:
                        # gamma_h > 0 selects the prior-regularised update.
                        if gamma_h > 0:
                            sigmaH = maximization_sigmaH_prior(
                                D, Sigma_H, R, m_H, gamma_h)
                        else:
                            sigmaH = maximization_sigmaH(D, Sigma_H, R, m_H)
                    mu_M, sigma_M = maximization_mu_sigma(
                        mu_M, sigma_M, q_Z, m_A, K, M, Sigma_A)
                    UtilsC.maximization_L(
                        Y, m_A, m_H, L, P, XX.astype(int32), J, D, M, Ndrift, N)
                    PL = np.dot(P, L)
                    y_tilde = Y - PL
                    for m in xrange(0, M):
                        Beta[m] = UtilsC.maximization_beta(beta, q_Z[m, :, :].astype(float64), q_Z[m, :, :].astype(
                            float64), J, K, neighboursIndexes.astype(int32), gamma, maxNeighbours, MaxItGrad, gradientStep)
                    UtilsC.maximization_sigma_noise(
                        Gamma, PL, sigma_epsilone, Sigma_H, Y, m_A, m_H, Sigma_A, XX.astype(int32), J, D, M, N)

                # Score this initialisation by its final free energy.
                FreeEnergy = Compute_FreeEnergy(y_tilde, m_A, Sigma_A, mu_M, sigma_M, m_H, Sigma_H, R, Det_invR, sigmaH, p_Wtilde, tau1,
                                                tau2, q_Z, neighboursIndexes, maxNeighbours, Beta, sigma_epsilone, XX, Gamma, Det_Gamma, XGamma, J, D, M, N, K, S, "CompMod")
                FE += [FreeEnergy]

    # `maximum` (project helper) presumably returns (max value, arg index) —
    # usage here relies on that pair ordering.
    max_FE, max_FE_ind = maximum(FE)
    InitVar = Init_mixt_p_gammah[max_FE_ind][0]
    InitMean = Init_mixt_p_gammah[max_FE_ind][1]
    Initgamma_h = Init_mixt_p_gammah[max_FE_ind][2]

    t2_MiniVEM = time.time()
    logger.info(
        "MiniVEM duration is %s", format_duration(t2_MiniVEM - t1_MiniVEM))
    logger.info("Choosed initialisation is : var = %s,  mean = %s,  gamma_h = %s",
                InitVar, InitMean, Initgamma_h)

    return InitVar, InitMean, Initgamma_h
コード例 #22
0
ファイル: treatment.py プロジェクト: zddzxxsmile/pyhrf
def run_pyhrf_cmd_treatment(cfg_cmd, exec_cmd, default_cfg_file,
                            default_profile_file, label_for_cluster):
    """Command-line entry point running a joint detection-estimation treatment.

    Parses the command line, loads a treatment from a pickled dump (``-t``)
    or from an XML configuration file (``-c``, defaulting to
    *default_cfg_file*), then runs it: in parallel mode (``-x``), on a single
    pickled ROI (``-r``, result dumped next to the input), or locally —
    optionally under cProfile (``-p``).

    Parameters
    ----------
    cfg_cmd : str
        Name of the companion command that builds a config file; only used
        in the error message printed when the XML file is missing.
    exec_cmd : str
        Unused in this function; kept for interface compatibility.
    default_cfg_file : str
        XML configuration file to load when ``-c`` is not given.
    default_profile_file : str
        Where cProfile statistics are written when profiling is enabled.
    label_for_cluster : str
        Unused in this function; kept for interface compatibility.
    """

    # FIX: optparse substitutes the literal token '%prog'; the previous
    # doubled '%%prog' left a stray '%' in the rendered usage line.
    usage = 'usage: %prog [options]'

    # FIX: joined fragments were missing separating spaces
    # ("data.This", "build atemplate").
    description = 'Manage a joint detection-estimation treatment of fMRI ' \
        'data. This command runs the treatment defined in an xml ' \
        'parameter file. See pyhrf_jde_buildcfg command to build a ' \
        'template of such a file. If no xml file found, then runs a ' \
        'default example analysis.'

    parser = OptionParser(usage=usage, description=description)

    parser.add_option('-c',
                      '--input-cfg-file',
                      metavar='XMLFILE',
                      dest='cfgFile',
                      default=default_cfg_file,
                      help='Configuration file: XML file containing parameters'
                      ' defining input data and analysis to perform.')

    parser.add_option('-r',
                      '--roi-data',
                      metavar='PICKLEFILE',
                      dest='roidata',
                      default=None,
                      help='Input fMRI ROI data. The data '
                      'definition part in the config file is ignored.')

    parser.add_option('-t',
                      '--treatment_pck',
                      metavar='PICKLEFILE',
                      dest='treatment_pck',
                      default=None,
                      help='Input treatment as a pickle dump. '
                      'The XML cfg file is ignored')

    parser.add_option('-s',
                      '--stop-on-error',
                      dest='stop_on_error',
                      action='store_true',
                      default=False,
                      help='For debug: do not continue if error'
                      ' during one ROI analysis')

    parser.add_option('-v',
                      '--verbose',
                      dest='verbose',
                      metavar='INTEGER',
                      type='int',
                      default=0,
                      help=pformat(pyhrf.verbose_levels))

    parser.add_option(
        '-p',
        '--profile',
        action='store_true',
        default=False,
        help='Enable profiling of treatment. Store profile data in '
        '%s. NOTE: not avalaible in parallel mode.' % default_profile_file)

    parallel_choices = ['LAN', 'local', 'cluster']
    parser.add_option('-x',
                      '--parallel',
                      choices=parallel_choices,
                      help='Parallel processing. Choices are %s' %
                      string.join(parallel_choices, ', '))

    (options, args) = parser.parse_args()

    pyhrf.logger.setLevel(options.verbose)

    t0 = time.time()

    if options.treatment_pck is not None:
        # Binary mode: pickle streams are not text.
        with open(options.treatment_pck, 'rb') as f:
            treatment = cPickle.load(f)
    else:
        if not os.path.exists(options.cfgFile):
            print 'Error: could not find default configuration file "%s"\n'\
                'Consider running "%s" to generate it.' \
                % (options.cfgFile, cfg_cmd)
            sys.exit(1)
        else:
            logger.info('Loading configuration from: "%s" ...',
                        options.cfgFile)
            with open(options.cfgFile, 'r') as f:
                sXml = string.join(f.readlines())
            treatment = xmlio.from_xml(sXml)

    # -s inverts the analyser's default "continue past ROI errors" behaviour.
    treatment.analyser.set_pass_errors(not options.stop_on_error)

    if options.parallel is not None:
        treatment.run(parallel=options.parallel)
    else:
        if options.roidata is not None:
            logger.info('Loading ROI data from: "%s" ...', options.roidata)

            with open(options.roidata, 'rb') as f:
                roidata = cPickle.load(f)
            roidata.verbosity = logger.getEffectiveLevel()
            if logger.getEffectiveLevel() <= logging.INFO:
                print roidata.getSummary()
            logger.info('Launching analysis ...')
            if options.profile:
                # FIX: runctx assigns 'result' into the locals dict it is
                # given, not into this frame — retrieve it afterwards so the
                # dump below does not raise NameError.
                run_locals = {'treatment': treatment, 'roidata': roidata}
                cProfile.runctx("result = treatment.analyser(roidata)",
                                globals(), run_locals, default_profile_file)
                result = run_locals['result']
            else:
                result = treatment.analyser(roidata)
            # Result is dumped next to the input ROI file.
            outPath = op.dirname(op.abspath(options.roidata))
            fOut = op.join(outPath, "result_%04d.pck" % roidata.get_roi_id())
            logger.info('Dumping results to %s ...', fOut)
            with open(fOut, 'wb') as f:
                cPickle.dump(result, f)
        else:
            logger.info('ROI data is none')
            if options.profile:
                cProfile.runctx("treatment.run()", globals(),
                                {'treatment': treatment}, default_profile_file)
            else:
                treatment.run()

    logger.info('Estimation done, took %s', format_duration(time.time() - t0))