Example #1
def remote_dir_is_writable(user, hosts, path):
    """
    Test if *path* is writable from each host in *hosts*. Sending bash
    commands to each host via ssh using the given *user* login.

    Args:

    """
    import os.path as op
    import pyhrf

    mode = 'dispatch'
    cmds = ['bash -c "echo -n \"[write_test]:%d:\"; '\
            'if [ -w %s ]; then echo \"OK\"; else echo \"NOTOK\"; fi;"' \
            %(ih, path) for ih in range(len(hosts))]
    tasks = read_tasks(cmds, mode)
    timeslot = read_timeslot('allday')

    tmp_dir = pyhrf.get_tmp_path()
    logfile = op.join(tmp_dir, 'pyhrf.log')
    run_grid(mode, hosts, 'rsa', tasks, timeslot, logfile=logfile, user=user)
    kill_threads()
    log = open(logfile).readlines()

    res = [False] * len(hosts)
    for line in log:
        if line.startswith('[write_test]'):
            #print line
            _, ih, r = line.strip('\n').split(':')
            res[int(ih)] = ('OK' in r)
    os.remove(logfile)
    return res
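A minimal usage sketch (hypothetical host names; assumes passwordless ssh is configured and the grid helpers used above are importable):

hosts = ['node01', 'node02']  # hypothetical LAN hosts
flags = remote_dir_is_writable('myuser', hosts, '/tmp')
for host, ok in zip(hosts, flags):
    print '%s writable: %s' % (host, ok)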
Example #2
    def setUp(self):

        pyhrf.verbose.set_verbosity(0)

        np.random.seed(8652761)

        self.tmp_dir = pyhrf.get_tmp_path()
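The setUp above creates a temporary folder but never deletes it; a minimal matching tearDown sketch (assuming only the tmp_dir attribute set above):

    def tearDown(self):
        # remove the temporary folder created in setUp
        import shutil
        shutil.rmtree(self.tmp_dir, ignore_errors=True)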
Example #3
    def _test_spm_option_parse(self, spm_ver):
        """
        Test parsing of option "-s SPM.mat" with given SPM version (int)
        """
        spm_file = op.join(pyhrf.get_tmp_path(), 'SPM.mat')
        tools._io._zip.gunzip(pyhrf.get_data_file_name('SPM_v%d.mat.gz' % spm_ver),
                              outFileName=spm_file)

        options = ['-s', spm_file]
        from optparse import OptionParser
        parser = OptionParser()
        ptr.append_common_treatment_options(parser)

        fd = ptr.parse_data_options(parser.parse_args(options)[0])

        self.assertEqual(fd.tr, 2.4)  # repetition time (s)
        p = fd.paradigm
        # nb sessions
        self.assertEqual(len(p.stimOnsets[p.stimOnsets.keys()[0]]), 2)
        npt.assert_almost_equal(p.stimOnsets['audio'][0],
                                ppar.onsets_loc_av['audio'][0])
        npt.assert_almost_equal(p.stimOnsets['audio'][1],
                                ppar.onsets_loc_av['audio'][0])
        npt.assert_almost_equal(p.stimOnsets['video'][1],
                                ppar.onsets_loc_av['video'][0])
Example #4
File: grid.py Project: Solvi/pyhrf
def remote_dir_is_writable(user, hosts, path):
    """
    Test if *path* is writable from each host in *hosts*. Sending bash
    commands to each host via ssh using the given *user* login.

    Args:

    """
    import os.path as op
    import pyhrf

    mode = 'dispatch'
    cmds = ['bash -c "echo -n \"[write_test]:%d:\"; '\
            'if [ -w %s ]; then echo \"OK\"; else echo \"NOTOK\"; fi;"' \
            %(ih, path) for ih in range(len(hosts))]
    tasks = read_tasks(cmds, mode)
    timeslot = read_timeslot('allday')

    tmp_dir = pyhrf.get_tmp_path()
    brokenfile = op.join(tmp_dir, 'pyhrf-broken_cmd.batch')
    logfile = op.join(tmp_dir, 'pyhrf-parallel.log')
    run_grid(mode, hosts, 'rsa', tasks, timeslot, brokenfile,
             logfile, user=user)
    kill_threads()

    log = open(logfile).readlines()

    res = [False] * len(hosts)
    for line in log:
        if line.startswith('[write_test]'):
            #print line
            _,ih,r = line.strip('\n').split(':')
            res[int(ih)] = ('OK' in r)

    return res
Example #5
    def setUp(self):
        np.random.seed(8652761)

        self.tmp_dir = pyhrf.get_tmp_path()
        self.clean_tmp = True

        self.sampler_params_for_single_test = {
            'nb_iterations':
            40,
            'smpl_hist_pace':
            1,
            'obs_hist_pace':
            1,
            'brf':
            jasl.PhysioBOLDResponseSampler(do_sampling=False,
                                           normalise=1.,
                                           use_true_value=True,
                                           zero_constraint=False),
            'brf_var':
            jasl.PhysioBOLDResponseVarianceSampler(do_sampling=False,
                                                   val_ini=np.array([0.1]),
                                                   use_true_value=False),
            'prf':
            jasl.PhysioPerfResponseSampler(
                do_sampling=False,
                normalise=1.,
                use_true_value=True,
                zero_constraint=False,
                prior_type='physio_stochastic_regularized'),
            'prf_var':
            jasl.PhysioPerfResponseVarianceSampler(do_sampling=False,
                                                   val_ini=np.array([.001]),
                                                   use_true_value=False),
            'noise_var':
            jasl.NoiseVarianceSampler(do_sampling=False, use_true_value=True),
            'drift_var':
            jasl.DriftVarianceSampler(do_sampling=False, use_true_value=True),
            'drift':
            jasl.DriftCoeffSampler(do_sampling=False, use_true_value=True),
            'bold_response_levels':
            jasl.BOLDResponseLevelSampler(do_sampling=False,
                                          use_true_value=True),
            'perf_response_levels':
            jasl.PerfResponseLevelSampler(do_sampling=False,
                                          use_true_value=True),
            'labels':
            jasl.LabelSampler(do_sampling=False, use_true_value=True),
            'bold_mixt_params':
            jasl.BOLDMixtureSampler(do_sampling=False, use_true_value=True),
            'perf_mixt_params':
            jasl.PerfMixtureSampler(do_sampling=False, use_true_value=True),
            'perf_baseline':
            jasl.PerfBaselineSampler(do_sampling=False, use_true_value=True),
            'perf_baseline_var':
            jasl.PerfBaselineVarianceSampler(do_sampling=False,
                                             use_true_value=True),
            'check_final_value':
            'raise',  # print or raise
        }
Example #6
    def setUp(self):

        self.tmp_dir = pyhrf.get_tmp_path()

        simu = simulate_sessions(output_dir=self.tmp_dir,
                                 snr_scenario='high_snr',
                                 spatial_size='random_small')
        self.data_simu = merge_fmri_sessions(simu)
Example #7
    def setUp(self):
        cache_dir = tempfile.mkdtemp(prefix='pyhrf_validate',
                                     dir=pyhrf.cfg['global']['tmp_path'])
        mem = Memory(cache_dir)
        np.random.seed(8652761)

        self.tmp_dir = pyhrf.get_tmp_path()
        self.clean_tmp = True
Example #8
    def setUp(self):

        # pyhrf.verbose.set_verbosity(0)
        pyhrf.logger.setLevel(logging.WARNING)

        np.random.seed(8652761)

        self.tmp_dir = pyhrf.get_tmp_path()
Example #9
 def _test_load_regnames(self, spm_ver):
     spm_file = op.join(pyhrf.get_tmp_path(), 'SPM.mat')
     pio._zip.gunzip(pyhrf.get_data_file_name('SPM_v%d.mat.gz' % spm_ver),
                     outFileName=spm_file)
     expected = ['Sn(1) audio*bf(1)', 'Sn(1) video*bf(1)',
                 'Sn(2) audio*bf(1)', 'Sn(2) video*bf(1)',
                 'Sn(1) constant', 'Sn(2) constant']
     self.assertEqual(pio.spmio.load_regnames(spm_file), expected)
Example #10
    def setUp(self):
        np.random.seed(8652761)

        self.simu_dir = pyhrf.get_tmp_path()

        # Parameters to setup a Sampler where all samplers are OFF and
        # set to their true values.
        # This is used by the function _test_specific_samplers,
        # which will turn on specific samplers to test.

        self.sampler_params_for_single_test = {
            'nb_iterations': 40,
            'smpl_hist_pace': -1,
            'obs_hist_pace': -1,
            # HRF by subject
            'hrf_subj': jms.HRF_Sampler(do_sampling=False,
                                        normalise=1.,
                                        use_true_value=True,
                                        zero_contraint=False,
                                        prior_type='singleHRF'),

            # HRF variance
            'hrf_var_subj': jms.HRFVarianceSubjectSampler(do_sampling=False,
                                                          use_true_value=True),
            # HRF group
            'hrf_group': jms.HRF_Group_Sampler(do_sampling=False,
                                               normalise=1.,
                                               use_true_value=True,
                                               zero_contraint=False,
                                               prior_type='singleHRF'),
            # HRF variance
            'hrf_var_group': jms.RHGroupSampler(do_sampling=False,
                                                use_true_value=True),

            # neural response levels (stimulus-induced effects) by subject
            'response_levels': jms.NRLs_Sampler(do_sampling=False,
                                                use_true_value=True),
            'labels': jms.LabelSampler(do_sampling=False,
                                       use_true_value=True),
            # drift
            'drift': jms.Drift_MultiSubj_Sampler(do_sampling=False,
                                                 use_true_value=True),
            # drift variance
            'drift_var': jms.ETASampler_MultiSubj(do_sampling=False,
                                                  use_true_value=True),
            # noise variance
            'noise_var':
            jms.NoiseVariance_Drift_MultiSubj_Sampler(do_sampling=False,
                                                      use_true_value=False),
            # weights of the mixture
            # parameters of the mixture
            'mixt_params': jms.MixtureParamsSampler(do_sampling=False,
                                                    use_true_value=False),
            #'alpha_subj' : Alpha_hgroup_Sampler(dict_alpha_single),
            #'alpha_var_subj' : AlphaVar_Sampler(dict_alpha_var_single),
            'check_final_value': 'none',  # print or raise
        }
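A hedged sketch of the _test_specific_samplers pattern described in the comments above (the real helper is not shown on this page; the do_sampling/use_true_value attribute names are assumptions about the sampler objects):

    def _test_specific_samplers(self, sampler_names):
        # copy the all-OFF parameter set and switch the named samplers
        # back on
        from copy import deepcopy
        params = deepcopy(self.sampler_params_for_single_test)
        for name in sampler_names:
            params[name].do_sampling = True
            params[name].use_true_value = False
        return params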
Example #11
    def setUp(self):
        tag = "subj0_%s.nii.gz"
        self.func_file = pyhrf.get_data_file_name(tag % "bold_session0")
        self.anatomy_file = pyhrf.get_data_file_name(tag % "anatomy")
        self.roi_mask_file = pyhrf.get_data_file_name(tag % "parcellation")

        self.ax_slice = 24
        self.sag_slice = 7
        self.cor_slice = 34

        self.tmp_dir = pyhrf.get_tmp_path()  #'./'
Example #12
    def setUp(self):
        tag = 'subj0_%s.nii.gz'
        self.func_file = pyhrf.get_data_file_name(tag%'bold_session0')
        self.anatomy_file = pyhrf.get_data_file_name(tag%'anatomy')
        self.roi_mask_file = pyhrf.get_data_file_name(tag%'parcellation')

        self.ax_slice = 24
        self.sag_slice = 7
        self.cor_slice = 34

        self.tmp_dir = pyhrf.get_tmp_path() #'./'
Example #13
    def setUp(self):
        tag = 'subj0_%s.nii.gz'
        self.func_file = pyhrf.get_data_file_name(tag % 'bold_session0')
        self.anatomy_file = pyhrf.get_data_file_name(tag % 'anatomy')
        self.roi_mask_file = pyhrf.get_data_file_name(tag % 'parcellation')

        self.ax_slice = 24
        self.sag_slice = 7
        self.cor_slice = 34

        self.tmp_dir = pyhrf.get_tmp_path()
Example #14
    def setUp(self):
        np.random.seed(8652761)

        self.tmp_dir = pyhrf.get_tmp_path()
        self.clean_tmp = True

        self.sampler_params_for_single_test = {
            'nb_iterations': 40,
            'smpl_hist_pace': 1,
            'obs_hist_pace': 1,
            'brf': jasl.PhysioBOLDResponseSampler(do_sampling=False,
                                                  normalise=1.,
                                                  use_true_value=True,
                                                  zero_constraint=False),
            'brf_var':
            jasl.PhysioBOLDResponseVarianceSampler(do_sampling=False,
                                                   val_ini=np.array([0.1]),
                                                   use_true_value=False),
            'prf': jasl.PhysioPerfResponseSampler(do_sampling=False,
                                                  normalise=1.,
                                                  use_true_value=True,
                                                  zero_constraint=False,
                                                  prior_type='physio_stochastic_regularized'),
            'prf_var':
            jasl.PhysioPerfResponseVarianceSampler(do_sampling=False,
                                                   val_ini=np.array(
                                                       [.001]),
                                                   use_true_value=False),
            'noise_var': jasl.NoiseVarianceSampler(do_sampling=False,
                                                   use_true_value=True),
            'drift_var': jasl.DriftVarianceSampler(do_sampling=False,
                                                   use_true_value=True),
            'drift': jasl.DriftCoeffSampler(do_sampling=False,
                                            use_true_value=True),
            'bold_response_levels':
            jasl.BOLDResponseLevelSampler(do_sampling=False,
                                          use_true_value=True),
            'perf_response_levels':
            jasl.PerfResponseLevelSampler(do_sampling=False,
                                          use_true_value=True),
            'labels': jasl.LabelSampler(do_sampling=False,
                                        use_true_value=True),
            'bold_mixt_params': jasl.BOLDMixtureSampler(do_sampling=False,
                                                        use_true_value=True),
            'perf_mixt_params': jasl.PerfMixtureSampler(do_sampling=False,
                                                        use_true_value=True),
            'perf_baseline': jasl.PerfBaselineSampler(do_sampling=False,
                                                      use_true_value=True),
            'perf_baseline_var':
            jasl.PerfBaselineVarianceSampler(do_sampling=False,
                                             use_true_value=True),
            'check_final_value': 'raise',  # print or raise
        }
Example #15
    def dump_roi_datasets(self, dry=False, output_dir=None):
        pyhrf.verbose(1,'Loading data ...')
        # if no file to dump (dry), assume it's only to get file names,
        # then don't build the graph (could take some time ...)
        if not dry:
            self.data.build_graphs()

        explData = self.analyser.split_data(self.data)
        files = []
        roiIds = []
        if output_dir is not None:
            roi_data_out_dir = output_dir
        else:
            if self.output_dir is not None:
                roi_data_out_dir = op.join(self.output_dir, 'ROI_datasets')
            else:
                roi_data_out_dir = op.join(pyhrf.get_tmp_path(), 'ROI_datasets')
            if not op.exists(roi_data_out_dir): os.makedirs(roi_data_out_dir)

        assert op.exists(roi_data_out_dir)

        if not dry:
            pyhrf.verbose(1,'Dump roi data in dir %s' %roi_data_out_dir)


        #data_order = sorted([d.get_nb_vox_in_mask() for d in explData])
        pyhrf.verbose(1,'Dump of roi data, ordering by size ...')
        cmp_size = lambda e1,e2:cmp(e1.get_nb_vox_in_mask(),
                                    e2.get_nb_vox_in_mask())
        for edata in sorted(explData, cmp=cmp_size, reverse=True):
            roiId = edata.get_roi_id()
            fn = op.abspath(op.join(roi_data_out_dir,
                                    "roidata_%04d.pck" %roiId))
            roiIds.append(roiId)
            if not dry:
                f = open(fn, 'w')
                cPickle.dump(edata, f)
                f.close()
            files.append(fn)

        pyhrf.verbose(1,'Dump of roi data done.')

        return files, roiIds
Example #16
    def dump_roi_datasets(self, dry=False, output_dir=None):
        logger.info('Loading data ...')
        # if no file to dump (dry), assume it's only to get file names,
        # then don't build the graph (could take some time ...)
        if not dry:
            self.data.build_graphs()

        explData = self.analyser.split_data(self.data)
        files = []
        roiIds = []
        if output_dir is not None:
            roi_data_out_dir = output_dir
        else:
            if self.output_dir is not None:
                roi_data_out_dir = op.join(self.output_dir, 'ROI_datasets')
            else:
                roi_data_out_dir = op.join(pyhrf.get_tmp_path(),
                                           'ROI_datasets')
            if not op.exists(roi_data_out_dir):
                os.makedirs(roi_data_out_dir)

        assert op.exists(roi_data_out_dir)

        if not dry:
            logger.info('Dump roi data in dir %s', roi_data_out_dir)

        logger.info('Dump of roi data, ordering by size ...')
        cmp_size = lambda e1, e2: cmp(e1.get_nb_vox_in_mask(),
                                      e2.get_nb_vox_in_mask())
        for edata in sorted(explData, cmp=cmp_size, reverse=True):
            roiId = edata.get_roi_id()
            fn = op.abspath(
                op.join(roi_data_out_dir, "roidata_%04d.pck" % roiId))
            roiIds.append(roiId)
            if not dry:
                f = open(fn, 'w')
                cPickle.dump(edata, f)
                f.close()
            files.append(fn)

        logger.info('Dump of roi data done.')

        return files, roiIds
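A usage sketch (assuming 'treatment' is an instance of the class that defines the method above):

files, roi_ids = treatment.dump_roi_datasets(dry=True)
print 'Would dump %d ROI files' % len(files)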
Example #17
    def setUp(self):
        from pyhrf.ndarray import MRI3Daxes
        self.tmp_dir = pyhrf.get_tmp_path()

        self.p1 = np.array([[[1, 1, 1, 3],
                             [1, 1, 3, 3],
                             [0, 1, 2, 2],
                             [0, 2, 2, 2],
                             [0, 0, 2, 4]]], dtype=np.int32)

        self.p1_fn = op.join(self.tmp_dir, 'p1.nii')
        xndarray(self.p1, axes_names=MRI3Daxes).save(self.p1_fn)

        self.p2 = self.p1 * 4.5
        self.p2_fn = op.join(self.tmp_dir, 'p2.nii')
        xndarray(self.p2, axes_names=MRI3Daxes).save(self.p2_fn)

        self.mask = (self.p1 > 0).astype(np.int32)
        self.mask_fn = op.join(self.tmp_dir, 'mask.nii')
        xndarray(self.mask, axes_names=MRI3Daxes).save(self.mask_fn)
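As a sanity check, the saved volumes can be reloaded with xndarray.load, as done in Examples #26 and #32 below (a sketch assuming xndarray exposes its array as the .data attribute):

        p1_back = xndarray.load(self.p1_fn)
        assert (p1_back.data == self.p1).all()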
Example #18
def make_parcellation_cubed_blobs_from_file(parcellation_file,
                                            output_path,
                                            roi_ids=None,
                                            bg_parcel=0,
                                            skip_existing=False):

    p, mp = read_volume(parcellation_file)
    p = p.astype(np.int32)
    if bg_parcel == 0 and p.min() == -1:
        p += 1  # set background to 0

    if roi_ids is None:
        roi_ids = np.unique(p)

    logger.info('%d rois to extract', (len(roi_ids) - 1))

    tmp_dir = pyhrf.get_tmp_path('blob_parcellation')
    tmp_parcel_mask_file = op.join(tmp_dir, 'parcel_for_blob.nii')

    out_files = []
    for roi_id in roi_ids:
        if roi_id != bg_parcel:  # discard background
            output_blob_file = op.join(output_path,
                                       'parcel_%d_cubed_blob.arg' % roi_id)
            out_files.append(output_blob_file)
            if skip_existing and os.path.exists(output_blob_file):
                continue
            parcel_mask = (p == roi_id).astype(np.int32)
            write_volume(parcel_mask, tmp_parcel_mask_file, mp)
            logger.info('Extract ROI %d -> %s', roi_id, output_blob_file)
            cmd = 'AimsGraphConvert -i %s -o %s --bucket' \
                % (tmp_parcel_mask_file, output_blob_file)
            logger.info('Cmd: %s', cmd)
            os.system(cmd)
    if op.exists(tmp_parcel_mask_file):
        os.remove(tmp_parcel_mask_file)

    return out_files
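A usage sketch (hypothetical file names; AimsGraphConvert from BrainVISA must be on the PATH and the output directory must already exist):

blob_files = make_parcellation_cubed_blobs_from_file('parcellation.nii',
                                                     '/tmp/blobs')
print '\n'.join(blob_files)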
Example #19
def make_parcellation_cubed_blobs_from_file(parcellation_file, output_path,
                                            roi_ids=None, bg_parcel=0,
                                            skip_existing=False):


    p,mp = read_volume(parcellation_file)
    p = p.astype(np.int32)
    if bg_parcel==0 and p.min() == -1:
        p += 1 #set background to 0

    if roi_ids is None:
        roi_ids = np.unique(p)

    pyhrf.verbose(1,'%d rois to extract' %(len(roi_ids)-1))

    tmp_dir = pyhrf.get_tmp_path('blob_parcellation')
    tmp_parcel_mask_file = op.join(tmp_dir, 'parcel_for_blob.nii')

    out_files = []
    for roi_id in roi_ids:
        if roi_id != bg_parcel: #discard background
            output_blob_file = op.join(output_path, 'parcel_%d_cubed_blob.arg'\
                                           %roi_id)
            out_files.append(output_blob_file)
            if skip_existing and os.path.exists(output_blob_file):
                continue
            parcel_mask = (p==roi_id).astype(np.int32)
            write_volume(parcel_mask, tmp_parcel_mask_file, mp)
            pyhrf.verbose(3,'Extract ROI %d -> %s' %(roi_id,output_blob_file))
            cmd = 'AimsGraphConvert -i %s -o %s --bucket' \
                %(tmp_parcel_mask_file, output_blob_file)
            pyhrf.verbose(3,'Cmd: %s' %(cmd))
            os.system(cmd)
    if op.exists(tmp_parcel_mask_file):
        os.remove(tmp_parcel_mask_file)

    return out_files
Example #20
 def setUp(self):
     self.tmp_dir = pyhrf.get_tmp_path()
     simu = simulate_sessions(output_dir=self.tmp_dir,
                              snr_scenario='high_snr',
                              spatial_size='random_small')
     self.data_simu = merge_fmri_sessions(simu)
Example #21
 def setUp(self):
     np.random.seed(8652761)
     self.tmp_dir = pyhrf.get_tmp_path()
Example #22
    def setUp(self):
        np.random.seed(8652761)

        self.simu_dir = pyhrf.get_tmp_path()

        # Parameters to setup a Sampler where all samplers are OFF and
        # set to their true values.
        # This is used by the function _test_specific_samplers,
        # which will turn on specific samplers to test.

        self.sampler_params_for_single_test = {
            'nb_iterations':
            40,
            'smpl_hist_pace':
            -1,
            'obs_hist_pace':
            -1,
            # HRF by subject
            'hrf_subj':
            jms.HRF_Sampler(do_sampling=False,
                            normalise=1.,
                            use_true_value=True,
                            zero_contraint=False,
                            prior_type='singleHRF'),

            # HRF variance
            'hrf_var_subj':
            jms.HRFVarianceSubjectSampler(do_sampling=False,
                                          use_true_value=True),
            # HRF group
            'hrf_group':
            jms.HRF_Group_Sampler(do_sampling=False,
                                  normalise=1.,
                                  use_true_value=True,
                                  zero_contraint=False,
                                  prior_type='singleHRF'),
            # HRF variance
            'hrf_var_group':
            jms.RHGroupSampler(do_sampling=False, use_true_value=True),

            # neural response levels (stimulus-induced effects) by subject
            'response_levels':
            jms.NRLs_Sampler(do_sampling=False, use_true_value=True),
            'labels':
            jms.LabelSampler(do_sampling=False, use_true_value=True),
            # drift
            'drift':
            jms.Drift_MultiSubj_Sampler(do_sampling=False,
                                        use_true_value=True),
            # drift variance
            'drift_var':
            jms.ETASampler_MultiSubj(do_sampling=False, use_true_value=True),
            # noise variance
            'noise_var':
            jms.NoiseVariance_Drift_MultiSubj_Sampler(do_sampling=False,
                                                      use_true_value=False),
            # weights of the mixture
            # parameters of the mixture
            'mixt_params':
            jms.MixtureParamsSampler(do_sampling=False, use_true_value=False),
            #'alpha_subj' : Alpha_hgroup_Sampler(dict_alpha_single),
            #'alpha_var_subj' : AlphaVar_Sampler(dict_alpha_var_single),
            'check_final_value':
            'none',  # print or raise
        }
Example #23
def project_fmri_from_kernels(input_mesh, kernels_file, fmri_data_file, output_tex, bin_threshold=None):

    logger.info("Project data onto mesh using kernels ...")

    if 0:
        print "Projecting ..."
        print "func data:", fmri_data_file
        print "Mesh file:", input_mesh
        print "Save as:", output_tex

    logger.info("Call AimsFunctionProjection -op 1 ...")

    data_files = []
    output_texs = []
    p_ids = None
    if bin_threshold is not None:
        d, h = read_volume(fmri_data_file)
        if np.allclose(d.astype(int), d):
            tmp_dir = pyhrf.get_tmp_path()
            p_ids = np.unique(d)
            logger.info("bin threshold: %f", bin_threshold)
            logger.info("pids(n=%d): %d...%d", len(p_ids), min(p_ids), max(p_ids))
            for i, p_id in enumerate(p_ids):
                if p_id != 0:
                    new_p = np.zeros_like(d)
                    new_p[np.where(d == p_id)] = i + 1  # 0 is background
                    ifn = op.join(tmp_dir, "pmask_%d.nii" % p_id)
                    write_volume(new_p, ifn, h)
                    data_files.append(ifn)
                    ofn = op.join(tmp_dir, "ptex_%d.gii" % p_id)
                    output_texs.append(ofn)
        else:
            data_files.append(fmri_data_file)
            output_texs.append(output_tex)
    else:
        data_files.append(fmri_data_file)
        output_texs.append(output_tex)

    logger.info("input data files: %s", str(data_files))
    logger.info("output data files: %s", str(output_texs))

    for data_file, o_tex in zip(data_files, output_texs):
        projection = [
            "AimsFunctionProjection",
            "-op",
            "1",
            "-d",
            kernels_file,
            "-d1",
            data_file,
            "-m",
            input_mesh,
            "-o",
            o_tex,
        ]

        cmd = " ".join(map(str, projection))
        logger.info("cmd: %s", cmd)
        os.system(cmd)

    if bin_threshold is not None:
        logger.info("Binary threshold of texture at %f", bin_threshold)
        o_tex = output_texs[0]
        data, data_gii = read_texture(o_tex)
        data = (data > bin_threshold).astype(np.int32)
        print "data:", data.dtype
        if p_ids is not None:
            for pid, o_tex in zip(p_ids[1:], output_texs[1:]):
                pdata, pdata_gii = read_texture(o_tex)
                data += (pdata > bin_threshold).astype(np.int32) * pid

        # assert (np.unique(data) == p_ids).all()
        write_texture(data, output_tex, intent="NIFTI_INTENT_LABEL")
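A usage sketch (hypothetical file names; requires BrainVISA's AimsFunctionProjection on the PATH):

project_fmri_from_kernels('cortex_mesh.gii', 'proj_kernels.ima',
                          'func_data.nii', 'func_on_mesh.gii',
                          bin_threshold=None)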
Example #24
def project_fmri_from_kernels(input_mesh, kernels_file, fmri_data_file,
                              output_tex, bin_threshold=None, ):

    pyhrf.verbose(2,'Project data onto mesh using kernels ...')

    if 0:
        print 'Projecting ...'
        print 'func data:', fmri_data_file
        print 'Mesh file:', input_mesh
        print 'Save as:', output_tex

    pyhrf.verbose(2,'Call AimsFunctionProjection -op 1 ...')    

    data_files = []
    output_texs = []
    p_ids = None
    if bin_threshold is not None:
        d,h = read_volume(fmri_data_file)
        if np.allclose(d.astype(int), d):
            tmp_dir = pyhrf.get_tmp_path()
            p_ids = np.unique(d)
            pyhrf.verbose(2, 'bin threshold: %f' %bin_threshold)
            pyhrf.verbose(2, 'pids(n=%d): %d...%d' \
                              %(len(p_ids),min(p_ids),max(p_ids)))
            for i,p_id in enumerate(p_ids):
                if p_id != 0:
                    new_p = np.zeros_like(d)
                    new_p[np.where(d==p_id)] = i + 1 #0 is background
                    ifn = op.join(tmp_dir,'pmask_%d.nii'%p_id)
                    write_volume(new_p, ifn, h)
                    data_files.append(ifn)
                    ofn = op.join(tmp_dir,'ptex_%d.gii'%p_id)
                    output_texs.append(ofn)
        else:
            data_files.append(fmri_data_file)
            output_texs.append(output_tex)
    else:
        data_files.append(fmri_data_file)
        output_texs.append(output_tex)

    pyhrf.verbose(3, 'input data files: %s' %str(data_files))
    pyhrf.verbose(3, 'output data files: %s' %str(output_texs))

    for data_file, o_tex in zip(data_files, output_texs):
        projection = [ 
            'AimsFunctionProjection', 
            '-op', '1',
            '-d', kernels_file,
            '-d1', data_file,
            '-m', input_mesh,
            '-o', o_tex
            ]

        cmd = ' '.join(map(str,projection))
        pyhrf.verbose(3, 'cmd: %s' %cmd)
        os.system(cmd)

    if bin_threshold is not None:
        pyhrf.verbose(2, 'Binary threshold of texture at %f' %bin_threshold)
        o_tex = output_texs[0]
        data,data_gii = read_texture(o_tex)
        data = (data>bin_threshold).astype(np.int32)
        print 'data:', data.dtype
        if p_ids is not None:
            for pid, o_tex in zip(p_ids[1:], output_texs[1:]):
                pdata,pdata_gii = read_texture(o_tex)
                data += (pdata>bin_threshold).astype(np.int32) * pid

        #assert (np.unique(data) == p_ids).all()
        write_texture(data, output_tex, intent='NIFTI_INTENT_LABEL')
Example #25
    def setUp(self):

        np.random.seed(8652761)
        self.tmp_outputs = True  # save outputs in tmp dir
        # (if False, outputs are saved in the current dir)

        if not self.tmp_outputs:
            self.tmp_dir_small = './JDE_MS_test_small_simu'
            if not op.exists(self.tmp_dir_small):
                os.makedirs(self.tmp_dir_small)
            self.tmp_dir_big = './JDE_MS_test_big_simu'
            if not op.exists(self.tmp_dir_big): os.makedirs(self.tmp_dir_big)
        else:
            self.tmp_dir_small = pyhrf.get_tmp_path()
            self.tmp_dir_big = pyhrf.get_tmp_path()

        simu = simulate_sessions(output_dir=self.tmp_dir_small,
                                 snr_scenario='high_snr',
                                 spatial_size='tiny')
        self.data_small_simu = merge_fmri_sessions(simu)

        simu = simulate_sessions(output_dir=self.tmp_dir_big,
                                 snr_scenario='low_snr',
                                 spatial_size='normal')
        self.data_simu = merge_fmri_sessions(simu)

        # Parameters for multi-sessions sampler
        dict_beta_single = {
            BetaSampler.P_VAL_INI: np.array([0.5]),
            BetaSampler.P_SAMPLE_FLAG: False,
            BetaSampler.P_PARTITION_FUNCTION_METH: 'es',
            BetaSampler.P_USE_TRUE_VALUE: False,
        }

        dict_hrf_single = {
            HRF_MultiSess_Sampler.P_SAMPLE_FLAG: False,
            HRF_MultiSess_Sampler.P_NORMALISE: 1.,  # normalise samples
            HRF_MultiSess_Sampler.P_USE_TRUE_VALUE: True,
            HRF_MultiSess_Sampler.P_ZERO_CONSTR: True,
            #HRF_MultiSess_Sampler.P_PRIOR_TYPE : 'singleHRF',
        }

        dict_var_hrf_single = {
            RHSampler.P_SAMPLE_FLAG: False,
            RHSampler.P_VAL_INI: np.array([0.001]),
        }

        dict_nrl_sess_single = {
            NRL_Multi_Sess_Sampler.P_SAMPLE_FLAG: False,
            NRL_Multi_Sess_Sampler.P_USE_TRUE_VALUE: True,
        }

        dict_nrl_sess_var_single = {
            Variance_GaussianNRL_Multi_Sess.P_SAMPLE_FLAG: False,
            Variance_GaussianNRL_Multi_Sess.P_USE_TRUE_VALUE: True,
        }

        dict_nrl_bar_single = {
            NRLsBar_Drift_Multi_Sess_Sampler.P_SAMPLE_FLAG: False,
            NRLsBar_Drift_Multi_Sess_Sampler.P_USE_TRUE_NRLS: True,
            NRLsBar_Drift_Multi_Sess_Sampler.P_SAMPLE_LABELS: False,
            NRLsBar_Drift_Multi_Sess_Sampler.P_USE_TRUE_LABELS: True,
        }

        dict_drift_single = {
            Drift_MultiSess_Sampler.P_SAMPLE_FLAG: False,
            Drift_MultiSess_Sampler.P_USE_TRUE_VALUE: True,
        }

        dict_drift_var_single = {
            ETASampler_MultiSess.P_SAMPLE_FLAG: False,
            ETASampler_MultiSess.P_USE_TRUE_VALUE: True,
        }

        dict_noise_var_single = {
            NoiseVariance_Drift_Multi_Sess_Sampler.P_SAMPLE_FLAG: False,
            NoiseVariance_Drift_Multi_Sess_Sampler.P_USE_TRUE_VALUE: True,
        }

        dict_mixt_param_single = {
            BiGaussMixtureParamsSampler.P_SAMPLE_FLAG: False,
            BiGaussMixtureParamsSampler.P_USE_TRUE_VALUE: True,
            BiGaussMixtureParamsSampler.P_HYPER_PRIOR: 'Jeffrey',
        }

        self.sampler_params_for_single_test = {
            BMSS.P_NB_ITERATIONS:
            100,
            BMSS.P_SMPL_HIST_PACE:
            -1,
            BMSS.P_OBS_HIST_PACE:
            -1,
            # level of spatial correlation = beta
            BMSS.P_BETA:
            BetaSampler(dict_beta_single),
            # HRF
            BMSS.P_HRF:
            HRF_MultiSess_Sampler(dict_hrf_single),
            # HRF variance
            BMSS.P_RH:
            RHSampler(dict_var_hrf_single),
            # neural response levels (stimulus-induced effects) by session
            BMSS.P_NRLS_SESS:
            NRL_Multi_Sess_Sampler(dict_nrl_sess_single),
            # neural response levels by session --> variance
            BMSS.P_NRLS_SESS_VAR:
            Variance_GaussianNRL_Multi_Sess(dict_nrl_sess_var_single),
            # neural response levels mean: over sessions
            BMSS.P_NRLS_BAR:
            NRLsBar_Drift_Multi_Sess_Sampler(dict_nrl_bar_single),
            # drift
            BMSS.P_DRIFT:
            Drift_MultiSess_Sampler(dict_drift_single),
            #drift variance
            BMSS.P_ETA:
            ETASampler_MultiSess(dict_drift_var_single),
            #noise variance
            BMSS.P_NOISE_VAR_SESS:
            NoiseVariance_Drift_Multi_Sess_Sampler(dict_noise_var_single),
            #weights of the mixture
            #parameters of the mixture
            BMSS.P_MIXT_PARAM_NRLS_BAR:
            BiGaussMixtureParamsSampler(dict_mixt_param_single),
            BMSS.P_CHECK_FINAL_VALUE:
            'raise',  #print or raise
        }

        # Parameters for multi-sessions sampler - full test
        dict_beta_full = {
            BetaSampler.P_VAL_INI: np.array([0.5]),
            BetaSampler.P_SAMPLE_FLAG: True,
            BetaSampler.P_PARTITION_FUNCTION_METH: 'es',
        }

        dict_hrf_full = {
            HRF_MultiSess_Sampler.P_SAMPLE_FLAG: True,
            HRF_MultiSess_Sampler.P_NORMALISE: 1.,  # normalise samples
            HRF_MultiSess_Sampler.P_USE_TRUE_VALUE: False,
            HRF_MultiSess_Sampler.P_ZERO_CONSTR: True,
            #HRF_MultiSess_Sampler.P_PRIOR_TYPE : 'singleHRF',
        }

        dict_var_hrf_full = {
            RHSampler.P_SAMPLE_FLAG: False,
            RHSampler.P_VAL_INI: np.array([0.001]),
        }

        dict_nrl_sess_full = {
            NRL_Multi_Sess_Sampler.P_SAMPLE_FLAG: True,
            NRL_Multi_Sess_Sampler.P_USE_TRUE_VALUE: False,
        }

        dict_nrl_sess_var_full = {
            Variance_GaussianNRL_Multi_Sess.P_SAMPLE_FLAG: True,
            Variance_GaussianNRL_Multi_Sess.P_USE_TRUE_VALUE: False,
        }

        dict_nrl_bar_full = {
            NRLsBar_Drift_Multi_Sess_Sampler.P_SAMPLE_FLAG: True,
            NRLsBar_Drift_Multi_Sess_Sampler.P_USE_TRUE_NRLS: False,
            NRLsBar_Drift_Multi_Sess_Sampler.P_SAMPLE_LABELS: True,
            NRLsBar_Drift_Multi_Sess_Sampler.P_USE_TRUE_LABELS: False,
        }

        dict_drift_full = {
            Drift_MultiSess_Sampler.P_SAMPLE_FLAG: True,
            Drift_MultiSess_Sampler.P_USE_TRUE_VALUE: False,
        }

        dict_drift_var_full = {
            ETASampler_MultiSess.P_SAMPLE_FLAG: True,
            ETASampler_MultiSess.P_USE_TRUE_VALUE: False,
        }

        dict_noise_var_full = {
            NoiseVariance_Drift_Multi_Sess_Sampler.P_SAMPLE_FLAG: True,
            NoiseVariance_Drift_Multi_Sess_Sampler.P_USE_TRUE_VALUE: False,
        }

        dict_mixt_param_full = {
            BiGaussMixtureParamsSampler.P_SAMPLE_FLAG: True,
            BiGaussMixtureParamsSampler.P_USE_TRUE_VALUE: False,
            BiGaussMixtureParamsSampler.P_HYPER_PRIOR: 'Jeffrey',
        }

        self.sampler_params_for_full_test = {
            BMSS.P_NB_ITERATIONS:
            400,
            BMSS.P_SMPL_HIST_PACE:
            -1,
            BMSS.P_OBS_HIST_PACE:
            -1,
            # level of spatial correlation = beta
            BMSS.P_BETA:
            BetaSampler(dict_beta_full),
            # HRF
            BMSS.P_HRF:
            HRF_MultiSess_Sampler(dict_hrf_full),
            # HRF variance
            BMSS.P_RH:
            RHSampler(dict_var_hrf_full),
            # neural response levels (stimulus-induced effects) by session
            BMSS.P_NRLS_SESS:
            NRL_Multi_Sess_Sampler(dict_nrl_sess_full),
            # neural response levels by session --> variance
            BMSS.P_NRLS_SESS_VAR:
            Variance_GaussianNRL_Multi_Sess(dict_nrl_sess_var_full),
            # neural response levels mean: over sessions
            BMSS.P_NRLS_BAR:
            NRLsBar_Drift_Multi_Sess_Sampler(dict_nrl_bar_full),
            # drift
            BMSS.P_DRIFT:
            Drift_MultiSess_Sampler(dict_drift_full),
            #drift variance
            BMSS.P_ETA:
            ETASampler_MultiSess(dict_drift_var_full),
            #noise variance
            BMSS.P_NOISE_VAR_SESS:
            NoiseVariance_Drift_Multi_Sess_Sampler(dict_noise_var_full),
            #weights of the mixture
            #parameters of the mixture
            BMSS.P_MIXT_PARAM_NRLS_BAR:
            BiGaussMixtureParamsSampler(dict_mixt_param_full),
            BMSS.P_CHECK_FINAL_VALUE:
            'raise',  #print or raise
        }
Example #26
# -*- coding: utf-8 -*-
#
"""
Compute the mean of BOLD signal within parcels.

This is an example of several operations for xndarray:
- explosion of data according to a parcellation mask
- mean over voxel
- merge of several xndarray objects
"""
import os.path as op
from pyhrf import get_data_file_name, get_tmp_path
from pyhrf.ndarray import xndarray, merge

func_data = xndarray.load(get_data_file_name('subj0_bold_session0.nii.gz'))
parcellation = xndarray.load(get_data_file_name('subj0_parcellation.nii.gz'))
parcel_fdata = func_data.explode(parcellation)
parcel_means = dict((parcel_id, d.copy().fill(d.mean('position')))
                    for parcel_id, d in parcel_fdata.items())
parcel_means = merge(parcel_means, parcellation, axis='position')
output_fn = op.join(get_tmp_path(), './subj0_bold_parcel_means.nii')
print 'File saved to:', output_fn
parcel_means.save(output_fn)
#TODO test full script
Example #27
    def run(self, parallel=None, n_jobs=None):
        """
        Run the analysis: load data, run estimation, output results
        """
        if parallel is None:
            result = self.execute()
        elif parallel == 'local':
            cfg_parallel = pyhrf.cfg['parallel-local']
            try:
                from joblib import Parallel, delayed
            except ImportError:
                raise Exception(
                    'Can not import joblib. It is required to '
                    'enable parallel processing on a local machine.')

            effective_level = logger.getEffectiveLevel()
            if effective_level == logging.DEBUG:
                parallel_verb = 11
            elif effective_level == logging.INFO:
                parallel_verb = 2
            else:
                parallel_verb = 0

            if n_jobs is None:
                if cfg_parallel["nb_procs"]:
                    n_jobs = cfg_parallel["nb_procs"]
                else:
                    n_jobs = available_cpu_count()

            p = Parallel(n_jobs=n_jobs, verbose=parallel_verb)
            result = p(delayed(exec_t)(t) for t in self.split(output_dir=None))
            # join list of lists:
            result = list(itertools.chain.from_iterable(result))

        elif parallel == 'LAN':

            from pyhrf import grid
            cfg_parallel = pyhrf.cfg['parallel-LAN']
            remoteUser = cfg_parallel['user']

            # 1. Some checks on input/output directory
            remoteDir = cfg_parallel['remote_path']
            # At the end, results will be retrieved direclty from remoteDir,
            # which has to be readable
            if remoteDir is None or not op.exists(remoteDir):
                raise Exception('Remote directory is not readable (%s). '
                                'Consider mounting it with sshfs.' % remoteDir)

            # Try if remoteDir is writeable, so that we don't need to upload
            # data via ssh
            remote_writeable = False
            if os.access(remoteDir, os.W_OK):
                remote_writeable = True
                tmpDir = remoteDir
            else:
                logger.info('Remote dir is not writeable -> using tmp '
                            'dir to store split data & then upload.')
                tmpDir = pyhrf.get_tmp_path()

            # 2. split roi data
            logger.info('Path to store sub treatments: %s', tmpDir)
            treatments_dump_files = []
            self.split(dump_sub_results=True,
                       output_dir=tmpDir,
                       make_sub_outputs=False,
                       output_file_list=treatments_dump_files)

            # 3. copy data to remote directory
            if not remote_writeable:
                host = cfg_parallel['remote_host']
                logger.info('Uploading data to %s ...', remoteDir)
                remote_input_files = remote_copy(treatments_dump_files, host,
                                                 remoteUser, remoteDir)

            # 4. create job list
            tasks_list = []
            for f in treatments_dump_files:
                f = op.join(remoteDir, op.basename(f))
                nice = cfg_parallel['niceness']
                tasks_list.append(
                    'nice -n %d %s -v%d -t "%s"' %
                    (nice, 'pyhrf_jde_estim', logger.getEffectiveLevel(), f))
            mode = 'dispatch'
            tasks = grid.read_tasks(';'.join(tasks_list), mode)
            timeslot = grid.read_timeslot('allday')
            hosts = grid.read_hosts(cfg_parallel['hosts'])

            if self.output_dir is not None:
                brokenfile = op.join(self.output_dir, 'pyhrf-broken_cmd.batch')
                logfile = op.join(self.output_dir, 'pyhrf-parallel.log')
                logger.info('Log file for process dispatching: %s', logfile)
            else:
                brokenfile = None
                logfile = None

            # 3. launch them
            logger.info('Dispatching processes ...')
            try:
                grid.run_grid(mode,
                              hosts,
                              'rsa',
                              tasks,
                              timeslot,
                              brokenfile,
                              logfile,
                              user=remoteUser)
                grid.kill_threads()
            except KeyboardInterrupt:
                grid.quit(None, None)

            if brokenfile is not None and len(
                    open(brokenfile).readlines()) > 0:
                logger.info('There are some broken commands, trying again ...')
                try:
                    tasks = grid.read_tasks(brokenfile, mode)
                    grid.run_grid(mode,
                                  hosts,
                                  'rsa',
                                  tasks,
                                  timeslot,
                                  brokenfile,
                                  logfile,
                                  user=remoteUser)
                    grid.kill_threads()
                except KeyboardInterrupt:
                    grid.quit(None, None)

            # 3.1 grab everything back ??
            # try:
            # "scp %s@%s:%s %s" %(remoteUser,host,
            #                     op.join(remoteDir,'result*'),
            #                     op.abspath(op.dirname(options.cfgFile))))
            # TODO : test if everything went fine

            # 4. merge all results and create outputs
            result = []
            # if op.exists(remoteDir): TODO :scp if remoteDir not readable
            nb_treatments = len(treatments_dump_files)
            remote_result_files = [
                op.join(remoteDir, 'result_%04d.pck' % i)
                for i in range(nb_treatments)
            ]
            logger.info('remote_result_files: %s', str(remote_result_files))
            nres = len(filter(op.exists, remote_result_files))
            if nres == nb_treatments:
                logger.info('Grabbing results ...')
                for fnresult in remote_result_files:
                    fresult = open(fnresult)
                    result.append(cPickle.load(fresult)[0])
                    fresult.close()
            else:
                print 'Found only %d result files (expected %d)' \
                    % (nres, nb_treatments)
                print 'Something went wrong, check the log files'
            if not remote_writeable:
                logger.info('Cleaning tmp dir (%s)...', tmpDir)
                shutil.rmtree(tmpDir)
                logger.info('Cleaning up remote dir (%s) through ssh ...',
                            remoteDir)
                cmd = 'ssh %s@%s rm -f "%s" "%s" ' \
                    % (remoteUser, host, ' '.join(remote_result_files),
                       ' '.join(remote_input_files))
                logger.info(cmd)
                os.system(cmd)
            else:
                if 0:
                    logger.info('Cleaning up remote dir (%s)...', remoteDir)
                    for f in os.listdir(remoteDir):
                        os.remove(op.join(remoteDir, f))

        elif parallel == 'cluster':

            from pyhrf.parallel import run_soma_workflow
            cfg = pyhrf.cfg['parallel-cluster']
            # create tmp remote path:
            date_now = time.strftime('%c').replace(' ', '_').replace(':', '_')
            remote_path = op.join(cfg['remote_path'], date_now)
            logger.info('Create tmp remote dir: %s', remote_path)
            remote_mkdir(cfg['server'], cfg['user'], remote_path)
            t_name = 'default_treatment'
            tmp_dir = pyhrf.get_tmp_path()
            label_for_cluster = self.analyser.get_label()
            if self.output_dir is None:
                out_dir = pyhrf.get_tmp_path()
            else:
                out_dir = self.output_dir
            result = run_soma_workflow({t_name: self},
                                       'pyhrf_jde_estim', {t_name: tmp_dir},
                                       cfg['server_id'],
                                       cfg['server'],
                                       cfg['user'], {t_name: remote_path},
                                       {t_name: op.abspath(out_dir)},
                                       label_for_cluster,
                                       wait_ending=True)

        else:
            raise Exception('Parallel mode "%s" not available' % parallel)

        logger.info('Retrieved %d results', len(result))
        return self.output(result, (self.result_dump_file is not None),
                           self.make_outputs)
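A usage sketch (assuming 'treatment' is a configured treatment object exposing the run() method above):

result_outputs = treatment.run(parallel='local', n_jobs=4)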
Example #28
 def setUp(self):
     self.tmp_dir = pyhrf.get_tmp_path() #'./'
Example #29
 def setUp(self):
     # called before any unit test of the class
     self.tmp_path = pyhrf.get_tmp_path()  # create a temporary folder
     self.clean_tmp = True
Example #30
    def setUp(self):

        #pyhrf.verbose.set_verbosity(2)

        np.random.seed(8652761)

        # tmpDir = tempfile.mkdtemp(prefix='pyhrf_tests',
        #     dir=pyhrf.cfg['global']['tmp_path'])

        self.tmp_outputs = True  # save outputs in tmp dir
        # (if False, outputs are saved in the current dir)

        if not self.tmp_outputs:
            self.tmp_dir_small = './JDE_MS_test_small_simu'
            if not op.exists(self.tmp_dir_small): os.makedirs(self.tmp_dir_small)
            self.tmp_dir_big = './JDE_MS_test_big_simu'
            if not op.exists(self.tmp_dir_big): os.makedirs(self.tmp_dir_big)
        else:
            self.tmp_dir_small = pyhrf.get_tmp_path()
            self.tmp_dir_big = pyhrf.get_tmp_path()

        simu = simulate_sessions(output_dir = self.tmp_dir_small,
                                 snr_scenario='high_snr', spatial_size='tiny')
        self.data_small_simu = merge_fmri_sessions(simu)

        simu = simulate_sessions(output_dir=self.tmp_dir_big,
                                 snr_scenario='low_snr', spatial_size='normal')
        self.data_simu = merge_fmri_sessions(simu)

        # Parameters for multi-sessions sampler
        dict_beta_single = {
                    BetaSampler.P_VAL_INI : np.array([0.5]),
                    BetaSampler.P_SAMPLE_FLAG : False,
                    BetaSampler.P_PARTITION_FUNCTION_METH : 'es',
                    BetaSampler.P_USE_TRUE_VALUE : False,
                    }

        dict_hrf_single = {
                    HRF_MultiSess_Sampler.P_SAMPLE_FLAG : False,
                    HRF_MultiSess_Sampler.P_NORMALISE : 1., # normalise samples
                    HRF_MultiSess_Sampler.P_USE_TRUE_VALUE :  True,
                    HRF_MultiSess_Sampler.P_ZERO_CONSTR :  True,
                    #HRF_MultiSess_Sampler.P_PRIOR_TYPE : 'singleHRF',
                    }

        dict_var_hrf_single = {
                        RHSampler.P_SAMPLE_FLAG : False,
                        RHSampler.P_VAL_INI : np.array([0.001]),
                    }

        dict_nrl_sess_single =   {
                        NRL_Multi_Sess_Sampler.P_SAMPLE_FLAG : False,
                        NRL_Multi_Sess_Sampler.P_USE_TRUE_VALUE :  True,
                        }

        dict_nrl_sess_var_single = {
                            Variance_GaussianNRL_Multi_Sess.P_SAMPLE_FLAG : False,
                            Variance_GaussianNRL_Multi_Sess.P_USE_TRUE_VALUE :  True,
                            }

        dict_nrl_bar_single =  {
                        NRLsBar_Drift_Multi_Sess_Sampler.P_SAMPLE_FLAG : False,
                        NRLsBar_Drift_Multi_Sess_Sampler.P_USE_TRUE_NRLS : True,
                        NRLsBar_Drift_Multi_Sess_Sampler.P_SAMPLE_LABELS : False,
                        NRLsBar_Drift_Multi_Sess_Sampler.P_USE_TRUE_LABELS : True,
                        }

        dict_drift_single = {
                    Drift_MultiSess_Sampler.P_SAMPLE_FLAG : False,
                    Drift_MultiSess_Sampler.P_USE_TRUE_VALUE : True,
                    }

        dict_drift_var_single = {
                        ETASampler_MultiSess.P_SAMPLE_FLAG : False,
                        ETASampler_MultiSess.P_USE_TRUE_VALUE : True,
                        }

        dict_noise_var_single = {
                        NoiseVariance_Drift_Multi_Sess_Sampler.P_SAMPLE_FLAG : False,
                        NoiseVariance_Drift_Multi_Sess_Sampler.P_USE_TRUE_VALUE :  True,
                        }

        dict_mixt_param_single =  {
                            BiGaussMixtureParamsSampler.P_SAMPLE_FLAG : False,
                            BiGaussMixtureParamsSampler.P_USE_TRUE_VALUE : True,
                            BiGaussMixtureParamsSampler.P_HYPER_PRIOR : 'Jeffrey',
                            }


        self.sampler_params_for_single_test = {
            BMSS.P_NB_ITERATIONS : 100,
            BMSS.P_SMPL_HIST_PACE : -1,
            BMSS.P_OBS_HIST_PACE : -1,
            # level of spatial correlation = beta
            BMSS.P_BETA : BetaSampler(dict_beta_single),
            # HRF
            BMSS.P_HRF : HRF_MultiSess_Sampler(dict_hrf_single),
            # HRF variance
            BMSS.P_RH : RHSampler(dict_var_hrf_single),
            # neural response levels (stimulus-induced effects) by session
            BMSS.P_NRLS_SESS : NRL_Multi_Sess_Sampler(dict_nrl_sess_single),
            # neural response levels by session --> variance
            BMSS.P_NRLS_SESS_VAR : Variance_GaussianNRL_Multi_Sess(dict_nrl_sess_var_single),
            # neural response levels mean: over sessions
            BMSS.P_NRLS_BAR : NRLsBar_Drift_Multi_Sess_Sampler(dict_nrl_bar_single),
            # drift
            BMSS.P_DRIFT : Drift_MultiSess_Sampler(dict_drift_single),
            #drift variance
            BMSS.P_ETA : ETASampler_MultiSess(dict_drift_var_single),
            #noise variance
            BMSS.P_NOISE_VAR_SESS : NoiseVariance_Drift_Multi_Sess_Sampler(dict_noise_var_single),
            #weights of the mixture
            #parameters of the mixture
            BMSS.P_MIXT_PARAM_NRLS_BAR : BiGaussMixtureParamsSampler(dict_mixt_param_single),
            BMSS.P_CHECK_FINAL_VALUE : 'raise', #print or raise
        }


        # Parameters for multi-sessions sampler - full test
        dict_beta_full = {
                    BetaSampler.P_VAL_INI : np.array([0.5]),
                    BetaSampler.P_SAMPLE_FLAG : True,
                    BetaSampler.P_PARTITION_FUNCTION_METH : 'es',
                    }

        dict_hrf_full = {
                    HRF_MultiSess_Sampler.P_SAMPLE_FLAG : True,
                    HRF_MultiSess_Sampler.P_NORMALISE : 1., # normalise samples
                    HRF_MultiSess_Sampler.P_USE_TRUE_VALUE :  False,
                    HRF_MultiSess_Sampler.P_ZERO_CONSTR : True,
                    #HRF_MultiSess_Sampler.P_PRIOR_TYPE : 'singleHRF',
                    }

        dict_var_hrf_full = {
                        RHSampler.P_SAMPLE_FLAG : False,
                        RHSampler.P_VAL_INI : np.array([0.001]),
                    }

        dict_nrl_sess_full =   {
                        NRL_Multi_Sess_Sampler.P_SAMPLE_FLAG : True,
                        NRL_Multi_Sess_Sampler.P_USE_TRUE_VALUE :  False,
                        }

        dict_nrl_sess_var_full = {
                            Variance_GaussianNRL_Multi_Sess.P_SAMPLE_FLAG : True,
                            Variance_GaussianNRL_Multi_Sess.P_USE_TRUE_VALUE : False,
                            }

        dict_nrl_bar_full =  {
                        NRLsBar_Drift_Multi_Sess_Sampler.P_SAMPLE_FLAG : True,
                        NRLsBar_Drift_Multi_Sess_Sampler.P_USE_TRUE_NRLS : False,
                        NRLsBar_Drift_Multi_Sess_Sampler.P_SAMPLE_LABELS : True,
                        NRLsBar_Drift_Multi_Sess_Sampler.P_USE_TRUE_LABELS : False,
                        }

        dict_drift_full = {
                    Drift_MultiSess_Sampler.P_SAMPLE_FLAG : True,
                    Drift_MultiSess_Sampler.P_USE_TRUE_VALUE : False,
                    }

        dict_drift_var_full = {
                        ETASampler_MultiSess.P_SAMPLE_FLAG : True,
                        ETASampler_MultiSess.P_USE_TRUE_VALUE : False,
                        }

        dict_noise_var_full = {
                        NoiseVariance_Drift_Multi_Sess_Sampler.P_SAMPLE_FLAG : True,
                        NoiseVariance_Drift_Multi_Sess_Sampler.P_USE_TRUE_VALUE :  False,
                        }

        dict_mixt_param_full =  {
                            BiGaussMixtureParamsSampler.P_SAMPLE_FLAG : True,
                            BiGaussMixtureParamsSampler.P_USE_TRUE_VALUE : False,
                            BiGaussMixtureParamsSampler.P_HYPER_PRIOR : 'Jeffrey',
                            }


        self.sampler_params_for_full_test = {

            BMSS.P_NB_ITERATIONS : 400,
            BMSS.P_SMPL_HIST_PACE : -1,
            BMSS.P_OBS_HIST_PACE : -1,
            # level of spatial correlation = beta
            BMSS.P_BETA : BetaSampler(dict_beta_full),
            # HRF
            BMSS.P_HRF : HRF_MultiSess_Sampler(dict_hrf_full),
            # HRF variance
            BMSS.P_RH : RHSampler(dict_var_hrf_full),
            # neural response levels (stimulus-induced effects) by session
            BMSS.P_NRLS_SESS : NRL_Multi_Sess_Sampler(dict_nrl_sess_full),
            # neural response levels by session --> variance
            BMSS.P_NRLS_SESS_VAR : Variance_GaussianNRL_Multi_Sess(dict_nrl_sess_var_full),
            # neural response levels mean: over sessions
            BMSS.P_NRLS_BAR : NRLsBar_Drift_Multi_Sess_Sampler(dict_nrl_bar_full),
            # drift
            BMSS.P_DRIFT : Drift_MultiSess_Sampler(dict_drift_full),
            #drift variance
            BMSS.P_ETA : ETASampler_MultiSess(dict_drift_var_full),
            #noise variance
            BMSS.P_NOISE_VAR_SESS : NoiseVariance_Drift_Multi_Sess_Sampler(dict_noise_var_full),
            #weights of the mixture
            #parameters of the mixture
            BMSS.P_MIXT_PARAM_NRLS_BAR : BiGaussMixtureParamsSampler(dict_mixt_param_full),
            BMSS.P_CHECK_FINAL_VALUE : 'raise', #print or raise
        }
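
        # Hypothetical sketch (constructor usage assumed, not from the
        # source): these parameter dicts are meant to drive the
        # multi-session Gibbs sampler, roughly as:
        #   sampler = BMSS(self.sampler_params_for_full_test)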
Example no. 31
0
    def setUp(self):
        np.random.seed(8652761)

        self.tmp_dir = pyhrf.get_tmp_path()
        self.clean_tmp = False  # HACK: restore to True once debugging is done
Example no. 32
0
"""
Compute the mean of BOLD signal within parcels.

This is an example of several xndarray operations:
- explosion of data according to a parcellation mask
- mean over voxels within each parcel
- merging of several xndarray objects
"""
import os.path as op
from pyhrf import get_data_file_name, get_tmp_path
from pyhrf.ndarray import xndarray, merge

func_data = xndarray.load(get_data_file_name("subj0_bold_session0.nii.gz"))
parcellation = xndarray.load(get_data_file_name("subj0_parcellation.nii.gz"))
parcel_fdata = func_data.explode(parcellation)
parcel_means = dict((parcel_id, d.copy().fill(d.mean("position")))
                    for parcel_id, d in parcel_fdata.items())
parcel_means = merge(parcel_means, parcellation, axis="position")
output_fn = op.join(get_tmp_path(), "subj0_bold_parcel_means.nii")
parcel_means.save(output_fn)
print "File saved to:", output_fn
# TODO test full script
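
# A minimal toy round-trip of the same explode / mean / merge pattern
# (assuming, as above, that xndarray accepts a numpy array plus axis names):
import numpy as np
toy = xndarray(np.arange(6.), axes_names=["position"])
toy_mask = xndarray(np.array([1, 1, 1, 2, 2, 2]), axes_names=["position"])
toy_means = dict((pid, d.copy().fill(d.mean("position")))
                 for pid, d in toy.explode(toy_mask).items())
print "toy parcel means:", merge(toy_means, toy_mask, axis="position").data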
Example no. 33
0
    def setUp(self):

        pyhrf.verbose.set_verbosity(0)

        np.random.seed(8652761)

        self.simu_dir = pyhrf.get_tmp_path()

        # Parameters to set up a sampler where all variable samplers are OFF
        # and initialised to their true values. This is used by
        # _test_specific_samplers, which turns specific samplers back on to
        # test them (see the sketch after the dict below).

        self.sampler_params_for_single_test = {
            BMSS.P_NB_ITERATIONS : 40,
            BMSS.P_SMPL_HIST_PACE : -1,
            BMSS.P_OBS_HIST_PACE : -1,
            # level of spatial correlation = beta
            #BMSS.P_BETA : BetaSampler(dict_beta_single),
            # HRF by subject
            BMSS.P_HRF_SUBJ :  jms.HRF_Sampler(do_sampling=False,
                                               normalise=1.,
                                               use_true_value=True,
                                               zero_contraint=False,
                                               prior_type='singleHRF'),

            # HRF variance
            BMSS.P_RH_SUBJ : jms.HRFVarianceSubjectSampler(do_sampling=False,
                                                           use_true_value=True),
            # HRF group
            BMSS.P_HRF_GROUP :  jms.HRF_Group_Sampler(do_sampling=False,
                                                      normalise=1.,
                                                      use_true_value=True,
                                                      zero_contraint=False,
                                                      prior_type='singleHRF'),
            # HRF variance
            BMSS.P_RH_GROUP : jms.RHGroupSampler(do_sampling=False,
                                                 use_true_value=True),

            # neural response levels (stimulus-induced effects) by subject
            BMSS.P_NRLS_SUBJ : jms.NRLs_Sampler(do_sampling=False,
                                                use_true_value=True),
            BMSS.P_LABELS : jms.LabelSampler(do_sampling=False,
                                             use_true_value=True),
            # drift
            BMSS.P_DRIFT : jms.Drift_MultiSubj_Sampler(do_sampling=False,
                                                       use_true_value=True),
            #drift variance
            BMSS.P_ETA : jms.ETASampler_MultiSubj(do_sampling=False,
                                                  use_true_value=True),
            #noise variance
            BMSS.P_NOISE_VAR_SUBJ : \
                jms.NoiseVariance_Drift_MultiSubj_Sampler(do_sampling=False,
                                                          use_true_value=False),
            #weights of the mixture
            #parameters of the mixture
            BMSS.P_MIXT_PARAM_NRLS : jms.MixtureParamsSampler(do_sampling=False,
                                                              use_true_value=False),
            #BMSS.P_ALPHA_SUBJ : Alpha_hgroup_Sampler(dict_alpha_single),
            #BMSS.P_ALPHA_VAR_SUBJ : AlphaVar_Sampler(dict_alpha_var_single),
            BMSS.P_CHECK_FINAL_VALUE : 'none', # 'none', 'print' or 'raise'
        }
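
        # Hypothetical sketch (not from the source) of how
        # _test_specific_samplers could turn one sampler back on from these
        # all-off defaults:
        params = dict(self.sampler_params_for_single_test)
        params[BMSS.P_NRLS_SUBJ] = jms.NRLs_Sampler(do_sampling=True,
                                                    use_true_value=False)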
Example no. 34
0
    def setUp(self):
        self.tmp_dir = pyhrf.get_tmp_path()
Example no. 35
0
from pyhrf import get_tmp_path
import numpy as np
import pyhrf.boldsynth.scenarios as simu
from pyhrf.tools import Pipeline

simulation_steps = {
  'dt' : 0.6,
  'dsf' : 4, #downsampling factor -> tr = dt * dsf = 2.4
  'mask' : np.array([[[1,1,1,1,1,1,1]]]),
  'labels' : np.array([[0,0,1,1,0,1,1]]),
  'mean_act' : 3.,
  'var_act' : 0.5,
  'mean_inact' : 0.,
  'var_inact' : 0.5,
  'nrls' : simu.create_bigaussian_nrls,
  'rastered_paradigm' : np.array([[0,0,1,0,0,0,1,0]]),
  'hrf' : simu.create_canonical_hrf,
  'v_noise' : 1.,
  'bold_shape' : simu.get_bold_shape,
  'noise' : simu.create_gaussian_noise,
  'stim_induced_signal' : simu.create_stim_induced_signal,
  'bold' : simu.create_bold,
  }

simulation = Pipeline(simulation_steps)
simulation.resolve()
simulation_items = simulation.get_values()
output_dir = get_tmp_path()
print 'Save simulation to:', output_dir
simu.save_simulation(simulation_items, output_dir=output_dir)
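
# Hypothetical follow-up (assuming get_values() returns a dict keyed by the
# step names above): individual simulated quantities can then be inspected.
bold = simulation_items['bold']
hrf = simulation_items['hrf']
print 'bold shape:', bold.shape, '- hrf length:', len(hrf)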
Example no. 36
0
    def setUp(self):
        # called before any unit test of the class
        self.tmp_path = pyhrf.get_tmp_path()  # create a temporary folder
        self.clean_tmp = True
Example no. 37
0
import os
import os.path as op
import logging

import numpy as np

import pyhrf
# I/O helpers, assumed to come from pyhrf.tools._io as elsewhere in pyhrf:
from pyhrf.tools._io import (read_volume, write_volume,
                             read_texture, write_texture)

logger = logging.getLogger(__name__)


def project_fmri_from_kernels(input_mesh, kernels_file, fmri_data_file,
                              output_tex, bin_threshold=None):

    logger.info('Project data onto mesh using kernels ...')

    logger.info('Call AimsFunctionProjection -op 1 ...')

    data_files = []
    output_texs = []
    p_ids = None
    if bin_threshold is not None:
        d, h = read_volume(fmri_data_file)
        if np.allclose(d.astype(int), d):
            tmp_dir = pyhrf.get_tmp_path()
            p_ids = np.unique(d)
            logger.info('bin threshold: %f', bin_threshold)
            logger.info('pids(n=%d): %d...%d', len(p_ids), min(p_ids),
                        max(p_ids))
            for i, p_id in enumerate(p_ids):
                if p_id != 0:
                    new_p = np.zeros_like(d)
                    new_p[np.where(d == p_id)] = i + 1  # 0 is background
                    ifn = op.join(tmp_dir, 'pmask_%d.nii' % p_id)
                    write_volume(new_p, ifn, h)
                    data_files.append(ifn)
                    ofn = op.join(tmp_dir, 'ptex_%d.gii' % p_id)
                    output_texs.append(ofn)
        else:
            data_files.append(fmri_data_file)
            output_texs.append(output_tex)
    else:
        data_files.append(fmri_data_file)
        output_texs.append(output_tex)

    logger.info('input data files: %s', str(data_files))
    logger.info('output data files: %s', str(output_texs))

    for data_file, o_tex in zip(data_files, output_texs):
        projection = [
            'AimsFunctionProjection', '-op', '1', '-d', kernels_file, '-d1',
            data_file, '-m', input_mesh, '-o', o_tex
        ]

        cmd = ' '.join(map(str, projection))
        logger.info('cmd: %s', cmd)
        os.system(cmd)

    if bin_threshold is not None:
        logger.info('Binary threshold of texture at %f', bin_threshold)
        o_tex = output_texs[0]
        data, data_gii = read_texture(o_tex)
        data = (data > bin_threshold).astype(np.int32)
        logger.info('data dtype: %s', data.dtype)
        if p_ids is not None:
            for pid, o_tex in zip(p_ids[1:], output_texs[1:]):
                pdata, pdata_gii = read_texture(o_tex)
                data += (pdata > bin_threshold).astype(np.int32) * pid

        #assert (np.unique(data) == p_ids).all()
        write_texture(data, output_tex, intent='NIFTI_INTENT_LABEL')
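
# Hypothetical usage sketch; every file name below is a made-up placeholder,
# not from the source:
project_fmri_from_kernels(input_mesh='subj0_white_lh.gii',
                          kernels_file='proj_kernels_lh.ima',
                          fmri_data_file='subj0_bold_session0.nii',
                          output_tex='subj0_bold_lh.gii')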
Example no. 38
0
    def run(self, parallel=None, n_jobs=None):
        """
        Run the the analysis: load data, run estimation, output results
        """
        if parallel is None:
            result = self.execute()
        elif parallel == 'local':
            cfg_parallel = pyhrf.cfg['parallel-local']
            try:
                from joblib import Parallel, delayed
            except ImportError:
                raise Exception('Cannot import joblib. It is required to '\
                                'enable parallel processing on a local machine.')

            parallel_verb = pyhrf.verbose.verbosity
            if pyhrf.verbose.verbosity == 6:
                parallel_verb = 10

            if n_jobs is None:
                n_jobs = cfg_parallel['nb_procs']

            p = Parallel(n_jobs=n_jobs, verbose=parallel_verb)
            result = p(delayed(exec_t)(t) for t in self.split(output_dir=None))
            # join list of lists:
            result = list(itertools.chain.from_iterable(result))

        elif parallel == 'LAN':

            from pyhrf import grid
            cfg_parallel = pyhrf.cfg['parallel-LAN']
            remoteUser = cfg_parallel['user']

            #1. Some checks on input/output directory
            remoteDir = cfg_parallel['remote_path']
            # In the end, results will be retrieved directly from remoteDir,
            # which has to be readable
            if remoteDir is None or not op.exists(remoteDir):
                raise Exception('Remote directory is not readable (%s). ' \
                                'Consider mounting it with sshfs.'
                                %remoteDir)

            # Check whether remoteDir is writable, so that we don't need to
            # upload data via ssh
            remote_writeable = False
            if os.access(remoteDir, os.W_OK):
                remote_writeable = True
                tmpDir = remoteDir
            else:
                pyhrf.verbose(1, 'Remote dir is not writeable -> using tmp ' \
                                  'dir to store split data & then upload.')
                tmpDir = pyhrf.get_tmp_path()  # local tmp dir for split data

            #2. split roi data
            pyhrf.verbose(1, 'Path to store sub treatments: %s' %tmpDir)
            treatments_dump_files = []
            self.split(dump_sub_results=True, output_dir=tmpDir,
                       make_sub_outputs=False,
                       output_file_list=treatments_dump_files)

            #3. copy data to remote directory
            if not remote_writeable:
                host = cfg_parallel['remote_host']
                pyhrf.verbose(1, 'Uploading data to %s ...' %(remoteDir))
                remote_input_files = remote_copy(treatments_dump_files,
                                                 host, remoteUser, remoteDir)

            #4. create job list
            tasks_list = []
            for f in treatments_dump_files:
                f = op.join(remoteDir,op.basename(f))
                nice = cfg_parallel['niceness']
                tasks_list.append('nice -n %d %s -v%d -t "%s"' \
                                      %(nice,'pyhrf_jde_estim',
                                        pyhrf.verbose.verbosity,f))
            mode = 'dispatch'
            tasks = grid.read_tasks(';'.join(tasks_list), mode)
            timeslot = grid.read_timeslot('allday')
            hosts = grid.read_hosts(cfg_parallel['hosts'])


            if self.output_dir is not None:
                brokenfile = op.join(self.output_dir, 'pyhrf-broken_cmd.batch')
                logfile = op.join(self.output_dir, 'pyhrf-parallel.log')
                pyhrf.verbose(1, 'Log file for process dispatching: %s' \
                              %logfile)
            else:
                brokenfile = None
                logfile = None

            #5. launch them
            pyhrf.verbose(1, 'Dispatching processes ...')
            try:
                grid.run_grid(mode, hosts, 'rsa', tasks, timeslot, brokenfile,
                              logfile, user=remoteUser)
                grid.kill_threads()
            except KeyboardInterrupt:
                grid.quit(None, None)

            if brokenfile is not None and len(open(brokenfile).readlines()) > 0:
                pyhrf.verbose(1, 'There are some broken commands, '\
                                  'trying again ...')
                try:
                    tasks = grid.read_tasks(brokenfile, mode)
                    grid.run_grid(mode, hosts, 'rsa', tasks, timeslot, brokenfile,
                                  logfile, user=remoteUser)
                    grid.kill_threads()
                except KeyboardInterrupt:
                    grid.quit(None, None)

            #6. grab everything back ??
            #try:
                # "scp %s@%s:%s %s" %(remoteUser,host,
                #                     op.join(remoteDir,'result*'),
                #                     op.abspath(op.dirname(options.cfgFile))))
            #TODO : test if everything went fine

            #7. merge all results and create outputs
            result = []
            # TODO: scp results back if remoteDir is not readable
            nb_treatments = len(treatments_dump_files)
            remote_result_files = [op.join(remoteDir, 'result_%04d.pck' %i) \
                                    for i in range(nb_treatments)]
            pyhrf.verbose(1,'remote_result_files: %s' %str(remote_result_files))
            nres = len(filter(op.exists,remote_result_files))
            if nres == nb_treatments:
                pyhrf.verbose(1, 'Grabbing results ...')
                for fnresult in remote_result_files:
                    fresult = open(fnresult)
                    result.append(cPickle.load(fresult)[0])
                    fresult.close()
            else:
                print 'Found only %d result files (expected %d)' \
                    %(nres, nb_treatments)
                print 'Something went wrong, check the log files'
            if not remote_writeable:
                pyhrf.verbose(1, 'Cleaning tmp dir (%s)...' %tmpDir)
                shutil.rmtree(tmpDir)
                pyhrf.verbose(1, 'Cleaning up remote dir (%s) through ssh ...' \
                                %remoteDir)
                cmd = 'ssh %s@%s rm -f "%s" "%s" ' \
                    %(remoteUser, host, ' '.join(remote_result_files),
                      ' '.join(remote_input_files))
                pyhrf.verbose(2, cmd)
                os.system(cmd)

        elif parallel == 'cluster':

            from pyhrf.parallel import run_soma_workflow
            cfg = pyhrf.cfg['parallel-cluster']
            #create tmp remote path:
            date_now = time.strftime('%c').replace(' ','_').replace(':','_')
            remote_path = op.join(cfg['remote_path'], date_now)
            pyhrf.verbose(1,'Create tmp remote dir: %s' %remote_path)
            remote_mkdir(cfg['server'], cfg['user'], remote_path)
            t_name = 'default_treatment'
            tmp_dir = pyhrf.get_tmp_path()
            label_for_cluster = self.analyser.get_label()
            if self.output_dir is None:
                out_dir = pyhrf.get_tmp_path()
            else:
                out_dir = self.output_dir
            result = run_soma_workflow({t_name:self}, 'pyhrf_jde_estim',
                                       {t_name:tmp_dir}, cfg['server_id'],
                                       cfg['server'], cfg['user'],
                                       {t_name:remote_path},
                                       {t_name:op.abspath(out_dir)},
                                       label_for_cluster, wait_ending=True)

        else:
            raise Exception('Parallel mode "%s" not available' %parallel)

        pyhrf.verbose(1, 'Retrieved %d results' %len(result))
        return self.output(result, (self.result_dump_file is not None),
                           self.make_outputs)
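
    # Hypothetical usage sketch (class name and import path assumed, not
    # from the source): run() belongs to a treatment-like object, so a
    # local parallel analysis could be launched along the lines of:
    #   from pyhrf.ui.treatment import FMRITreatment
    #   treatment = FMRITreatment(output_dir='./results')
    #   outputs = treatment.run(parallel='local', n_jobs=4)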