Example #1
def local_random_affine_transformations(ds,
                                        distort_seeds,
                                        distort_neighbor,
                                        space,
                                        scale_fac=100,
                                        shift_fac=10):
    """Distort a dataset in the local neighborhood of selected features.

    This function is similar to ``random_affine_transformation()``, but applies
    multiple random affine transformations to a spatially constrained local
    neighborhood.

    Parameters
    ----------
    ds : Dataset
      The dataset to be transformed/distorted.
    distort_seeds : list(int)
      This is a sequence of feature ids (corresponding to the input dataset)
      that serve as anchors to determine the local neighborhood for a
      distortion. The number of seeds also determines the number of different
      local distortions that will be applied.
    distort_neighbor : callable
      An object that, when called with a coordinate, generates a sequence of
      coordinates that comprise its neighborhood (see e.g. ``Sphere()``).
    space : str
      Name of the feature attribute of the input dataset that contains the
      relevant feature coordinates (e.g. 'voxel_indices').
    scale_fac : float
      See ``random_affine_transformation()``
    shift_fac : float
      See ``random_affine_transformation()``

    Returns
    -------
    Dataset
      A dataset derived from the input dataset with added local distortions.
    """
    # which dataset attributes to aggregate
    random_stats = ['random_rotation', 'random_scale', 'random_shift']
    kwa = {space: distort_neighbor}
    qe = IndexQueryEngine(**kwa)
    qe.train(ds)
    ds_distorted = ds.copy()
    for stat in random_stats:
        ds_distorted.a[stat + 's'] = {}
    # for each seed region
    for seed in distort_seeds:
        # select the neighborhood for this seed
        # take data from the distorted dataset to avoid
        # 'losing' previous distortions
        distort_ids = qe[seed]
        ds_d = random_affine_transformation(ds_distorted[:, distort_ids],
                                            scale_fac=scale_fac,
                                            shift_fac=shift_fac)
        # recover the distortions stats for this seed
        for stat in random_stats:
            ds_distorted.a[stat + 's'].value[seed] = ds_d.a[stat].value
        # put the freshly distorted data back
        ds_distorted.samples[:, distort_ids] = ds_d.samples
    return ds_distorted
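
A minimal usage sketch (not part of the original source): the toy dataset, grid layout, and seed ids below are illustrative, and the function is assumed to be importable as in PyMVPA (e.g. from mvpa2.misc.data_generators).

import numpy as np
from mvpa2.datasets.base import Dataset
from mvpa2.misc.neighborhood import Sphere
# assumed import for the function shown above:
# from mvpa2.misc.data_generators import local_random_affine_transformations

# toy dataset: 5 samples x 125 features arranged on a 5x5x5 voxel grid
coords = [(x, y, z) for x in range(5) for y in range(5) for z in range(5)]
ds = Dataset(np.random.randn(5, 125))
ds.fa['voxel_indices'] = coords

# distort two radius-1 neighborhoods anchored at interior features 31 and 93
ds_dist = local_random_affine_transformations(
    ds, distort_seeds=[31, 93], distort_neighbor=Sphere(1),
    space='voxel_indices', scale_fac=100, shift_fac=10)

# per-seed distortion parameters are aggregated in the dataset attributes
print(ds_dist.a['random_rotations'].value.keys())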
Example #2
    def _get_trained_queryengines(self, datasets, queryengine, radius, ref_ds):
        """Helper to return trained query engine(s), either list of one or one per each dataset

        If queryengine is None, an IndexQueryEngine based on radius is created.
        """
        ndatasets = len(datasets)
        if queryengine:
            if isinstance(queryengine, (list, tuple)):
                queryengines = queryengine
                if len(queryengines) != ndatasets:
                    raise ValueError(
                        "%d query engines were specified although %d datasets "
                        "provided" % (len(queryengines), ndatasets))
                _shpaldebug("Training provided query engines")
                for qe, ds in zip(queryengines, datasets):
                    qe.train(ds)
            else:
                queryengine.train(datasets[ref_ds])
                queryengines = [queryengine]
        else:
            _shpaldebug('No custom query engines were provided. Setting up the '
                        'volumetric query engine on voxel_indices.')
            queryengine = IndexQueryEngine(voxel_indices=Sphere(radius))
            queryengine.train(datasets[ref_ds])
            queryengines = [queryengine]
        return queryengines
Example #3
    def _get_trained_queryengines(self, datasets, queryengine, radius, ref_ds):
        """Helper to return trained query engine(s), either list of one or one per each dataset

        If queryengine is None, an IndexQueryEngine based on radius is created.
        """
        ndatasets = len(datasets)
        if queryengine:
            if isinstance(queryengine, (list, tuple)):
                queryengines = queryengine
                if len(queryengines) != ndatasets:
                    raise ValueError(
                        "%d query engines were specified although %d datasets "
                        "provided" % (len(queryengines), ndatasets))
                _shpaldebug("Training provided query engines")
                for qe, ds in zip(queryengines, datasets):
                    qe.train(ds)
            else:
                queryengine.train(datasets[ref_ds])
                queryengines = [queryengine]
        else:
            _shpaldebug(
                'No custom query engines were provided. Setting up the '
                'volumetric query engine on voxel_indices.')
            queryengine = IndexQueryEngine(voxel_indices=Sphere(radius))
            queryengine.train(datasets[ref_ds])
            queryengines = [queryengine]
        return queryengines
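
For context, this helper appears to be a method of PyMVPA's SearchlightHyperalignment, and its two branches correspond to how the algorithm can be configured. A hedged sketch of both configurations from the caller's side (the dataset list dss and the radius value are placeholders):

from mvpa2.algorithms.searchlight_hyperalignment import SearchlightHyperalignment
from mvpa2.misc.neighborhood import IndexQueryEngine, Sphere

# default branch: no queryengine given, so a volumetric IndexQueryEngine is
# built internally from the radius and trained on the reference dataset
slhyper = SearchlightHyperalignment(radius=3)
mappers = slhyper(dss)

# explicit branch: one pre-built query engine per dataset (lengths must match);
# each engine is trained on its corresponding dataset
qes = [IndexQueryEngine(voxel_indices=Sphere(3)) for _ in dss]
slhyper = SearchlightHyperalignment(queryengine=qes)
mappers = slhyper(dss)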
Example #4
def local_random_affine_transformations(
        ds, distort_seeds, distort_neighbor, space, scale_fac=100,
        shift_fac=10):
    """Distort a dataset in the local neighborhood of selected features.

    This function is similar to ``random_affine_transformation()``, but applies
    multiple random affine transformations to a spatially constrained local
    neighborhood.

    Parameters
    ----------
    ds : Dataset
      The dataset to be transformed/distorted.
    distort_seeds : list(int)
      This is a sequence of feature ids (corresponding to the input dataset)
      that serve as anchors to determine the local neighborhood for a
      distortion. The number of seeds also determines the number of different
      local distortions that will be applied.
    distort_neighbor : callable
      An object that, when called with a coordinate, generates a sequence of
      coordinates that comprise its neighborhood (see e.g. ``Sphere()``).
    space : str
      Name of the feature attribute of the input dataset that contains the
      relevant feature coordinates (e.g. 'voxel_indices').
    scale_fac : float
      See ``random_affine_transformation()``
    shift_fac : float
      See ``random_affine_transformation()``

    Returns
    -------
    Dataset
      A dataset derived from the input dataset with added local distortions.
    """
    # which dataset attributes to aggregate
    random_stats = ['random_rotation', 'random_scale', 'random_shift']
    kwa = {space: distort_neighbor}
    qe = IndexQueryEngine(**kwa)
    qe.train(ds)
    ds_distorted = ds.copy()
    for stat in random_stats:
        ds_distorted.a[stat + 's'] = {}
    # for each seed region
    for seed in distort_seeds:
        # select the neighborhood for this seed
        # take data from the distorted dataset to avoid
        # 'losing' previous distortions
        distort_ids = qe[seed]
        ds_d = random_affine_transformation(
                               ds_distorted[:, distort_ids],
                               scale_fac=scale_fac,
                               shift_fac=shift_fac)
        # recover the distortions stats for this seed
        for stat in random_stats:
            ds_distorted.a[stat + 's'].value[seed] = ds_d.a[stat].value
        # put the freshly distorted data back
        ds_distorted.samples[:, distort_ids] = ds_d.samples
    return ds_distorted
cnx_tx = 489
toutdir = os.path.join(
    basedir, 'transformation_matrices',
    'iterative_cha_olp4cbp_mappers_subs-' + str(nsubs) +
    '_radius1-10_radius2-' + str(HYPERALIGNMENT_RADIUS) + '.hdf5.gz')
print(toutdir)

# load the NIfTI brain mask as a PyMVPA dataset and use it as ref_ds when defining the query engine
# masking with the brain mask itself means only the ~170k voxels (the size of the connectomes) are included
ref_ds = fmri_dataset(os.path.join(helperfiles, 'newbrainmask.nii'),
                      mask=os.path.join(helperfiles, 'newbrainmask.nii'))
print('Size of brain mask:')
print(str(len(ref_ds.fa.voxel_indices)))

# set searchlight sphere radius
sl_radius = HYPERALIGNMENT_RADIUS

# create query engine
qe = IndexQueryEngine(voxel_indices=Sphere(sl_radius))
qe.train(ref_ds)

# list all subject connectome files in common space
nfiles = glob.glob(os.path.join(chamats, '*commonspace_subs*'))
print('Loading participant data from: ')
print(chamats)
mysubs = nfiles[0:nsubs]

# import connectomes into pymvpa datasets, zscore, add voxel indices (chunks left unset), and append to the list of datasets
dss = []
for sub in range(len(mysubs)):
    ds = mv.Dataset(np.load(mysubs[sub]))
    ds.fa['voxel_indices'] = range(ds.shape[1])
    #ds.sa['chunks'] = np.repeat(i,cnx_tx)
    mv.zscore(ds, chunks_attr=None)
    dss.append(ds)
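
The snippet ends after assembling the query engine and the dataset list; judging by the output path defined above, the intended next step is searchlight hyperalignment followed by saving the resulting mappers. A hedged sketch of that step (assuming mv is mvpa2.suite; nproc and compression values are illustrative):

# run searchlight hyperalignment over the connectome datasets using the
# query engine trained on the reference brain mask, then save the mappers
slhyper = mv.SearchlightHyperalignment(queryengine=qe, nproc=1)
mappers = slhyper(dss)
mv.h5save(toutdir, mappers, compression=9)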