Example #1
def execution(self, context):

    # Load light objects first
    csd = load(self.csd_model.fullPath())

    dmri_vol = aims.read(self.diffusion_data.fullPath())
    header = dmri_vol.header()
    data = np.asarray(dmri_vol)

    if self.mask is not None:
        mask_vol = aims.read(self.mask.fullPath())
        mask_arr = np.array(mask_vol, copy=True)
        mask = mask_arr[..., 0]
        if data.shape[:-1] != mask.shape:
            raise ValueError(
                'Diffusion data and mask used do not have the same shape')
    else:
        mask = self.mask

    csdfit = csd.fit(data, mask=mask)
    sh_coeff = csdfit.shm_coeff

    sh_coeff_volume = array_to_vol(sh_coeff, header)
    aims.write(sh_coeff_volume, self.fibre_odf_sh_coeff.fullPath())
    transformManager = getTransformationManager()
    transformManager.copyReferential(self.diffusion_data,
                                     self.fibre_odf_sh_coeff)

def mesh_transform(path_mesh, path_transfo, path_mesh_out):
    transfo = aims.read(path_transfo)
    mesh = aims.read(path_mesh)
    aims.SurfaceManip.meshTransform(mesh, transfo)
    aims.write(mesh, path_mesh_out)
Example #3
def scatter_plot_files(result_file,
                       reference_file,
                       classif,
                       value_range=None,
                       range_centre=None,
                       ax=None):
    result_vol = aims.read(result_file)
    result_values = numpy.asarray(result_vol)[classif == CORTEX_LABEL]

    reference_vol = aims.read(reference_file)
    reference_values = numpy.asarray(reference_vol)[classif == CORTEX_LABEL]

    if ax is None:
        import matplotlib.pyplot
        ax = matplotlib.pyplot.figure().add_subplot(111)
    ax.scatter(reference_values, result_values)
    if value_range is None:
        value_range = (min(reference_values.min(), result_values.min()),
                       max(reference_values.max(), result_values.max()))
    if range_centre is not None:
        value_range = symmetrize_value_range(value_range, range_centre)
    ax.plot(value_range, value_range)
    ax.set_xlim(value_range)
    ax.set_ylim(value_range)
    ax.set_aspect("equal")
def execution(self, context):
    tr = aims.AffineTransformation3d()
    for t in self.MNI_transform_chain:
        ti = aims.read(t.fullPath())
        tr = ti * tr
    #context.write('transform:', tr)
    vol = aims.read(self.volume.fullPath())
    trl = vol.header().get('transformations', [])
    refl = vol.header().get('referentials', [])

    rname = aims.StandardReferentials.mniTemplateReferential()
    if rname in refl:
        trl[refl.index(rname)] = tr.toVector()
    elif len(trl) < 2:
        trl.append(tr.toVector())
        refl.append(rname)
    else:
        trl = [list(trl[0]), list(tr.toVector())] \
            + [list(t) for t in list(trl)[1:]]
        refl = [refl[0], rname] + list(refl)[1:]
    # context.write('now:', refl, trl)
    vol.header()['referentials'] = refl
    vol.header()['transformations'] = trl
    context.write('new header:', vol.header())
    aims.write(vol, self.output_volume.fullPath())
    self.output_volume.readAndUpdateMinf()
    tm = registration.getTransformationManager()
    tm.copyReferential(self.volume,
                       self.output_volume,
                       copy_transformations=False)
def execution(self, context):

    csd_model = load(self.csd_model.fullPath())
    csd_coeff = aims.read(self.fibre_odf_sh_coeff.fullPath())
    header = csd_coeff.header()
    sh_coeff = np.asarray(csd_coeff)
    mask_vol = aims.read(self.mask.fullPath())
    mask = vol_to_array(mask_vol)
    mask = array_to_mask(mask)

    try:
        S0 = aims.read(self.S0_signal.fullPath())
    except Exception:
        context.write("No b0 volume provided; assuming the non-diffusion-"
                      "weighted signal value is 1.0 in all voxels")
        S0 = 1

    csd_fit = SphHarmFit(csd_model, sh_coeff, mask)
    prediction = csd_fit.predict(gtab=None, S0=S0)
    prediction_volume = array_to_vol(prediction, header)
    aims.write(prediction_volume, self.predicted_signal.fullPath())

    # Handle referentials
    transformManager = getTransformationManager()
    transformManager.copyReferential(self.fibre_odf_sh_coeff,
                                     self.predicted_signal)
    def __init__(self, dir):
        self._dir = dir
        path = self._make_subpath
        self._classif_vol = aims.read(path("classif.nii.gz"))
        self._classif = numpy.asarray(self._classif_vol)
        self._voxel_size = tuple(self._classif_vol.getVoxelSize()[:3])
        thickness_vol = aims.read(path("reference_thickness.nii.gz"))
        self._thickness = thickness_vol.value(0, 0, 0)
Example #7
    def compute_volumes(self):
        from pluricent.checkbase.hierarchies import getfilepath
        assert (len(self.get_multiple_subjects()) == 0)
        if not hasattr(self, 'subjects'): self.get_subjects()
        if not hasattr(self, 'existingfiles'):
            self.check_database_for_existing_files()
        self.volumes = {}
        for subject in self.get_flat_subjects():
            print(subject)
            self.volumes[subject] = {}
            spm_wc_vols = [
                'spm_greymap_warped', 'spm_whitemap_warped',
                'spm_csfmap_warped', 'spm_greymap_modulated',
                'spm_whitemap_modulated', 'spm_csfmap_modulated'
            ]
            if set(spm_wc_vols).issubset(
                    set(self.existingfiles[0][subject].keys())):
                volumes = get_volumes(*[
                    getfilepath(each, self.existingfiles[0][subject][each])
                    for each in spm_wc_vols
                ])
                for v, each in zip(volumes, ['tivol', 'grey', 'white', 'csf']):
                    self.volumes[subject][each] = v

            for key in ['spm_greymap', 'spm_whitemap', 'brainmask']:
                if key in self.existingfiles[0][subject].keys():
                    from soma import aims
                    import numpy as np
                    data = aims.read(
                        getfilepath(key, self.existingfiles[0][subject][key]))
                    n = data.arraydata()
                    r = n.ravel()
                    voxel_size = np.prod(data.header()['voxel_size'])
                    if key[:3] == 'spm':
                        self.volumes[subject][key] = np.sum(
                            r - r.min()) * voxel_size
                    else:
                        self.volumes[subject][key] = r.sum(
                        ) / 255.0 * voxel_size

            for key in ['left_greywhite', 'right_greywhite']:
                if key in self.existingfiles[0][subject].keys():
                    from soma import aims
                    import numpy as np
                    data = aims.read(
                        getfilepath(key, self.existingfiles[0][subject][key]))
                    side = {
                        'L': 'left',
                        'R': 'right'
                    }[self.existingfiles[0][subject][key]['side']]
                    voxel_size = np.prod(data.header()['voxel_size'])
                    for k, v in {'grey': 100., 'white': 200.}.items():
                        self.volumes[subject]['%s_%s' %
                                              (side, k)] = pixelsOfValue(
                                                  data, v) * voxel_size
def difference_from_files(result_file, reference_file, classif_array):
    result_vol = aims.read(result_file)
    result = numpy.asarray(result_vol)

    reference_vol = aims.read(reference_file)
    reference = numpy.asarray(reference_vol)

    difference = numpy.ma.masked_array(result - reference,
                                       mask=(classif_array != CORTEX_LABEL))

    return difference
def execution(self, context):
    tex = aims.read(self.label_texture.fullPath())
    mesh = aims.read(self.mesh.fullPath())
    outmesh = aims.SurfaceManip.meshTextureBoundary(mesh, tex, -1)
    diffuse = [1., 0, 0., 1.]
    ncomp = min(len(self.mesh_color), 4)
    diffuse[:ncomp] = self.mesh_color[:ncomp]
    context.write(self.mesh_color)
    outmesh.header()['material'] = {'line_width': self.line_width,
                                    'diffuse': diffuse}
    aims.write(outmesh, self.output_boundaries_mesh.fullPath())
    tm = registration.getTransformationManager()
    tm.copyReferential(self.mesh, self.output_boundaries_mesh)
Example #10
def get_exchanged_propvol_files(classif_filename,
                                CSF_labels_on_white_filename,
                                white_labels_on_CSF_filename,
                                output_filename):
    classif = aims.read(classif_filename)
    CSF_labels_on_white = aims.read(CSF_labels_on_white_filename)
    white_labels_on_CSF = aims.read(white_labels_on_CSF_filename)
    output = aims.Volume(CSF_labels_on_white)

    np_CSF_labels_on_white = np.asarray(CSF_labels_on_white)
    np_white_labels_on_CSF = np.asarray(white_labels_on_CSF)
    np_classif = np.asarray(classif)
    np_output = np.asarray(output)

    white_mask = (np_classif == 150)
    CSF_mask = (np_classif == 50)

    np_output[white_mask] = np_CSF_labels_on_white[white_mask]
    np_output[CSF_mask] = np_white_labels_on_CSF[CSF_mask]

    temp_dir = None
    try:
        temp_dir = tempfile.mkdtemp(prefix="hcortex")
        temp_filename = os.path.join(temp_dir, 'raw_exchanged_labels.nii')
        aims.write(output, temp_filename)

        # These “failed components” will probably be separated by the
        # connected-components step
        # AimsReplaceLevel -i raw_exchanged_labels.nii.gz \
        # -o exchanged_labels.nii.gz \
        # -g 100000000 -n 0 -g 200000000 -n 0

        subprocess.check_call(["AimsConnectComp",
                               "-i", "raw_exchanged_labels.nii",
                               "-o", "connected_exchanged_labels.nii"],
                              cwd=temp_dir)

        # The background is cut in one big region + many small, restore it then
        # relabel
        propvol = aims.read(
            os.path.join(temp_dir, "connected_exchanged_labels.nii"))
    finally:
        if temp_dir:
            shutil.rmtree(temp_dir)
    np_propvol = np.asarray(propvol)
    exclusion_mask = (np_CSF_labels_on_white == -1)
    bulk_mask = (np_CSF_labels_on_white == 0)
    np_propvol[bulk_mask] = 0
    np_propvol[exclusion_mask] = -1

    relabel_positive_labels(propvol)
    aims.write(propvol, output_filename)
Example #12
def get_transformation(path_volume, path_reference_volume, path_transfo,
                       path_inverse):
    volume = aims.read(path_volume)
    reference_volume = aims.read(path_reference_volume)
    header = volume.header()
    header_ref = reference_volume.header()
    transfo_vol_to_mni = aims.AffineTransformation3d(
        header["transformations"][0])
    transfo_reference_to_mni = aims.AffineTransformation3d(
        header_ref["transformations"][0])
    final_transfo = transfo_reference_to_mni.inverse() * transfo_vol_to_mni
    inverse = final_transfo.inverse()
    aims.write(final_transfo, path_transfo)
    aims.write(inverse, path_inverse)
Example #13
def nearestVertexForLeds(mesh1, leds):
    m1 = aims.read(mesh1)
    v = np.array(m1.vertex())
    out = []
    for l in leds:
        out.append(closest_node(l, v))
    return out
Example #14
def execution(self, context):
    bvals, bvecs = read_bvals_bvecs(self.bvals.fullPath(),
                                    self.bvecs.fullPath())
    if self.round_bvals:
        context.write("Rounding b-values to the nearest hundred: useful for "
                      "shell-based models")
        bvals = np.round(bvals, -2)
    try:
        minf = self.diffusion_data.minf()
        t = minf['storage_to_memory']
    except KeyError:
        context.write("No storage_to_memory field in the minf file "
                      "associated with the volume; using the one from the "
                      "volume header")
        dwi = aims.read(self.diffusion_data.fullPath())
        header = dwi.header()
        t = header['storage_to_memory']
    finally:
        try:
            t1 = aims.AffineTransformation3d(t).toMatrix()
            aff = np.diag(t1)[:-1]
            affine = np.diag(aff)
        except Exception:
            context.write("Warning: no storage-to-memory matrix found; "
                          "assuming bvecs are in RAS (NIfTI convention) "
                          "orientation")
            affine = -1.0 * np.eye(3)

    context.write("Transforming bvecs coordinates from storage to the Aims "
                  "referential; applying the transformation:", affine)
    bvecs = np.dot(bvecs, np.transpose(affine))


    gtab = gradient_table(bvals, bvecs, b0_threshold=self.b0_threshold)
    dump(gtab, self.gradient_table.fullPath(), compress=9)

    # Handle metadata
    self.gradient_table.setMinf('rounded_bvals', self.round_bvals)
    self.gradient_table.setMinf('bvalues_uuid', self.bvals.uuid())
    self.gradient_table.setMinf('bvectors_uuid', self.bvecs.uuid())
def extract_sulcus_fundus_from_extremities(path_mesh, path_dpf,
                                           path_extremities):
    """
    Wrapper of the get_sulcus_from_extremities function
    """
    mesh = aims.read(path_mesh)
    dpf = aims.read(path_dpf)
    extremities = aims.read(path_extremities)
    ext = np.array(extremities[0])
    # round the texture to avoid error when texture modified in SurfPaint
    ext = np.round(ext)
    start = np.where(ext == 50.00)[0][0]
    end = np.where(ext == 100.00)[0][0]
    # draw the sulcus fundus line and retrieve its vertex indices on the mesh
    sulcus = get_sulcus_fundus_from_extremities(start, end, mesh, dpf)
    return sulcus
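A minimal usage sketch for the wrapper above (the file paths are hypothetical; the extremities texture is assumed to contain the values 50 and 100 painted at the two ends of the sulcus):

sulcus_vertices = extract_sulcus_fundus_from_extremities(
    'white_mesh.gii',       # hypothetical triangular mesh
    'white_dpf.gii',        # hypothetical depth potential function texture
    'extremities.gii')      # hypothetical texture with values 50 and 100
print(len(sulcus_vertices), 'vertices on the fundus line')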
Example #16
def peaks_as_spheres(path_peaks_volume, path_spheres, radius=2):
    """
    Represent peaks as spheres centred on the peak locations, with the given
    radius.
    :param path_peaks_volume: path of the boolean volume with peaks
    :param path_spheres: path of the mesh (spheres) representing the peaks
    :param radius: radius (in mm) of the spheres used for display
    """
    volume = aims.read(path_peaks_volume)
    data = np.array(volume)[..., 0]

    voxel_size = list(volume.header()['voxel_size'])[:3] + [1]
    scaling = aims.AffineTransformation3d()
    scaling.fromMatrix(np.diag(voxel_size))
    peaks_vol_coord = np.transpose(np.vstack(np.where(data != 0)))
    centers = [aims.Point3df(p) for p in peaks_vol_coord]
    print(len(centers))

    for i, center in enumerate(centers):
        center = scaling.transform(center)
        sphere = aims.SurfaceGenerator.sphere(center, radius, 300)
        if i == 0:
            spheres = sphere
        else:
            aims.SurfaceManip.meshMerge(spheres, sphere)
    aims.write(spheres, path_spheres)
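For instance, a sketch with hypothetical paths, turning a peaks mask into a mesh that can be viewed in Anatomist:

peaks_as_spheres('peaks_mask.nii.gz',    # hypothetical boolean peaks volume
                 'peaks_spheres.mesh',   # output mesh of merged spheres
                 radius=1.5)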
Example #17
def execution(self, context):
    intex = aims.read(self.input_texture.fullPath())
    roi = aims.read(self.roi_texture.fullPath())
    aroi = roi[0].arraydata()

    if self.mesh is not None:
        mesh = aims.read(self.mesh.fullPath())
        vert = mesh.vertex()
        poly = np.asarray(mesh.polygon())
        abl = [vert[p[1]] - vert[p[0]] for p in poly]
        acl = [vert[p[2]] - vert[p[0]] for p in poly]
        # polygons areas
        parea = np.sqrt(
            np.asarray([
                ab.norm2() * ac.norm2() - np.square(ab.dot(ac))
                for ab, ac in zip(abl, acl)
            ])) * 0.5
        context.write('area:', np.sum(parea))
        # assign areas to vertices
        weights = np.zeros((intex[0].arraydata().shape[0], ))
        for i, p in enumerate(poly):
            weights[p[0]] += parea[i]
            weights[p[1]] += parea[i]
            weights[p[2]] += parea[i]
        context.write('v area:', np.sum(weights) / 3)
    else:
        context.warning('mesh is not provided: averaging without weighting '
                        'by vertex areas')
        weights = np.ones((intex[0].arraydata().shape[0], ))

    rois = np.unique(aroi)
    nt = len(intex)
    shape = (np.max(rois) + 1, nt)
    out_data = np.zeros(shape, dtype=intex[0].arraydata().dtype)
    for value in rois:
        index = value
        if value < 0:
            index = 0
        for t in range(nt):
            out_data[index,
                     t] = np.average(intex[t].arraydata()[aroi == value],
                                     weights=weights[aroi == value])
    if nt == 1:
        out_data = out_data.ravel()
    np.save(self.output_reduced_data.fullPath(), out_data)
Example #18
def execution(self, context):

    data_vol = aims.read(self.dwi_data.fullPath())
    header = data_vol.header()
    data = vol_to_array(data_vol)
    sigma = piesno(data, self.coil_number, alpha=self.alpha, l=self.trials,
                   itermax=ITERMAX, eps=EPS, return_mask=False)
    sigma_arr = sigma * np.ones(data.shape[:-1], dtype=np.float32)
    sigma_vol = array_to_vol(sigma_arr, header=header)
    aims.write(sigma_vol, self.sigma.fullPath())
def fix_cortex_topology_files(input_filename, output_filename,
                              filling_size, fclosing):
    """Call highres_cortex.cortex_topo.fix_cortex_topology on files."""
    input_volume = aims.read(input_filename)
    output = highres_cortex.cortex_topo.fix_cortex_topology(
        input_volume, filling_size, fclosing)
    # BUG: aims.write offers no error checking, so the program will exit
    # successfully even if writing fails
    aims.write(output, output_filename)
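Given the missing error checking noted above, a cautious caller can at least verify that the output file was written; this checked_write helper is a sketch, not part of the original code:

import os
from soma import aims

def checked_write(volume, filename):
    """Write a volume and do a basic sanity check on the result."""
    aims.write(volume, filename)  # aims.write itself reports no failure
    if not os.path.exists(filename) or os.path.getsize(filename) == 0:
        raise IOError('writing %s apparently failed' % filename)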
Example #20
def execution(self, context):
    from soma import aims
    graph = aims.read(self.read.fullPath())
    aims.GraphManip.buckets2Volume(graph)
    if self.extract_volume:
        vol = graph[self.extract_volume]
    else:
        atts = [
            x for x in graph.keys()
            if isinstance(graph[x], aims.rc_ptr_Volume_S16)
        ]
        if len(atts) == 0:
            raise RuntimeError(_t_('the ROI graph contains no voxel data'))
        elif len(atts) > 1:
            raise RuntimeError(
                _t_('the ROI graph contains several volumes. '
                    'Select the extract_volume parameter as one in ') + '( ' +
                ', '.join(atts) + ' )')
        vol = graph[atts[0]]
    # handle bounding box which may have cropped the data
    bmin = graph['boundingbox_min']
    bmax = graph['boundingbox_max']
    context.write('vol:', vol)
    context.write('bmin:', bmin, ', bmax:', bmax, ', size:', vol.getSize())
    if bmin[:3] != [0, 0, 0] \
            or bmax[:3] != [x+1 for x in vol.getSize()[:3]]:
        context.write('enlarging')
        # needs expanding in a bigger volume
        vol2 = aims.Volume_S16(bmax[0] + 1, bmax[1] + 1, bmax[2] + 1)
        vol2.fill(0)
        ar = vol2.np
        ar[bmin[0]:bmax[0] + 1, bmin[1]:bmax[1] + 1, bmin[2]:bmax[2] + 1, :] \
            = vol.np

        if self.extract_contours == 'Yes':
            ar_copy = ar.copy()
            for label in [v['roi_label'] for v in graph.vertices()]:
                ind = list(zip(*np.where(ar_copy == label)))
                for i in ind:
                    erase = True
                    for neigh in neighbors(*i):
                        if ar_copy[neigh] != label:
                            erase = False
                    if erase:
                        ar[i] = 0

        vol2.copyHeaderFrom(vol.header())
        aims.write(vol2, self.write.fullPath())
    else:
        # bounding box OK
        aims.write(vol.get(), self.write.fullPath())
    registration.getTransformationManager().copyReferential(
        self.read, self.write)
    if self.removeSource:
        for f in self.read.fullPaths():
            shelltools.rm(f)
    def create_point_object(cls, coordinates):
        cross_name = os.path.join(cls.anatomist.anatomistSharedPath(),
                                  "cursors", "cross.mesh")
        cross_mesh = aims.read(cross_name)
        motion = aims.Motion()
        motion.setTranslation(coordinates)
        aims.SurfaceManip.meshTransform(cross_mesh, motion)
        cross_object = cls.anatomist.toAObject(cross_mesh)
        cross_object.releaseAppRef()
        return cross_object
Example #22
def main(path_mesh, path_scalar_function, path_gradient,
         gradient_type='vertex'):
    '''
    :param path_mesh: path to the triangular mesh
    :type path_mesh: str
    :param path_scalar_function: path to the scalar function texture
    :type path_scalar_function: str
    :param path_gradient: path where the gradient is saved (pickled)
    :type path_gradient: str
    :param gradient_type: gradient on the triangles ('triangles') or on the
        vertices ('vertex')
    :type gradient_type: str
    '''
    mesh = aims.read(path_mesh)
    scalar_func = aims.read(path_scalar_function)
    if gradient_type == 'triangles':
        grad = TriangleGradient(mesh, scalar_func)
    else:
        grad = Gradient(mesh, scalar_func)
    with open(path_gradient, 'wb') as f:
        pickle.dump(grad, f)
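A usage sketch with hypothetical paths (TriangleGradient and Gradient are assumed to be defined in the surrounding module):

main('white_mesh.gii', 'curvature.gii', 'gradient.pkl',
     gradient_type='vertex')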
Example #23
def remove_ref_from_headers(fp, replace=False):
    print(fp)
    i = aims.read(fp)
    i.header().update({'referentials': [], 'transformations': []})
    s = osp.basename(fp).split('.')
    basename, ext = s[0], '.'.join(s[1:])
    suffix = '_nohdr' if not replace else ''
    fp2 = osp.join(osp.dirname(fp), '%s%s.%s' % (basename, suffix, ext))
    print('writing', fp2)
    aims.write(i, fp2)
Example #24
def execution(self, context):
    from soma import aims
    import numpy as np
    vol = aims.read(self.input_image.fullPath())
    vol_arr = np.asarray(vol)
    w = np.where(vol_arr == 0)
    noise = np.random.normal(
        self.noise_average, self.noise_stdev, w[0].shape)
    noise[noise < 0] = 0
    vol_arr[w] = noise
    aims.write(vol, self.output_image.fullPath())
Example #25
def postprocess_equivolumetric_depth(input_filename, classif_filename,
                                     output_filename):
    depth_vol = aims.read(input_filename)
    classif_vol = aims.read(classif_filename)

    depth_arr = np.asarray(depth_vol)
    classif_arr = np.asarray(classif_vol)

    depth_arr[classif_arr == CSF_LABEL] = 0.0
    depth_arr[classif_arr == WHITE_LABEL] = 1.0

    header = depth_vol.header()
    header['cal_min'] = 0.0
    header['cal_max'] = 1.0
    header['intent_code'] = 1001  # NIFTI_INTENT_ESTIMATE
    header['intent_name'] = 'Equivol. depth'
    header['descrip'] = (
        'Equivolumetric cortical depth computed with highres-cortex')

    aims.write(depth_vol, output_filename)
def read_sphere(path):
    """
    Load a sphere stored as an AimsTimeSurface and create a Dipy Sphere object.
    If no path is provided, the default Dipy sphere (symmetric362) is used.
    """
    if path is not None:
        sphere_mesh = aims.read(path)
        sphere = mesh_to_sphere(sphere_mesh)
    else:
        sphere = get_sphere()
    return sphere
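For example, read_sphere accepts None, in which case it falls back to the default Dipy sphere:

sphere = read_sphere(None)   # no mesh on disk: use the Dipy default sphere
print(sphere.vertices.shape, sphere.faces.shape)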
Example #27
def execution(self, context):
    from soma import aims, aimsalgo
    import numpy as np
    vol = aims.read(self.image_input.fullPath())
    old_t1_to_scanner = aims.AffineTransformation3d(
        vol.header()['transformations'][0])
    new_t1 = aims.read(self.target_space_image.fullPath())
    new_t1_to_scanner = aims.AffineTransformation3d(
        new_t1.header()['transformations'][0])
    old_to_new = new_t1_to_scanner.inverse() * old_t1_to_scanner
    rsp = getattr(aims, 'ResamplerFactory_' +
                  aims.typeCode(np.asarray(vol).dtype))().getResampler(0)
    rsp.setRef(vol)
    vol_resamp = rsp.doit(old_to_new, new_t1.getSizeX(), new_t1.getSizeY(),
                          new_t1.getSizeZ(),
                          new_t1.getVoxelSize()[:3])

    aims.write(vol_resamp, self.image_output.fullPath())
    tm = registration.getTransformationManager()
    tm.copyReferential(self.target_space_image, self.image_output)
def execution(self, context):

    context.write("Loading input files")
    data_vol = aims.read(self.diffusion_data.fullPath())
    hdr = data_vol.header()
    data = vol_to_array(data_vol)
    del data_vol
    if self.mask is not None:
        mask_vol = aims.read(self.mask.fullPath())
        mask = vol_to_array(mask_vol)
        del mask_vol
        mask = array_to_mask(mask)
    else:
        mask = self.mask
    tensor = load(self.tensor_model.fullPath())
    context.write("Input files loaded successfully")

    context.write(
        "Fitting the Diffusion Tensor model on data... it might take some time")
    tenfit = tensor.fit(data, mask=mask)
    context.write("Diffusion Tensor Model fitted successfully")

    tensor_coefficients = tenfit.model_params
    vol_tensor = array_to_vol(tensor_coefficients, header=hdr)
    context.write('Writing coefficient volume on disk')
    aims.write(vol_tensor, self.tensor_coefficients.fullPath())

    # save other metadata
    self.tensor_coefficients.setMinf('model_uuid', self.tensor_model.uuid())
    self.tensor_coefficients.setMinf('data_uuid', self.diffusion_data.uuid())
    if self.mask is not None:
        self.tensor_coefficients.setMinf('mask_uuid', self.mask.uuid())
    else:
        self.tensor_coefficients.setMinf('mask_uuid', 'None')

    transformManager = getTransformationManager()
    transformManager.copyReferential(self.diffusion_data,
                                     self.tensor_coefficients)
    context.write("Processing finished")
def getExchangedPropagationVolume(CSF_labels_on_white, white_labels_on_CSF, classif, resultDir, keyWord):
    output = aims.Volume(CSF_labels_on_white)
    np_CSF_labels_on_white = np.asarray(CSF_labels_on_white)
    np_white_labels_on_CSF = np.asarray(white_labels_on_CSF)
    np_classif = np.asarray(classif)
    np_output = np.asarray(output)

    white_mask = (np_classif == 150)
    CSF_mask = (np_classif == 50)

    np_output[white_mask] = np_CSF_labels_on_white[white_mask]
    np_output[CSF_mask] = np_white_labels_on_CSF[CSF_mask]

    aims.write(output, resultDir + 'raw_exchanged_labels_%s.nii' % (keyWord))

    # These “failed components” will probably be separated by the
    # connected-components step
    # AimsReplaceLevel -i raw_exchanged_labels.nii.gz \
    #   -o exchanged_labels.nii.gz -g 100000000 -n 0 -g 200000000 -n 0

    subprocess.check_call(
        ["AimsConnectComp",
         "-i", resultDir + 'raw_exchanged_labels_%s.nii' % (keyWord),
         "-o", resultDir + 'connected_exchanged_labels_%s.nii' % (keyWord)])


    # The background is cut in one big region + many small, restore it then relabel
    propvol = aims.read(resultDir + 'connected_exchanged_labels_%s.nii' %(keyWord))
    np_propvol = np.asarray(propvol)
    exclusion_mask = (np_CSF_labels_on_white == -1)
    bulk_mask = (np_CSF_labels_on_white == 0)
    np_propvol[bulk_mask] = 0
    np_propvol[exclusion_mask] = -1

    def relabel_positive_labels(volume):
        size_x = volume.getSizeX()
        size_y = volume.getSizeY()
        size_z = volume.getSizeZ()
        old_to_new_labels = {}
        next_label = 1
        for z in range(size_z):
            for y in range(size_y):
                for x in range(size_x):
                    old_label = volume.at(x, y, z)
                    if old_label > 0:
                        try:
                            new_label = old_to_new_labels[old_label]
                        except KeyError:
                            new_label = next_label
                            old_to_new_labels[old_label] = new_label
                            next_label += 1
                        volume.setValue(new_label, x, y, z)

    relabel_positive_labels(propvol)
    return propvol
Example #30
def aims__main__():
    # parse command-line options
    parser = argparse.ArgumentParser()
    parser.add_argument('--input', help='Input image to resample', type=str)
    parser.add_argument('--ref', help='Reference volume', type=str)
    parser.add_argument('--interp',
                        help='Interpolation method: nn, lin, quad', type=str)
    parser.add_argument('--output', help='Output', type=str)

    options = parser.parse_args()
    src_filename = options.input
    ref_filename = options.ref
    src = aims.read(src_filename)
    ref = aims.read(ref_filename)
    output_ima = resample(src, ref)
    assert output_ima.maximum() == src.arraydata().max()

    writer = aims.Writer()
    writer.write(output_ima, options.output)
Example #31
def execution(self, context):

    data_vol = aims.read(self.dwi_data.fullPath())
    header = data_vol.header()
    data = vol_to_array(data_vol)
    sigma_vol = aims.read(self.sigma.fullPath())
    sigma = vol_to_array(sigma_vol)
    if self.brain_mask is not None:
        brain_mask_vol = aims.read(self.brain_mask.fullPath())
        brain_mask = vol_to_array(brain_mask_vol)
    else:
        brain_mask = None

    denoised_data = localpca(data,
                             sigma,
                             mask=brain_mask,
                             pca_method=self.method,
                             patch_radius=self.patch_radius,
                             tau_factor=self.tau_factor)
    denoised_data_vol = array_to_vol(denoised_data, header=header)
    aims.write(denoised_data_vol, self.denoised_dwi_data.fullPath())
Example #32
def execution(self, context):

    data_vol = aims.read(self.dwi_data.fullPath())
    header = data_vol.header()
    data = vol_to_array(data_vol)
    sigma_vol = aims.read(self.sigma.fullPath())
    sigma = vol_to_array(sigma_vol)
    if self.brain_mask is not None:
        brain_mask_vol = aims.read(self.brain_mask.fullPath())
        brain_mask = vol_to_array(brain_mask_vol)
    else:
        brain_mask = None

    denoised_data = nlmeans(data,
                            sigma,
                            mask=brain_mask,
                            patch_radius=self.patch_radius,
                            block_radius=self.block_radius,
                            rician=self.rician_noise)
    denoised_data_vol = array_to_vol(denoised_data, header=header)
    aims.write(denoised_data_vol, self.denoised_dwi_data.fullPath())
Example #33
def execution(self, context):

    csd_model = load(self.csd_model.fullPath())
    csd_coeff = aims.read(self.fibre_odf_sh_coeff.fullPath())
    h = csd_coeff.header()
    sh_coeff = np.asarray(csd_coeff)
    mask_vol = aims.read(self.mask.fullPath())
    mask = np.array(mask_vol, copy=False)[..., 0].copy()
    context.write(mask.shape)

    csd_fit = SphHarmFit(csd_model, sh_coeff, mask)

    transformManager = getTransformationManager()
    gfa = csd_fit.gfa
    GFA = self.array_to_vol(gfa, h)
    aims.write(GFA, self.generalized_fractionnal_anisotropy.fullPath())
    transformManager.copyReferential(self.fibre_odf_sh_coeff,
                                     self.generalized_fractionnal_anisotropy)
Example #34
def execution(self, context):
    # read objects from the lightest to the biggest in memory
    model = load(self.csd_model.fullPath())
    sphere = read_sphere(self.sphere.fullPath())
    mask_vol = aims.read(self.mask.fullPath())
    mask = vol_to_array(mask_vol)
    mask = array_to_mask(mask)

    sh_coeff_vol = aims.read(self.fibre_odf_sh_coeff.fullPath())
    hdr = sh_coeff_vol.header()
    sh_coeff = np.asarray(sh_coeff_vol)
    context.write("Data were successfully loaded.")
    spharmfit = SphHarmFit(model, sh_coeff, mask)

    # Do not use the classical spharmfit.odf(sphere) from Dipy directly: it
    # computes the ODF for the whole volume by default and takes far too much
    # memory.
    odf = extract_odf(spharmfit, mask, sphere)
    odf_vol = array_to_vol(odf, header=hdr)
    aims.write(odf_vol, self.fibre_odf.fullPath())

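extract_odf itself is not shown in this example; a minimal sketch of the idea, assuming a Dipy SphHarmFit-like object that supports slicing, would evaluate the ODF slice by slice instead of over the whole volume at once:

import numpy as np

def extract_odf_sketch(spharm_fit, mask, sphere):
    # Evaluate the ODF one z-slice at a time to keep memory bounded.
    odf = np.zeros(mask.shape + (len(sphere.vertices),), dtype=np.float32)
    for z in range(mask.shape[2]):
        if mask[:, :, z].any():   # skip slices with no voxel in the mask
            odf[:, :, z] = spharm_fit[:, :, z].odf(sphere)
    return odf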
Example #35
def compute_distmaps_files(classif_filename, output_distwhite_filename,
                           output_distCSF_filename, output_classif_filename):
    classif = aims.read(classif_filename)

    dist_from_white = highres_cortex.cortex_topo.signed_distance(
        classif, [100], [200], 150)
    aims.write(dist_from_white, output_distwhite_filename)

    dist_from_CSF = highres_cortex.cortex_topo.signed_distance(
        classif, [100], [0], 50)
    aims.write(dist_from_CSF, output_distCSF_filename)

    aims.write(classif, output_classif_filename)
Example #38
def get_volumes(wc_gray, wc_white, wc_csf, mwc_gray, mwc_white, mwc_csf):
  # Compute an approximate intracranial mask from unmodulated segmentations
  from soma import aims
  wc_gm_im = aims.read(wc_gray)
  wc_wm_im = aims.read(wc_white)
  wc_csf_im = aims.read(wc_csf)
  wc_gm_arr = wc_gm_im.arraydata()
  wc_wm_arr = wc_wm_im.arraydata()
  wc_csf_arr = wc_csf_im.arraydata()

  wc_sum_arr = wc_gm_arr + wc_wm_arr + wc_csf_arr
  mask = (wc_sum_arr > 0.5)

  #Compute volumes by masking the modulated segmentations with the previous mask
  mwc_gm_im = aims.read(mwc_gray)
  mwc_wm_im = aims.read(mwc_white)
  mwc_csf_im = aims.read(mwc_csf)
  mwc_gm_arr = mwc_gm_im.arraydata()
  mwc_wm_arr = mwc_wm_im.arraydata()
  mwc_csf_arr = mwc_csf_im.arraydata()
  mwc_gm_arr = mwc_gm_arr.astype('float64')
  mwc_wm_arr = mwc_wm_arr.astype('float64')
  mwc_csf_arr = mwc_csf_arr.astype('float64')

  mwc_sum_arr = mwc_gm_arr + mwc_wm_arr + mwc_csf_arr
  mwc_sum_arr[~mask] = 0.
  mwc_gm_arr[~mask] = 0.
  mwc_wm_arr[~mask] = 0.
  mwc_csf_arr[~mask] = 0.
  mwc_sum_arr[mwc_sum_arr < 0] = 0.
  mwc_gm_arr[mwc_gm_arr < 0] = 0.
  mwc_wm_arr[mwc_wm_arr < 0] = 0.
  mwc_csf_arr[mwc_csf_arr < 0] = 0.

  vox_sizes = mwc_gm_im.header()['voxel_size'].arraydata()
  vox_vol = vox_sizes[0]*vox_sizes[1]*vox_sizes[2]

  mwc_sum_arr_mm3 = mwc_sum_arr*vox_vol
  mwc_gm_arr_mm3 = mwc_gm_arr*vox_vol
  mwc_wm_arr_mm3 = mwc_wm_arr*vox_vol
  mwc_csf_arr_mm3 = mwc_csf_arr*vox_vol

  tivol = mwc_sum_arr_mm3.sum()/1000.
  gmvol = mwc_gm_arr_mm3.sum()/1000.
  wmvol = mwc_wm_arr_mm3.sum()/1000.
  csfvol = mwc_csf_arr_mm3.sum()/1000.
  volumes = [tivol, gmvol, wmvol, csfvol]
  return volumes
def voronoiFromTexture(volGW_border, tex, hemi, stopLabel, directory, keyWord):
    """This function takes a volume from which a ROI will be cut out.
    The volume should be read in with border=1 (for later dilation).
    It also takes a labelled texture (e.g. labelled manually) from which seeds
    are taken for the Voronoi classification.
    stopLabel - where to stop the propagation
    directory - path where intermediate files are written; file names include
        the keyWord
    This function returns a volume with the selected region.
    """
    vs = volGW_border.getVoxelSize()[:3]
    dims = volGW_border.getSize()
    c = aims.Converter(intype=volGW_border, outtype=aims.Volume('S16'))
    volShort = c(volGW_border)
    volGW_dilated = aimsalgo.AimsMorphoDilation(volShort, 1)

    # Some textures had values like '9.9999999999' instead of '10' -> use round
    for i, vertex in enumerate(hemi.vertex()):
        label = round(tex[0].item(i)) + 1
        posVox = (int(round(vertex[0] / vs[0])), int(round(vertex[1] / vs[1])), int(round(vertex[2] / vs[2])))
        # print posVox
        if (0 <= posVox[0] < dims[0] and 0 <= posVox[1] < dims[1]
                and 0 <= posVox[2] < dims[2]):
            volGW_dilated.setValue(label, posVox[0], posVox[1], posVox[2])

    arrDilated = np.asarray(volGW_dilated.volume())
    arrDilated[np.asarray(volGW_border) == 200] = 0
    
    aims.write(volGW_dilated, directory + 'seedsNoWM_%s.nii.gz' % (keyWord))
    # Voronoi classification
    subprocess.call(['AimsVoronoi',
                     '-i', directory + 'seedsNoWM_%s.nii.gz' % (keyWord),
                     '-o', directory + 'voronoi_%s.nii.gz' % (keyWord),
                     '-d', '32767', '-f', str(stopLabel)])
    volVoronoi = aims.read(directory + 'voronoi_%s.nii.gz' % (keyWord))
    aims.write(volVoronoi,
               directory + 'outputFromVoronoi_%s.nii.gz' % (keyWord))
    
    # label '0' in the texture was transformed into '1', so set all voxels
    # with value (stopLabel + 1) back to '0'
    arrVor = np.array(volVoronoi, copy=False)
    arrVor[arrVor == (stopLabel + 1)] = 0
    aims.write(volVoronoi, directory + 'voronoi_%s.nii.gz' % (keyWord))
    return volVoronoi
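A usage sketch with hypothetical paths and labels; note that the grey/white volume must have been read with a border, e.g. aims.read(path, 1):

volGW = aims.read('GWclassif.nii.gz', 1)   # border=1, needed for dilation
tex = aims.read('manual_labels.gii')       # hypothetical labelled texture
hemi = aims.read('hemi_white.gii')         # hypothetical hemisphere mesh
roi = voronoiFromTexture(volGW, tex, hemi, 0, '/tmp/', 'subj01')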
def fix_cortex_topology_files(input_filename, output_filename,
                              filling_size, fclosing):
    """Call highres_cortex.cortex_topo.fix_cortex_topology on files."""
    input_volume = aims.read(input_filename)

    try:
        output = highres_cortex.cortex_topo.fix_cortex_topology(
            input_volume, filling_size, fclosing)
    except OSError as exc:
        print("error: the VipHomotopic command cannot be"
              " found or executed ({0}). Please make sure that"
              " Morphologist is properly installed and that the"
              " command is in your PATH.".format(exc.strerror))
        return 1
    except subprocess.CalledProcessError as exc:
        print("error: the VipHomotopic command returned an error code ({0})."
              " Please inspect its output above for more information."
              .format(exc.returncode))
        return 1

    # BUG: aims.write offers no error checking, so the program will exit
    # successfully even if writing fails
    aims.write(output, output_filename)
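A usage sketch in the style of a command-line script (hypothetical file names; the function returns 1 on failure, like a shell exit code):

import sys

status = fix_cortex_topology_files('classif.nii.gz', 'classif_fixed.nii.gz',
                                   filling_size=2., fclosing=10.)
if status:
    sys.exit(status)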
Example #41
    def _create_truncated_file(self, filename, truncated_filename):
        ext = filename.split('.')[-1]
        tmp_file = None
        if truncated_filename.split('.')[-1] != ext:
            tmp_file = tempfile.mkstemp(suffix=ext, prefix='morpho')
            os.close(tmp_file[0])
            from soma import aims
            im = aims.read(filename)
            aims.write(im, tmp_file[1])
            filename = tmp_file[1]
            del im
        with open(filename, "rb") as image_file:
            image_piece = image_file.read(os.path.getsize(filename) // 2)
        if tmp_file:
            os.unlink(tmp_file[1])
            del tmp_file

        path = os.path.dirname(truncated_filename)
        if not os.path.exists(path):
            os.makedirs(path)
        with open(truncated_filename, "wb") as truncated_file:
            truncated_file.write(image_piece)
def correctVoxelLabels(vol, pathToVol, directory, keyWord, minSize,
                       connectivity):
    """This function takes a volume, finds all its unique labels, and a
    minimum size required for a connected component. Voxels belonging to
    too-small connected components are re-classified according to their
    neighborhood.

    For correction of the classification, use connectivity 6: it cleans the
    classification better than 26.
    """
    arr = np.asarray(vol)
    uniqueLabels = np.unique(arr)
    totalN = 0
    volCheckLabelMode = aims.read(pathToVol)
    
    for i in uniqueLabels:
        print('--------------- work with unique label %s. '
              'Re-classify small CCs. ---------------' % (i))
        
        # threshold the volume at the current label
        subprocess.call(
            ['AimsThreshold', '-i', pathToVol,
             '-o', directory + 'voronoi_Thr%s_%s.nii.gz' % (str(i), keyWord),
             '-m', 'eq', '-t', str(i), '-b', 'true', '--fg', str(i)])

        # find connected components
        subprocess.call(
            ['AimsConnectComp',
             '-i', directory + 'voronoi_Thr%s_%s.nii.gz' % (str(i), keyWord),
             '-o', directory + 'voronoi_Thr%s_CC%s_%s.nii.gz'
             % (str(i), str(connectivity), keyWord),
             '-c', str(connectivity)])

        # find big connected components
        subprocess.call(
            ['AimsConnectComp',
             '-i', directory + 'voronoi_Thr%s_%s.nii.gz' % (str(i), keyWord),
             '-o', directory + 'voronoi_Thr%s_CC%sBigger%s_%s.nii.gz'
             % (str(i), str(connectivity), str(minSize), keyWord),
             '-c', str(connectivity), '-s', str(minSize), '--verbose'])

        volAllCC = aims.read(directory + 'voronoi_Thr%s_CC%s_%s.nii.gz'
                             % (str(i), str(connectivity), keyWord))
        arrAllCC = np.asarray(volAllCC)
        arrAllCC[arrAllCC != 0] = 1     # set all included voxels to "1"
        volBigCC = aims.read(directory + 'voronoi_Thr%s_CC%sBigger%s_%s.nii.gz'
                             % (str(i), str(connectivity), str(minSize),
                                keyWord))
        arrBigCC = np.asarray(volBigCC)
        arrBigCC[arrBigCC != 0] = 1     # set all included voxels to "1"
        # voxels that were labelled initially but belong to too-small CCs
        verticesDel = np.where(arrAllCC != arrBigCC)
        numberChanged = 0

        print('for label', i, ' the number of voxels to check is ',
              len(verticesDel[0]))
        for j in range(len(verticesDel[0])):
            x = verticesDel[0][j]
            y = verticesDel[1][j]
            z = verticesDel[2][j]
            
            listOfNeighbors = []
            listOfNeighbors.append(vol.value(x, y, z + 1))
            listOfNeighbors.append(vol.value(x, y, z - 1))
            listOfNeighbors.append(vol.value(x + 1, y, z))
            listOfNeighbors.append(vol.value(x - 1, y, z))
            listOfNeighbors.append(vol.value(x, y + 1, z))
            listOfNeighbors.append(vol.value(x, y - 1, z))
            oldL = vol.value(x, y, z)
            newL = np.argmax(np.bincount(listOfNeighbors))
            
            # TODO later?: check how many neighbours have the most frequent
            # label. If two labels (e.g. '0' and '11') are equally frequent,
            # '0' is assigned; in this case maybe prefer to set '11'?
            #if len(mode(listOfNeighbors)) > 1:
                #lab1 = int(mode(listOfNeighbors)[0][0])
                #lab2 = int(mode(listOfNeighbors)[1][0])
                #labNew = int(str(lab2) + str(lab1))
                #print 'several labels with equal frequency: ', lab1, ' and ', lab2
                #volCheckLabelMode.setValue
            
            if newL != oldL:
                print('component number', j, x, y, z, listOfNeighbors,
                      ' old label:', oldL, ' new label:', newL)
                # substitute the value of this voxel with the mode value of
                # the neighbouring voxels
                vol.setValue(int(newL), x, y, z)
                numberChanged += 1
        print('numberChanged', numberChanged, 'from:', len(verticesDel[0]))
        totalN += numberChanged

    print('totalN changed:', totalN)
    return vol
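A usage sketch for the function above (hypothetical paths; connectivity 6, as recommended in the docstring):

vol = aims.read('voronoi_subj01.nii.gz')
vol = correctVoxelLabels(vol, 'voronoi_subj01.nii.gz', '/tmp/', 'subj01',
                         minSize=10, connectivity=6)
aims.write(vol, 'voronoi_corrected_subj01.nii.gz')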

import sys
import numpy as np
from soma import aims

CSF_labels = aims.read("./heat_CSF_on_bulk.nii.gz")
white_labels = aims.read("./heat_white_on_bulk.nii.gz")


def relabel_conjunctions(labels1, labels2):
    output = aims.Volume(labels1)
    output.fill(0)
    size_x = output.getSizeX()
    size_y = output.getSizeY()
    size_z = output.getSizeZ()
    old_to_new_labels = {}
    next_label = 1
    for z in range(size_z):
        for y in range(size_y):
            for x in range(size_x):
                labels = (labels1.at(x, y, z), labels2.at(x, y, z))
import os
import numpy as np
from soma import aims
import sys
rl = sys.argv[1]

PRD = os.environ['PRD']
os.chdir(os.path.join(PRD, 'surface'))
a = aims.read(rl + '_mesh_low.mesh')
b = np.zeros((len(a.vertex().list()), 3))
g = a.vertex().list()
for i in range(len(g)):
    b[i, :] = g[i][0:3]
np.savetxt(rl + '_vertices_low.txt', b)

a = aims.read(rl + '_mesh_low.mesh')
b = np.zeros((len(a.polygon().list()), 3))
g = a.polygon().list()
for i in range(len(g)):
    b[i, :] = g[i][0:3]
np.savetxt(rl + '_triangles_low.txt', b)
    print('New: exit. No keyword for results was given', file=sys.stderr)
    sys.exit(1)
else:
    keyWord = options.keyWord
    
# in the given directory create the subdirectory for the results
if not os.path.exists(result_directory):
    os.makedirs(result_directory)

print('############################################# starting od_heatMain.py '
      '#####################################################')

#subprocess.check_call(['AimsThreshold', '-b', '-m', 'di', '-t', '100', '-i', pathToClassifFile, '-o', result_directory + 'all_but_cortex_%s.nii' %(keyWord)])
#AimsThreshold -b -m di -t 100 \
    #-i ../classif.nii.gz \
    #-o ./all_but_cortex.nii
volClassif = aims.read(pathToClassifFile)
arrClassif = np.array(volClassif, copy=False)
arrClassif[arrClassif != 100] = 32767
arrClassif[arrClassif == 100] = 0
aims.write(volClassif, result_directory + 'all_but_cortex_%s.nii' %(keyWord))

# read in the classification file and convert it into float format    
#AimsFileConvert -t FLOAT \
    #-i ../classif.nii.gz \
    #-o heat.nii.gz
heatmap_before = aims.read(pathToClassifFile, 1)
c = aims.Converter(intype=heatmap_before, outtype=aims.Volume('FLOAT'))
heatmap_before = c(heatmap_before)
aims.write(heatmap_before, result_directory + 'heat_before_%s.nii.gz' %(keyWord))

statFileName = result_directory + "statFile_%s.txt" %(keyWord)
else:
    data_directory = options.data_directory
    result_directory = data_directory + '/dist/'
    
if options.keyWord is None:
    print('New: exit. No keyword for results was given', file=sys.stderr)
    sys.exit(1)
else:
    keyWord = options.keyWord
    
# in the given directory create the subdirectory for the results
if not os.path.exists(result_directory):
    os.makedirs(result_directory)

print('####################################### starting od_dystmaps.py '
      '##############################################')
classif = aims.read(pathToClassifFile)
#dist_from_white = highres_cortex.cortex_topo.fastmarching_negative(classif, [100], [200], 150)
dist_from_white = highres_cortex.cortex_topo.fastmarching_negative(classif, [100], [200], 150, False)

aims.write(dist_from_white, result_directory + 'distwhite_%s.nii.gz' %(keyWord))
print '####################################### done : dist_from_white ###############################################'

# need to visualize the distance map. Therefore get all negative values and set them to some positive values
volDistFromWhiteCopy = aims.read(result_directory + 'distwhite_%s.nii.gz' %(keyWord))
arrDistFromWhiteCopy = np.array(volDistFromWhiteCopy, copy=False)
arrDistFromWhiteCopy[arrDistFromWhiteCopy < 0] = np.max(arrDistFromWhiteCopy) + 5
aims.write(volDistFromWhiteCopy, result_directory + 'distwhiteVisu_%s.nii.gz' %(keyWord))

# now calculate distance to CSF
#dist_from_CSF = highres_cortex.cortex_topo.fastmarching_negative(classif, [100], [0], 50)
dist_from_CSF = highres_cortex.cortex_topo.fastmarching_negative(classif, [100], [0], 50, False)
    parser.add_option('-m', dest='mergedFile', help='mergedFile')   
    parser.add_option('-d', dest='resultDir', help='directory for results')
    parser.add_option('-k', dest='keyWord', help='keyword for results')

    options, args = parser.parse_args(sys.argv)
    print(options)
    print(args)

    if options.mergedFile is None:
        print('New: exit. no mergedFile given', file=sys.stderr)
        sys.exit(1)
    else:
        mergedFile = options.mergedFile

    if options.resultDir is None:
        print('New: exit. no directory for results given', file=sys.stderr)
        sys.exit(1)
    else:
        resultDir = options.resultDir

    if options.keyWord is None:
        print('New: exit. no keyWord given', file=sys.stderr)
        sys.exit(1)
    else:
        keyWord = options.keyWord
    
    
    input_labels = aims.read(mergedFile)        
    output = relabel(input_labels)
    aims.write(output, resultDir + "merged_randomized_%s.nii.gz" %(keyWord))
Example #48
def relabel_files(input_filename, output_filename):
    input_vol = aims.read(input_filename)
    output_vol = relabel(input_vol)
    aims.write(output_vol, output_filename)
Example #49
    def test_single_surface_PFPS_ML(self):
        """PF estimation method : path sampling. ML on p(label|beta).
        topology from a surfacic RDI
        """
        # generate a field:
        beta = 0.4
        nbClasses = 2
        print('generating potts ..., beta =', beta)

        # grab surfacic data:
        from pyhrf.graph import graph_from_mesh, sub_graph, graph_is_sane
        from pyhrf.tools._io.tio import Texture
        from soma import aims
        print('import done')
        roiId = 20
        mfn = pyhrf.get_data_file_name('right_hemisphere.mesh')
        print('mesh file:', mfn)
        mesh = aims.read(mfn)
        print('mesh read')
        triangles = [t.arraydata() for t in mesh.polygon().list()]
        print('building graph ... ')
        wholeGraph = graph_from_mesh(triangles)
        
        roiMaskFile = pyhrf.get_data_file_name('roimask_gyrii_tuned.tex')
        roiMask = Texture.read(roiMaskFile).data.astype(int)
        mroi = np.where(roiMask==roiId)
        g, nm = sub_graph(wholeGraph, mroi[0])
        print("g:", len(g), len(g[0]))

        nnodes = len(g)
        points = np.vstack([v.arraydata() for v in mesh.vertex().list()])
        weights = [[1. / dist(points[j], points[k]) for k in g[j]]
                   for j in range(nnodes)]
        print("weights:", len(weights), len(weights[0]))
        
        if 1:
            for j in range(nnodes):
                s = sum(weights[j]) * 1.
                for k in range(len(weights[j])):
                    weights[j][k] = weights[j][k] / s * len(weights[j])

        labels = genPotts(g, beta, nbClasses, weights=weights)
        print(labels)
        # partition function estimation
        gridLnz = Cpt_Vec_Estim_lnZ_Graph(g, nbClasses,
                                          GraphWeight=weights)

        print('gridLnz with weights:')
        print(gridLnz)

        # beta estimation
        be, pb = beta_estim_obs_field(g, labels, gridLnz, 'ML', weights)
        print('betaML:', be)

        weights = None
        gridLnz = Cpt_Vec_Estim_lnZ_Graph(g, nbClasses,
                                          GraphWeight=weights)

        print('gridLnz without weights:')
        print(gridLnz)

        # beta estimation
        be, pb = beta_estim_obs_field(g, labels, gridLnz, 'ML', weights)
        print('betaML:', be)

        gridPace = gridLnz[1][1] - gridLnz[1][0]
        assert abs(be-beta) <= gridPace
def fix_cortex_topology(input_classif, filling_size=2., fclosing=10.):
    """Fix the topology of a cortical segmentation.

    The topology of a hollow sphere is imposed onto a voxelwise segmentation of
    the cortex, which consists of the following labels:

    Label 0 (`CSF_LABEL`)
        Outside of the cortex, corresponding to the cerebrospinal fluid,
        defined in 26-connectivity.

    Label 100 (`CORTEX_LABEL`)
        The cortex itself, defined using 6-connectivity.

    Label 200 (`WHITE_LABEL`)
        Inside of the cortex, corresponds to the white matter,
        defined in 26-connectivity.


    Parameters
    ----------

    input_classif: aims.Volume
        The input voxelwise classification.

    filling_size: float
        The size, in millimetres, of the largest holes in either cortical
        boundary that will be filled. This must be kept smaller than the
        smallest cortical thickness in the image (see `Method` below for a more
        precise description of this parameter). The default value is 2 mm,
        which is appropriate for a human brain.

    fclosing: float
        The radius, in millimetres, of the morphological closing which is used
        by VipHomotopic in Cortical surface mode to retrieve the brain's outer
        envelope. The default value, 10 mm, is appropriate for a human brain.

    Returns
    -------

    The topology-corrected voxelwise classification is returned in an
    `aims.Volume_S16`.

    Raises
    ------

    OSError
        This function throws ``OSError`` if ``VipHomotopic`` cannot be found
        or executed.

    soma.subprocess.CalledProcessError
        This exception can occur if ``VipHomotopic``, which is in charge of the
        homotopic morphological operations, terminates with an error.


    Environment
    -----------

    This function needs the ``VipHomotopic`` command from the Morphologist
    image segmentation pipeline to reside in the ``PATH``. Note that the
    original ``VipHomotopic`` has hard-coded limits on the number of iterations
    for morphological operations, which may be exceeded when working on
    high-resolution (sub-millimetre) images.

    Input/output
    ------------

    The command ``VipHomotopic``, which is used to perform the homotopic
    morphological operations, reports progress on stdout/stderr.

    Images are passed to ``VipHomotopic`` using files under a temporary
    directory allocated with `tempfile.mkdtemp`.

    Method
    ------

    The topology correction is done in two main steps:

    1. A topologically spherical bounding box of the brain is computed and
    dilated towards the inside until it reaches the white matter. This
    retrieves a topologically correct object which fits the grey--white
    boundary.

    2. The previous object is eroded from the inside in the region where it
    overlaps with the cortex. This retrieves a topologically correct pial
    boundary.

    Each of these main steps is performed in two sub-steps: the homotopic
    morphological operation is first performed towards a boundary dilated by
    `filling_size`, then towards the original boundary. This guides the front
    propagation and prevents the formation of spurious strands.

    This method will change voxels from the cortex class to either the white
    matter or the CSF class, as needed to ensure the topology.

    Note that the output is not a deterministic function of the input, because
    the homotopic operations use a pseudo-random order for the front
    propagation.
    """
    fclosing = float(fclosing)
    assert fclosing >= 0
    filling_size = float(filling_size)
    assert filling_size >= 0

    # VipHomotopic only works with 16-bit signed integer voxels.
    conv = aims.ShallowConverter(intype=input_classif, outtype="Volume_S16")
    classif = conv(input_classif)

    tmp_classif = _prepare_classif_for_VipHomotopic_Cortical(classif,
                                                             filling_size)

    tmp_dir = None
    try:
        tmp_dir = tempfile.mkdtemp(prefix="highres-cortex.")
        aims.write(tmp_classif, os.path.join(tmp_dir, "tmp_classif.nii.gz"))
        del tmp_classif
        with open(os.path.join(tmp_dir, "fake.han"), "w") as f:
            f.write("sequence: unknown\n"
                    "gray: mean: 120 sigma: 10\n"
                    "white: mean: 433 sigma: 10\n")
        # VipHomotopic in Cortical surface mode retrieves a spherical
        # grey--white boundary by iteratively eroding the bounding box of the
        # cortex in a homotopic manner. It will proceed in two steps, first
        # stopping at STEP1_FRONT_BARRIER, and finally at WHITE_LABEL.
        subprocess.check_call(["VipHomotopic", "-mode", "C",
                               "-input", "tmp_classif.nii.gz",
                               "-classif", "tmp_classif.nii.gz",
                               "-hana", "fake.han",
                               "-fclosing", repr(fclosing),
                               "-output", "cortex.nii.gz"], cwd=tmp_dir)

        aims.write(classif, os.path.join(tmp_dir, "classif.nii.gz"))

        # First boundary to guide VipHomotopic (prevent leaking through holes
        # in sulci).
        aimsdata_classif = aims.AimsData_S16(classif, 1)
        # Restore the header (in particular the voxel_size), which may not have
        # been copied in the constructor because a border is requested.
        aimsdata_classif.header().update(classif.header())
        eroded = aimsalgo.AimsMorphoErosion(aimsdata_classif, filling_size)
        del classif, aimsdata_classif
        aims.write(eroded, os.path.join(tmp_dir, "eroded.nii.gz"))
        del eroded

        # The spherical grey--white boundary is dilated in a homotopic manner
        # until the border of the eroded classification is reached.
        subprocess.check_call(["VipHomotopic", "-mode", "H",
                               "-input", "eroded.nii.gz",
                               "-cortex", "cortex.nii.gz",
                               "-fclosing", "0",
                               "-output", "bigsulci.nii.gz"], cwd=tmp_dir)
        subprocess.check_call(["VipHomotopic", "-mode", "H",
                               "-input", "classif.nii.gz",
                               "-cortex", "bigsulci.nii.gz",
                               "-fclosing", "0",
                               "-output", "pial_surface.nii.gz"], cwd=tmp_dir)

        cortex = aims.read(os.path.join(tmp_dir, "cortex.nii.gz"))
        pial_surface = aims.read(os.path.join(tmp_dir, "pial_surface.nii.gz"))
    finally:
        shutil.rmtree(tmp_dir, ignore_errors=True)
    array_cortex = np.asarray(cortex)
    array_pial_surface = np.asarray(pial_surface)

    # Combine the two homotopic objects into the final classification:
    # relabel the grey--white object output to WHITE_LABEL (200) and
    # CORTEX_LABEL (100), then use the pial surface object to mark the
    # CSF voxels (0).
    array_cortex[array_cortex == 0] = 200
    array_cortex[array_cortex == 255] = 100
    array_cortex[array_pial_surface != 0] = 0
    return cortex
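
# A minimal usage sketch (added; the file names are illustrative):
if __name__ == '__main__':
    input_classif = aims.read("classif.nii.gz")
    fixed_classif = fix_cortex_topology(input_classif)
    aims.write(fixed_classif, "classif_fixed.nii.gz")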
Example #51

import numpy as np
from soma import aims

heatmap_volume = aims.read("heat.nii.gz")
header = heatmap_volume.header()
voxel_size_x, voxel_size_y, voxel_size_z = header["voxel_size"][:3]

heat = np.asarray(heatmap_volume)

class HalfcentredGradients:
    """Hold the eight one-voxel-shifted corner views of a 3D array, from
    which gradients at half-voxel-centred positions can be computed."""
    def __init__(self, array):
        self.ppp = array[1:, 1:, 1:]
        self.ppm = array[1:, 1:, :-1]
        self.pmp = array[1:, :-1, 1:]
        self.pmm = array[1:, :-1, :-1]
        self.mpp = array[:-1, 1:, 1:]
        self.mpm = array[:-1, 1:, :-1]
        self.mmp = array[:-1, :-1, 1:]
        self.mmm = array[:-1, :-1, :-1]
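
    # Hypothetical helper (added sketch, not in the original snippet):
    # averaging the four forward differences along an axis gives that
    # component of the gradient at half-voxel centres. The axis order is
    # assumed to match voxel_size_x/y/z as unpacked above.
    def gradient(self, vx, vy, vz):
        gx = ((self.ppp + self.ppm + self.pmp + self.pmm)
              - (self.mpp + self.mpm + self.mmp + self.mmm)) / (4. * vx)
        gy = ((self.ppp + self.ppm + self.mpp + self.mpm)
              - (self.pmp + self.pmm + self.mmp + self.mmm)) / (4. * vy)
        gz = ((self.ppp + self.pmp + self.mpp + self.mmp)
              - (self.ppm + self.pmm + self.mpm + self.mmm)) / (4. * vz)
        return gx, gy, gz
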
def randomize_labels_files(input_filename, output_filename):
    input_vol = aims.read(input_filename)
    output_vol = randomize_labels(input_vol)
    aims.write(output_vol, output_filename)
Example #53

import numpy as np
from soma import aims

input_labels = aims.read("./merged.nii")

def relabel(labels):
    output = aims.Volume(labels)
    size_x = output.getSizeX()
    size_y = output.getSizeY()
    size_z = output.getSizeZ()
    old_to_new_labels = {}
    next_label = 1
    for z in range(size_z):
        for y in range(size_y):
            for x in range(size_x):
                label = labels.at(x, y, z)
                if label == 0:
                    new_label = 0
                else:
                    # Map each distinct input label to a sequential new
                    # label, in scan order.
                    new_label = old_to_new_labels.setdefault(label,
                                                             next_label)
                    if new_label == next_label:
                        next_label += 1
                output.setValue(new_label, x, y, z)
    return output

import os
import numpy as np
from soma import aims
PRD = os.environ['PRD']
os.chdir(os.path.join(PRD, 'surface'))
a = aims.read('lh_mesh_low.mesh')

# Export the vertex coordinates to a text file.
g = a.vertex().list()
b = np.zeros((len(g), 3))
for i in range(len(g)):
    b[i, :] = g[i][0:3]
np.savetxt('lh_vertices_low.txt', b)

# Export the triangle vertex indices to a text file (the mesh read above
# is reused).
g = a.polygon().list()
b = np.zeros((len(g), 3))
for i in range(len(g)):
    b[i, :] = g[i][0:3]
np.savetxt('lh_triangles_low.txt', b)
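
# Reload sketch (added): np.savetxt stores the integer triangle indices as
# floats, so cast them back when reading the files written above.
tri = np.loadtxt('lh_triangles_low.txt').astype(int)
vert = np.loadtxt('lh_vertices_low.txt')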
# decided to take T1 segmentation, (: the given pathToClassifFile)
# 1. either cut out the regions of interest or not,     -> update the keyWord
# 2. eliminate sulci skeletons, -> update the keyWord
# 3. transform it and resample into T2 space
# 4. apply Yann's scripts
    
    
############################# 1. either cut out the regions of interest or not, update the keyWord ##############################
print('cutOut is', cutOut, 'type(cutOut) is', type(cutOut))
if cutOut is True:
    print('###################################### cutOut ROIs ###################################################################')
    keyWord += '_cut'
    print('updated keyWord is:', keyWord)
    # take the seeds from the texture and perform the Voronoi classification of the voxels
    print(pathToClassifFile)
    volGWBorder = aims.read(pathToClassifFile, 1)
    # find the path to the texture file
    if realSide == 'L':
        hemisphere = 'left'
    else:
        hemisphere = 'right'
        
    filesTex = glob.glob(pathToTextures + '%s/%s/' %(realPatientID, hemisphere) + 'subject[0-9]*_side[0-1]_texture.gii')
    if len(filesTex) != 1:
        # abort the calculation: zero or several texture files were found
        print('abort the calculation: zero or several texture files were found')
        statFileName = data_directory + "statFile_%s" %(keyWord)

        # note if the processing was performed on laptop:
        if workOnLaptop:
            statFileName += '_laptop.txt'
    "s12636",
    "s12898",
    "s12081",
    "s12165",
    "s12207",
    "s12344",
    "s12352",
    "s12370",
    "s12381",
    "s12405",
    "s12414",
    "s12432",
]
# subjects = ['s12207']
for s in subjects:
    mesh = aims.read("/data/home/virgile/virgile_internship/%s/surf/lh.r.aims.white.normalized.mesh" % s)
    # anat = aims.read('/data/home/virgile/virgile_internship/s12069/mri/orig/001.nii')
    anat = aims.read(
        "/data/home/virgile/virgile_internship/s12069/experiments/smoothed_FWHM5/audio-video_z_map_smin5_theta3.3/leaves.nii"
    )

    # 4x4 affine from the volume header (array, dot and hstack are numpy
    # functions, presumably imported in the truncated top of this script).
    z = array(anat.header()["transformations"][0]).reshape(4, 4)

    for i in range(len(mesh.vertex())):
        mesh.vertex()[i] = dot(z, hstack((mesh.vertex()[i], [1])))[:3]

    # Swap two vertices of each triangle to reverse its winding, so that
    # the normals recomputed below point the right way.
    for p in mesh.polygon():
        p[0], p[2] = p[2], p[0]

    mesh.updateNormals()
    aims.write(mesh, "/data/home/virgile/virgile_internship/%s/surf/lh.r.white.normalized.gii" % s)
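
# A vectorized equivalent of the per-vertex transform loop above (a sketch;
# assumes numpy imported as np in addition to the bare array/dot/hstack):
#     verts = np.array([v[0:3] for v in mesh.vertex()])
#     hom = np.hstack((verts, np.ones((len(verts), 1))))
#     transformed = hom.dot(z.T)[:, :3]
#     for i, v in enumerate(transformed):
#         mesh.vertex()[i] = v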
Example #57
def _convert_data(old_name, new_name):
    print('converting:', old_name, 'to:', new_name)
    data = aims.read(old_name)
    aims.write(data, new_name)

heat_directory = data_directory + 'heat/'

if options.keyWord is None:
    print('New: exit. No keyword for results was given', file=sys.stderr)
    sys.exit(1)
else:
    keyWord = options.keyWord
    
# in the given directory create the subdirectory for the results
if not os.path.exists(result_directory):
    os.makedirs(result_directory)


# Build a binary domain mask of the cortex (voxels labelled 100), done in
# NumPy instead of the equivalent AimsThreshold call:
#subprocess.check_call(['time', 'AimsThreshold', '-b', '--fg', '1', '-m', 'eq', '-t', '100', '-i', pathToClassifFile, '-o', result_directory + 'domain_%s.nii' %(keyWord)])
volClassif = aims.read(pathToClassifFile)
arrClassif = np.array(volClassif, copy=False)
arrClassif[arrClassif != 100] = 0
arrClassif[arrClassif == 100] = 1
aims.write(volClassif, result_directory + 'domain_%s.nii' % keyWord)
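# Sanity check (added sketch): the domain mask written above is binary.
assert set(np.unique(arrClassif)) <= {0, 1}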
 
 
subprocess.check_call(
    ['time', 'ylAdvectTubes', '--verbose', '--step', '0.05',
     '--domain', result_directory + 'domain_%s.nii' % keyWord,
     '--grad-field', heat_directory + 'heat_%s.nii.gz' % keyWord,
     '--divergence', heat_directory + 'heat_div_gradn_%s.nii.gz' % keyWord,
     '--output-volumes',
     result_directory + 'white-tube-volumes_%s.nii.gz' % keyWord,
     '--output-surfaces',
     result_directory + 'white-tube-surfaces_%s.nii.gz' % keyWord])
# time for the whole cortex : 6m48.759s

volWV = aims.read(result_directory + 'white-tube-volumes_%s.nii.gz' % (keyWord))
volWS = aims.read(result_directory + 'white-tube-surfaces_%s.nii.gz' % (keyWord))
volWVS = volWV / volWS
aims.write(volWVS, result_directory + 'white-tube-VoverS_%s.nii.gz' % (keyWord))
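# The voxel-wise division above yields inf/NaN outside the domain, where the
# surface map is zero. A guarded NumPy variant (added sketch):
import numpy as np
arrV = np.asarray(volWV).astype(float)
arrS = np.asarray(volWS)
ratio = np.zeros_like(arrV)
nonzero = arrS != 0
ratio[nonzero] = arrV[nonzero] / arrS[nonzero]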
del volWS, volWVS
Example #59

import numpy as np
from soma import aims, aimsalgo

import highres_cortex.cortex_topo

classif = aims.read("../classif.nii.gz")

# Distance maps computed within the cortex (label 100): one propagated from
# the white matter (label 200), one from the CSF (label 0).
dist_from_white = highres_cortex.cortex_topo.fastmarching_negative(
    classif, [100], [200], 150)
aims.write(dist_from_white, "./distwhite.nii.gz")

dist_from_CSF = highres_cortex.cortex_topo.fastmarching_negative(
    classif, [100], [0], 50)
aims.write(dist_from_CSF, "./distCSF.nii.gz")

# The calls above also mark the outer boundaries in classif itself, hence
# the name of the output file.
aims.write(classif, "./classif_with_outer_boundaries.nii.gz")
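
# Quick inspection sketch (added; assumes cortical voxels keep label 100
# after the fast-marching calls):
cortex_mask = np.asarray(classif) == 100
dist_arr = np.asarray(dist_from_white)
print("distance-to-white range within cortex:",
      dist_arr[cortex_mask].min(), dist_arr[cortex_mask].max())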
    keyWord = options.keyWord
    
if options.workOnT1inT2Space is not None:
    # need to change parameters for the heat equation calculation
    n_iter = 200 # 500
    time_step = 0.04
    n_iter2 = 10 # 100
    time_step2 = 0.001
   
# in the given directory create the subdirectory for the results
if not os.path.exists(result_directory):
    os.makedirs(result_directory)

print('############################################# starting od_heatMain_NEW.py #####################################################')

############# new version by Yann (July 2015):
#ylLaplacian --classif ../classif.nii.gz --output heat.nii.gz


subprocess.check_call(['ylLaplacian', '--classif', pathToClassifFile,
                       '--output',
                       result_directory + 'heat_%s.nii.gz' % keyWord])
volHeat = aims.read(result_directory + 'heat_%s.nii.gz' % keyWord)

# Normalized gradient's divergence
vol_divGrad = highres_cortex.div_gradn.divergence_gradient(volHeat)
aims.write(vol_divGrad, result_directory + "heat_div_gradn_%s.nii.gz" % keyWord)