Example 1
def _test():
    """Tests for the Resampling Module"""
    import ClearMap.Alignment.Resampling as self
    reload(self)
    from ClearMap.Settings import ClearMapPath as basedir
    import iDISCO.IO.IO as io
    import os, numpy

    fn = os.path.join(
        basedir,
        'Test/Data/OME/16-17-27_0_8X-s3-20HF_UltraII_C00_xyz-Table Z\d{4}.ome.tif'
    )
    outfn = os.path.join(basedir, "Test/Data/Resampling/test.mhd")

    print "Making resampled stack " + outfn
    print "source datasize %s" % str(io.dataSize(fn))
    data = self.resampleData(fn,
                             sink=None,
                             resolutionSource=(1, 1, 1),
                             orientation=(1, 2, 3),
                             resolutionSink=(10, 10, 2))
    print data.shape
    io.writeData(outfn, data)

    data = self.resampleData(fn,
                             sink=None,
                             dataSizeSink=(50, 70, 10),
                             orientation=(1, 2, 3))
    print data.shape
    io.writeData(outfn, data)

    dataSizeSource, dataSizeSink, resolutionSource, resolutionSink = self.resampleDataSize(
        dataSizeSource=(100, 200, 303),
        dataSizeSink=None,
        resolutionSource=(1, 1, 1),
        resolutionSink=(5, 5, 5),
        orientation=(1, 2, 3))

    print dataSizeSource, dataSizeSink, resolutionSource, resolutionSink

    points = numpy.array([[0, 0, 0], [1, 1, 1],
                          io.dataSize(fn)])
    points = points.astype('float')
    pr = self.resamplePoints(points,
                             dataSizeSource=fn,
                             dataSizeSink=(50, 70, 10),
                             orientation=(1, 2, 3))
    print pr

    pri = self.resamplePointsInverse(pr,
                                     dataSizeSource=fn,
                                     dataSizeSink=(50, 70, 10),
                                     orientation=(-1, 2, 3))
    print pri

    result = self.resampleDataInverse(
        outfn,
        os.path.join(basedir, 'Test/Data/OME/resample_\d{4}.ome.tif'),
        dataSizeSource=fn)
    print result
Example 2
def overlayLabel(dataSource,
                 labelSource,
                 sink=None,
                 alpha=False,
                 labelColorMap='jet',
                 x=all,
                 y=all,
                 z=all):
    """Overlay a gray scale image with colored labeled image
    
    Arguments:
        dataSouce (str or array): volumetric image data
        labelSource (str or array): labeled image to be overlayed on the image data
        sink (str or None): destination for the overlayed image
        alpha (float or False): transparency
        labelColorMap (str or object): color map for the labels
        x, y, z (all or tuple): sub-range specification
    
    Returns:
        (array or str): overlaid image or file name
        
    See Also:
        :func:`overlayPoints`
    """

    label = io.readData(labelSource, x=x, y=y, z=z)
    image = io.readData(dataSource, x=x, y=y, z=z)

    lmax = label.max()  # use the loaded label array, not the (possibly str) source

    if lmax <= 1:
        carray = np.array([[1, 0, 0, 1]])
    else:
        cm = mpl.cm.get_cmap(labelColorMap)
        cNorm = mpl.colors.Normalize(vmin=1, vmax=int(lmax))
        carray = mpl.cm.ScalarMappable(norm=cNorm, cmap=cm)
        carray = carray.to_rgba(np.arange(1, int(lmax + 1)))

    if alpha == False:
        carray = np.concatenate(([[0, 0, 0, 1]], carray), axis=0)
    else:
        carray = np.concatenate(([[1, 1, 1, 1]], carray), axis=0)

    cm = mpl.colors.ListedColormap(carray)
    carray = cm(label)
    carray = carray.take([0, 1, 2], axis=-1)

    if alpha == False:
        cimage = (label == 0) * image
        cimage = np.repeat(cimage, 3)
        cimage = cimage.reshape(image.shape + (3, ))
        cimage = cimage.astype(carray.dtype)
        cimage += carray
    else:
        cimage = np.repeat(image, 3)
        cimage = cimage.reshape(image.shape + (3, ))
        cimage = cimage.astype(carray.dtype)
        cimage *= carray

    return io.writeData(sink, cimage)
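
A minimal usage sketch for overlayLabel; the file paths are placeholders and the module alias follows the imports used elsewhere on this page:

import ClearMap.Visualization.Plot as plt

# Overlay an integer annotation volume on a grayscale data volume and write the
# result; restricting z to a tuple only loads that sub-range of both volumes.
overlay = plt.overlayLabel('/path/to/data.tif',          # hypothetical grayscale volume
                           '/path/to/annotation.tif',    # hypothetical label volume
                           sink='/path/to/label_overlay.tif',
                           alpha=False,
                           labelColorMap='jet',
                           z=(0, 50))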
Example 3
def overlayPoints(dataSource,
                  pointSource,
                  sink=None,
                  pointColor=[1, 0, 0],
                  x=all,
                  y=all,
                  z=all):
    """Overlay points on 3D data and return as color image
    
    Arguments:
        dataSource (str or array): volumetric image data
        pointSource (str or array): point data to be overlaid on the image data
        sink (str or None): destination for the image with overlaid points
        pointColor (array or None): RGB color for the overlaid points
        x, y, z (all or tuple): sub-range specification
    
    Returns:
        (str or array): image overlaid with points
        
    See Also:
        :func:`overlayLabel`
    """
    data = io.readData(dataSource, x=x, y=y, z=z)
    points = io.readPoints(pointSource, x=x, y=y, z=z, shift=True)
    #print data.shape

    if not pointColor is None:
        dmax = data.max()
        dmin = data.min()
        if dmin == dmax:
            dmax = dmin + 1
        cimage = np.repeat((data - dmin) / (dmax - dmin), 3)
        cimage = cimage.reshape(data.shape + (3, ))

        if data.ndim == 2:
            for p in points:  # faster version using voxelize ?
                cimage[p[0], p[1], :] = pointColor
        elif data.ndim == 3:
            for p in points:  # faster version using voxelize ?
                cimage[p[0], p[1], p[2], :] = pointColor
        else:
            raise RuntimeError(
                'overlayPoints: data dimension %d not supported' % data.ndim)

    else:
        cimage = vox.voxelize(points, data.shape, method='Pixel')
        cimage = cimage.astype(data.dtype) * data.max()
        data.shape = data.shape + (1, )
        cimage.shape = cimage.shape + (1, )
        cimage = np.concatenate((data, cimage), axis=3)

    #print cimage.shape
    return io.writeData(sink, cimage)
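
A minimal usage sketch for overlayPoints, again with placeholder paths:

import ClearMap.Visualization.Plot as plt

# Mark detected cell coordinates in red on top of the raw volume.
overlay = plt.overlayPoints('/path/to/data.tif',         # hypothetical grayscale volume
                            '/path/to/cells.npy',        # hypothetical point coordinates
                            sink='/path/to/points_overlay.tif',
                            pointColor=[1, 0, 0])

# Passing pointColor=None instead writes a two-channel image: the raw data plus a
# voxelized point channel, as in the else branch of the function above.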
Example 4
def _test():
    """Tests for the Resampling Module"""
    import ClearMap.Alignment.Resampling as self
    reload(self)
    from ClearMap.Settings import ClearMapPath as basedir 
    import iDISCO.IO.IO as io
    import os, numpy

    fn = os.path.join(basedir, 'Test/Data/OME/16-17-27_0_8X-s3-20HF_UltraII_C00_xyz-Table Z\d{4}.ome.tif');
    outfn = os.path.join(basedir, "Test/Data/Resampling/test.mhd")
    
    print "Making resampled stack " + outfn
    print "source datasize %s" % str(io.dataSize(fn));
    data = self.resampleData(fn, sink = None, resolutionSource = (1,1,1), orientation = (1,2,3), resolutionSink = (10,10,2));
    print data.shape
    io.writeData(outfn, data)   

    data = self.resampleData(fn, sink = None, dataSizeSink = (50,70,10), orientation = (1,2,3));
    print data.shape
    io.writeData(outfn, data)   


    dataSizeSource, dataSizeSink, resolutionSource, resolutionSink = self.resampleDataSize(dataSizeSource = (100,200, 303), dataSizeSink = None, 
                                                                                      resolutionSource = (1,1,1), resolutionSink = (5,5,5), orientation = (1,2,3));

    print dataSizeSource, dataSizeSink, resolutionSource, resolutionSink
    

    points = numpy.array([[0,0,0], [1,1,1], io.dataSize(fn)]);
    points = points.astype('float')
    pr = self.resamplePoints(points, dataSizeSource = fn, dataSizeSink = (50,70,10), orientation = (1,2,3))
    print pr

    pri = self.resamplePointsInverse(pr, dataSizeSource = fn, dataSizeSink = (50,70,10), orientation = (-1,2,3))
    print pri


    result = self.resampleDataInverse(outfn, os.path.join(basedir, 'Test/Data/OME/resample_\d{4}.ome.tif'), dataSizeSource = fn);
    print result
Example 5
def resampleXY(source,
               dataSizeSink,
               sink=None,
               interpolation='linear',
               out=sys.stdout,
               verbose=True):
    """Resample a 2d image slice
    
    This routine is used for resampling a large stack in parallel in xy or xz direction.
    
    Arguments:
        source (str or array): 2d image source
        dataSizeSink (tuple): size of the resampled image
        sink (str or None): location for the resampled image
        interpolation (str): interpolation method to use: 'linear' or None (nearest pixel)
        out (stdout): where to write progress information
        verbose (bool): write progress info if true
    
    Returns:
        array or str: resampled data or file name
    """

    #out.write("Input: %s Output: " % (inputFile, soutputFile))
    data = io.readData(source)
    dataSize = data.shape

    #print dataSize, dataSizeSink

    if data.ndim != 2:
        raise RuntimeError('resampleXY: expects 2d image source, found %dd' %
                           data.ndim)
    #print sagittalImageSize;

    #dataSizeSink = tuple([int(math.ceil(dataSize[i] *  resolutionSource[i]/resolutionSink[i])) for i in range(2)]);
    if verbose:
        out.write(("resampleData: Imagesize: %d, %d " %
                   (dataSize[0], dataSize[1])) +
                  ("Resampled Imagesize: %d, %d" %
                   (dataSizeSink[0], dataSizeSink[1])))
        #out.write(("resampleData: Imagesize: %d, %d " % dataSize) + ("Resampled Imagesize: %d, %d" % (outputSize[1], outputSize[0])))

    # note: cv2.resize reverses x-Y axes
    interpolation = fixInterpolation(interpolation)
    sinkData = cv2.resize(data, (dataSizeSink[1], dataSizeSink[0]),
                          interpolation=interpolation)
    #sinkData = cv2.resize(data,  outputSize);
    #sinkData = scipy.misc.imresize(sagittalImage, outputImageSize, interp = 'bilinear'); #normalizes images -> not usefull for stacks !

    #out.write("resampleData: resized Image size: %d, %d " % sinkData.shape)

    return io.writeData(sink, sinkData)
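
A minimal usage sketch for resampleXY; file names are placeholders and the module alias mirrors the Resampling import used in the test code above:

import ClearMap.Alignment.Resampling as rsp

# Shrink a single 2d slice; dataSizeSink is the (axis-0, axis-1) size of the output
# array, and the x/y swap required by cv2.resize is handled inside the routine.
small = rsp.resampleXY('/path/to/slice_0000.tif',        # hypothetical 2d image
                       dataSizeSink=(500, 400),
                       sink='/path/to/slice_0000_small.tif',
                       interpolation='linear')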
Example 6
def resampleXY(source, dataSizeSink, sink = None, interpolation = 'linear', out = sys.stdout, verbose = True):
    """Resample a 2d image slice
    
    This routine is used for resampling a large stack in parallel in xy or xz direction.
    
    Arguments:
        source (str or array): 2d image source
        dataSizeSink (tuple): size of the resampled image
        sink (str or None): location for the resampled image
        interpolation (str): interpolation method to use: 'linear' or None (nearest pixel)
        out (stdout): where to write progress information
        verbose (bool): write progress info if true
    
    Returns:
        array or str: resampled data or file name
    """   
    
    #out.write("Input: %s Output: " % (inputFile, soutputFile))
    data = io.readData(source);
    dataSize = data.shape;
    
    #print dataSize, dataSizeSink    
    
    if data.ndim != 2:
        raise RuntimeError('resampleXY: expects 2d image source, found %dd' % data.ndim)
    #print sagittalImageSize;
    
    #dataSizeSink = tuple([int(math.ceil(dataSize[i] *  resolutionSource[i]/resolutionSink[i])) for i in range(2)]);
    if verbose:
        out.write(("resampleData: Imagesize: %d, %d " % (dataSize[0], dataSize[1])) + ("Resampled Imagesize: %d, %d" % (dataSizeSink[0], dataSizeSink[1])))
        #out.write(("resampleData: Imagesize: %d, %d " % dataSize) + ("Resampled Imagesize: %d, %d" % (outputSize[1], outputSize[0])))
    
    # note: cv2.resize reverses x-Y axes
    interpolation = fixInterpolation(interpolation)
    sinkData = cv2.resize(data,  (dataSizeSink[1], dataSizeSink[0]), interpolation = interpolation);
    #sinkData = cv2.resize(data,  outputSize);
    #sinkData = scipy.misc.imresize(sagittalImage, outputImageSize, interp = 'bilinear'); #normalizes images -> not usefull for stacks !
    
    #out.write("resampleData: resized Image size: %d, %d " % sinkData.shape)
    
    return io.writeData(sink, sinkData);
Example 7
def sagittalToCoronalData(source, sink=None):
    """Change from saggital to coronal orientation
     
    Arguments:
        source (str or array): source data to be reoriented
        sink (str or None): destination for reoriented image
    
    Returns:
        str or array: reoriented data
    """

    source = io.readData(source)
    d = source.ndim
    if d < 3:
        raise RuntimeError('sagittalToCoronalData: 3d image required!')

    tp = range(d)
    tp[0:3] = [2, 0, 1]
    source = source.transpose(tp)
    source = source[::-1]
    #source = source[::-1,:,:];
    return io.writeData(sink, source)
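
A minimal usage sketch, assuming the function lives in the Resampling module imported as rsp elsewhere on this page; paths are placeholders:

import ClearMap.Alignment.Resampling as rsp

# Reorient a sagittally acquired stack to coronal sections; internally this is a
# transpose to axis order (2, 0, 1) followed by a flip of the new first axis.
coronal = rsp.sagittalToCoronalData('/path/to/stack_sagittal.mhd',
                                    sink='/path/to/stack_coronal.mhd')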
Example 8
def sagittalToCoronalData(source, sink = None):
    """Change from saggital to coronal orientation
     
    Arguments:
        source (str or array): source data to be reoriented
        sink (str or None): destination for reoriented image
    
    Returns:
        str or array: reoriented data
    """
      
    source = io.readData(source);
    d = source.ndim;
    if d < 3:
        raise RuntimeError('sagittalToCoronalData: 3d image required!');
    
    tp = range(d);
    tp[0:3] = [2,0,1];
    source = source.transpose(tp);
    source = source[::-1];
    #source = source[::-1,:,:];
    return io.writeData(sink, source);
Example 9
def resampleDataInverse(sink,
                        source=None,
                        dataSizeSource=None,
                        orientation=None,
                        resolutionSource=(4.0625, 4.0625, 3),
                        resolutionSink=(25, 25, 25),
                        processingDirectory=None,
                        processes=1,
                        cleanup=True,
                        verbose=True,
                        interpolation='linear',
                        **args):
    """Resample data inversely to :func:`resampleData` routine
    
    Arguments:
        sink (str or None): image to be inversely resampled (=sink in :func:`resampleData`)
        source (str or array): destination for the inversely resampled image (=source in :func:`resampleData`)
        dataSizeSource (tuple or None): target size of the resampled image
        orientation (tuple): orientation specified by permutation and change in sign of (1,2,3)
        resolutionSource (tuple): resolution of the source image (in length per pixel)
        resolutionSink (tuple): resolution of the resampled image (in length per pixel)
        processingDirectory (str or None): directory in which to perform the resampling in parallel; if None, a temporary directory is created
        processes (int): number of processes to use for parallel resampling
        cleanup (bool): remove temporary files
        verbose (bool): display progress information
        interpolation (str): method to use for interpolating to the resampled image
    
    Returns:
        (array or str): data or file name of resampled image

    Notes: 
        * resolutions are assumed to be given for the axes of the intrinsic 
          orientation of the data and reference, i.e. as viewed by matplotlib or ImageJ
        * orientation: permutation of 1,2,3 with an optional sign, indicating which 
          axes map onto the reference axes; a negative sign indicates reversal 
          of that particular axis
        * only a minimal set of information to determine the resampling parameters 
          has to be given, e.g. dataSizeSource and dataSizeSink
    """

    #orientation
    orientation = fixOrientation(orientation)

    #assume we can read data fully into memory
    resampledData = io.readData(sink)

    dataSizeSink = resampledData.shape

    if isinstance(dataSizeSource, basestring):
        dataSizeSource = io.dataSize(dataSizeSource)

    dataSizeSource, dataSizeSink, resolutionSource, resolutionSink = resampleDataSize(
        dataSizeSource=dataSizeSource,
        dataSizeSink=dataSizeSink,
        resolutionSource=resolutionSource,
        resolutionSink=resolutionSink,
        orientation=orientation)

    #print (dataSizeSource, dataSizeSink, resolutionSource, resolutionSink )

    dataSizeSinkI = orientDataSizeInverse(dataSizeSink, orientation)

    #flip axes back and permute inversely
    if not orientation is None:
        if orientation[0] < 0:
            resampledData = resampledData[::-1, :, :]
        if orientation[1] < 0:
            resampledData = resampledData[:, ::-1, :]
        if orientation[2] < 0:
            resampledData = resampledData[:, :, ::-1]

        #reorient
        peri = inverseOrientation(orientation)
        peri = orientationToPermuation(peri)
        resampledData = resampledData.transpose(peri)

    # upscale in z
    interpolation = fixInterpolation(interpolation)

    resampledDataXY = numpy.zeros(
        (dataSizeSinkI[0], dataSizeSinkI[1], dataSizeSource[2]),
        dtype=resampledData.dtype)

    for i in range(dataSizeSinkI[0]):
        if verbose and i % 25 == 0:
            print "resampleDataInverse: processing %d/%d" % (i,
                                                             dataSizeSinkI[0])

        #cv2.resize takes reverse order of sizes !
        resampledDataXY[i, :, :] = cv2.resize(
            resampledData[i, :, :], (dataSizeSource[2], dataSizeSinkI[1]),
            interpolation=interpolation)

    # upscale x, y in parallel

    if io.isFileExpression(source):
        files = source
    else:
        if processingDirectory == None:
            processingDirectory = tempfile.mkdtemp()
        files = os.path.join(sink[0], 'resample_\d{4}.tif')

    io.writeData(files, resampledDataXY)

    nZ = dataSizeSource[2]
    pool = multiprocessing.Pool(processes=processes)
    argdata = []
    for i in range(nZ):
        argdata.append((source, fl.fileExpressionToFileName(files, i),
                        dataSizeSource, interpolation, i, nZ))
    pool.map(_resampleXYParallel, argdata)

    if io.isFileExpression(source):
        return source
    else:
        data = io.convertData(files, source)

        if cleanup:
            shutil.rmtree(processingDirectory)

        return data
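
A usage sketch for resampleDataInverse, modeled on the test call in Example 1; all paths are placeholders:

import ClearMap.Alignment.Resampling as rsp

# Map a downsampled image (the sink of resampleData) back onto the geometry of the
# original stack, given as a file expression with a \d{4} z-index placeholder.
restored = rsp.resampleDataInverse('/path/to/resampled.mhd',
                                   source='/path/to/original_Z\d{4}.ome.tif',
                                   dataSizeSource='/path/to/original_Z\d{4}.ome.tif',
                                   orientation=(1, 2, 3),
                                   resolutionSource=(4.0625, 4.0625, 3),
                                   resolutionSink=(25, 25, 25),
                                   processes=4)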
Example 10
def resampleData(source,
                 sink=None,
                 orientation=None,
                 dataSizeSink=None,
                 resolutionSource=(4.0625, 4.0625, 3),
                 resolutionSink=(25, 25, 25),
                 processingDirectory=None,
                 processes=1,
                 cleanup=True,
                 verbose=True,
                 interpolation='linear',
                 **args):
    """Resample data of source in resolution and orientation
    
    Arguments:
        source (str or array): image to be resampled
        sink (str or None): destination of resampled image
        orientation (tuple): orientation specified by permutation and change in sign of (1,2,3)
        dataSizeSink (tuple or None): target size of the resampled image
        resolutionSource (tuple): resolution of the source image (in length per pixel)
        resolutionSink (tuple): resolution of the resampled image (in length per pixel)
        processingDirectory (str or None): directory in which to perform the resampling in parallel; if None, a temporary directory is created
        processes (int): number of processes to use for parallel resampling
        cleanup (bool): remove temporary files
        verbose (bool): display progress information
        interpolation (str): method to use for interpolating to the resampled image
    
    Returns:
        (array or str): data or file name of resampled image

    Notes: 
        * resolutions are assumed to be given for the axes of the intrinsic 
          orientation of the data and reference, i.e. as viewed by matplotlib or ImageJ
        * orientation: permutation of 1,2,3 with an optional sign, indicating which 
          axes map onto the reference axes; a negative sign indicates reversal 
          of that particular axis
        * only a minimal set of information to determine the resampling parameters 
          has to be given, e.g. dataSizeSource and dataSizeSink
    """

    orientation = fixOrientation(orientation)

    if isinstance(dataSizeSink, basestring):
        dataSizeSink = io.dataSize(dataSizeSink)

    #orient actual resolutions onto reference resolution
    dataSizeSource = io.dataSize(source)

    dataSizeSource, dataSizeSink, resolutionSource, resolutionSink = resampleDataSize(
        dataSizeSource=dataSizeSource,
        dataSizeSink=dataSizeSink,
        resolutionSource=resolutionSource,
        resolutionSink=resolutionSink,
        orientation=orientation)

    dataSizeSinkI = orientDataSizeInverse(dataSizeSink, orientation)

    #print dataSizeSource, dataSizeSink, resolutionSource, resolutionSink, dataSizeSinkI

    #rescale in x y in parallel
    if processingDirectory == None:
        processingDirectory = tempfile.mkdtemp()

    interpolation = fixInterpolation(interpolation)

    nZ = dataSizeSource[2]
    pool = multiprocessing.Pool(processes=processes)
    argdata = []
    for i in range(nZ):
        argdata.append(
            (source, os.path.join(processingDirectory,
                                  'resample_%04d.tif' % i), dataSizeSinkI,
             interpolation, i, nZ, verbose))
        #print argdata[i]
    pool.map(_resampleXYParallel, argdata)

    #rescale in z
    fn = os.path.join(processingDirectory, 'resample_%04d.tif' % 0)
    data = io.readData(fn)
    zImage = numpy.zeros((dataSizeSinkI[0], dataSizeSinkI[1], nZ),
                         dtype=data.dtype)
    for i in range(nZ):
        if verbose and i % 10 == 0:
            print "resampleData; reading %d/%d" % (i, nZ)
        fn = os.path.join(processingDirectory, 'resample_%04d.tif' % i)
        zImage[:, :, i] = io.readData(fn)

    resampledData = numpy.zeros(dataSizeSinkI, dtype=zImage.dtype)

    for i in range(dataSizeSinkI[0]):
        if verbose and i % 25 == 0:
            print "resampleData: processing %d/%d" % (i, dataSizeSinkI[0])
        #resampledImage[:, iImage ,:] =  scipy.misc.imresize(zImage[:,iImage,:], [resizedZAxisSize, sagittalImageSize[1]] , interp = 'bilinear');
        #cv2.resize takes reverse order of sizes !
        resampledData[i, :, :] = cv2.resize(
            zImage[i, :, :], (dataSizeSinkI[2], dataSizeSinkI[1]),
            interpolation=interpolation)
        #resampledData[i ,:, :] =  cv2.resize(zImage[i,:, :], (dataSize[1], resizedZSize));

    #account for using (z,y,x) array representation -> (y,x,z)
    #resampledData = resampledData.transpose([1,2,0]);
    #resampledData = resampledData.transpose([2,1,0]);

    if cleanup:
        shutil.rmtree(processingDirectory)

    if not orientation is None:

        #reorient
        per = orientationToPermuation(orientation)
        resampledData = resampledData.transpose(per)

        #reverse orientation after permuting e.g. (-2,1) brings axis 2 to first axis and we can reorder there
        if orientation[0] < 0:
            resampledData = resampledData[::-1, :, :]
        if orientation[1] < 0:
            resampledData = resampledData[:, ::-1, :]
        if orientation[2] < 0:
            resampledData = resampledData[:, :, ::-1]

        #bring back from y,x,z to z,y,x
        #resampledImage = resampledImage.transpose([2,0,1]);
    if verbose:
        print "resampleData: resampled data size: " + str(resampledData.shape)

    if sink == []:
        if io.isFileExpression(source):
            sink = os.path.split(source)
            sink = os.path.join(sink[0], 'resample_\d{4}.tif')
        elif isinstance(source, basestring):
            sink = source + '_resample.tif'
        else:
            raise RuntimeError(
                'resampleData: automatic sink naming not supported for non string source!'
            )

    return io.writeData(sink, resampledData)
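
A usage sketch for resampleData, following the conventions of the test in Example 1; paths and resolutions are placeholders for a typical light-sheet-to-atlas downsampling:

import ClearMap.Alignment.Resampling as rsp

# Downsample a raw stack (file expression with a \d{4} z-index) to 25 um atlas
# resolution; a negative entry in orientation, e.g. (-1, 2, 3), would flip that axis.
resampled = rsp.resampleData('/path/to/raw_Z\d{4}.ome.tif',
                             sink='/path/to/resampled.mhd',
                             resolutionSource=(4.0625, 4.0625, 3),
                             resolutionSink=(25, 25, 25),
                             orientation=(1, 2, 3),
                             processes=8)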
import ClearMap.Visualization.Plot as plt
import ClearMap.Analysis.Label as lbl
import numpy as np

sampleName = 'LowNIC'
#execfile('/d2/studies/ClearMap/IA_iDISCO/' + sampleName + '/parameter_file_' + sampleName + '.py')
execfile(
    '/d2/studies/ClearMap/Alex_Acute_iDISCO/3RB_HighNIC/parameter_file_template_3RB.py'
)
baseDirectory = '/d2/studies/ClearMap/Alex_Acute_iDISCO/NIC_HeatMaps/FiguresForPaper/'
region = 'MSC'

points = io.readPoints(TransformedCellsFile)
data = plt.overlayPoints(AnnotationFile, points.astype(int), pointColor=None)
io.writeData(
    os.path.join(BaseDirectory,
                 sampleName + '_Annotations_Points_Overlay_newAtlas2.tif'),
    data)
data = data[:, :, :, 1:]
io.writeData(
    os.path.join(BaseDirectory,
                 sampleName + '_Points_Transformed_newAtlas2.tif'), data)

label = io.readData(AnnotationFile)
label = label.astype('int32')
labelids = np.unique(label)

outside = np.zeros(label.shape, dtype=bool)
"""
In order to find out the level to use, in console input:
>>> lbl.labelAtLevel(r, n)
where r is region ID, and n is level (usually start at 5), if the output is not the
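
As an illustration of that check, a hedged sketch using the Caudoputamen ID (672 at level 6) listed further down this page; the region ID and levels are only examples:

import ClearMap.Analysis.Label as lbl

# Walk a structure ID through a few hierarchy levels and pick the level at which
# the call returns the parent region you want to isolate (672 = Caudoputamen).
region_id = 672
for level in (5, 6, 7):
    print(lbl.labelAtLevel(region_id, level))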
Example 12
 

g1 = stat.readDataGroup(group1);
g2 = stat.readDataGroup(group2);



#Generated average and standard deviation maps
##############################################
g1a = numpy.mean(g1,axis = 0);
g1s = numpy.std(g1,axis = 0);

g2a = numpy.mean(g2,axis = 0);
g2s = numpy.std(g2,axis = 0);

io.writeData(os.path.join(baseDirectory, 'group1_mean.raw'), rsp.sagittalToCoronalData(g1a));
io.writeData(os.path.join(baseDirectory, 'group1_std.raw'), rsp.sagittalToCoronalData(g1s));

io.writeData(os.path.join(baseDirectory, 'group2_fast_mean.raw'), rsp.sagittalToCoronalData(g2a));
io.writeData(os.path.join(baseDirectory, 'group2_fast_std.raw'), rsp.sagittalToCoronalData(g2s));





#Generate the p-values map
##########################
#pcutoff: only display pixels below this level of significance
pvals, psign = stat.tTestVoxelization(g1.astype('float'), g2.astype('float'), signed = True, pcutoff = 0.05);

#color the p-values according to their sign (defined by the sign of the difference of the means between the 2 groups)
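The coloring step referenced by the comment above typically looks like the following; this is a sketch based on the stat.colorPValues and io.writeData calls that appear later on this page, assuming the same imports, and the output file name is a placeholder:

pvalsc = stat.colorPValues(pvals, psign, positive = [0,1], negative = [1,0]);
io.writeData(os.path.join(baseDirectory, 'pvalues.tif'), rsp.sagittalToCoronalData(pvalsc.astype('float32')));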
Example 13
    '/home/yourname/experiment/sample7/cells_heatmap.tif',
    '/home/yourname/experiment/sample8/cells_heatmap.tif'
]

g1 = stat.readDataGroup(group1)
g2 = stat.readDataGroup(group2)

#Generated average and standard deviation maps
##############################################
g1a = numpy.mean(g1, axis=0)
g1s = numpy.std(g1, axis=0)

g2a = numpy.mean(g2, axis=0)
g2s = numpy.std(g2, axis=0)

io.writeData(os.path.join(baseDirectory, 'group1_mean.raw'),
             rsp.sagittalToCoronalData(g1a))
io.writeData(os.path.join(baseDirectory, 'group1_std.raw'),
             rsp.sagittalToCoronalData(g1s))

io.writeData(os.path.join(baseDirectory, 'group2_fast_mean.raw'),
             rsp.sagittalToCoronalData(g2a))
io.writeData(os.path.join(baseDirectory, 'group2_fast_std.raw'),
             rsp.sagittalToCoronalData(g2s))

#Generate the p-values map
##########################
#pcutoff: only display pixels below this level of significance
pvals, psign = stat.tTestVoxelization(g1.astype('float'),
                                      g2.astype('float'),
                                      signed=True,
                                      pcutoff=0.05)
Example 14
    os.path.join(baseDirectory, 'cells_heatmap_vox15_ROC17.tif'),
    os.path.join(baseDirectory, 'cells_heatmap_vox15_ROC18.tif'),
]

g1 = stat.readDataGroup(group1)
g2 = stat.readDataGroup(group2)

#Generated average and standard deviation maps
##############################################
g1a = numpy.mean(g1, axis=0)
g1s = numpy.std(g1, axis=0)

g2a = numpy.mean(g2, axis=0)
g2s = numpy.std(g2, axis=0)

io.writeData(os.path.join(baseDirectory, 'Group1_full_mean_horizontal.mhd'),
             g1a)  # rsp.sagittalToCoronalData(g1a));
io.writeData(os.path.join(baseDirectory, 'Group1_full_std_horizontal.mhd'),
             g1s)  # rsp.sagittalToCoronalData(g1s));

io.writeData(os.path.join(baseDirectory, 'Group2_full_mean_horizontal.mhd'),
             g2a)  # rsp.sagittalToCoronalData(g2a));
io.writeData(os.path.join(baseDirectory, 'Group2_full_std_horizontal.mhd'),
             g2s)  # rsp.sagittalToCoronalData(g2s));

#Generate the p-values map
##########################
#pcutoff: only display pixels below this level of significance
pvals, psign = stat.tTestVoxelization(g1.astype('float'),
                                      g2.astype('float'),
                                      signed=True,
                                      pcutoff=0.05)
Example 15
    batchList.append(paramFile)
    execfile(paramFile)

    baseDirectory = BaseDirectory

    #baseDirectory = '/d2/studies/ClearMap/IA_iDISCO/IA1_RB/'
    sampleName = mouse
    region = 'Caudoputamen'

    points = io.readPoints(TransformedCellsFile)
    data = plt.overlayPoints(AnnotationFile,
                             points.astype(int),
                             pointColor=None)
    data = data[:, :, :, 1:]
    io.writeData(
        os.path.join(BaseDirectory, sampleName + '_Points_Transformed.tif'),
        data)

    #If you are using the same annotation file for every sample, comment out lines 45-54 to drastically reduce run time
    label = io.readData(AnnotationFile)
    label = label.astype('int32')
    labelids = np.unique(label)

    outside = np.zeros(label.shape, dtype=bool)

    for l in labelids:
        if not (lbl.labelAtLevel(l, 6) == 672):
            outside = np.logical_or(outside, label == l)

#DP = 814 (level 6)
#MHb = 483 (level 7)
#DP = 814 (level 6)
#MHb = 483 (level 7)
#Caudoputamen = 672 (level 6)
#Accumbens = 56
#CA3 = 463 (Level 8)
#Prelimbic = 972 (Level 6)
#Endopiriform = 942 (Level 5)

heatmap = np.load(
    '/d2/studies/ClearMap/IA_iDISCO/IA2_LT/cells_transformed_to_Atlas.npy')
#heatmap = io.readData(os.path.join(baseDirectory, 'pvalues_full_vox15.tif'))
#heatmap = io.readData('/d2/studies/ClearMap/FosTRAP_ChR2/F2_LT/cells_heatmap_F2_LT_TrailMap_Reg3.tif')
heatmap[outside] = 0

io.writeData(
    os.path.join(baseDirectory, sampleName + '_' + region + '_isolated.tif'),
    heatmap)

samples = [
    'IA1_RT', 'IA1_RB', 'IA1_LT', 'IA1_LB', 'IA2_NP', 'IA2_RT', 'IA2_RB',
    'IA2_LT', 'IA2_LB'
]

for mouse in samples:
    baseDirectory = '/d2/studies/ClearMap/IA_iDISCO/' + mouse
    heatmapFile = os.path.join(baseDirectory, 'cells_heatmap_vox15.tif')
    # check the file path before reading: os.path.exists expects a path, not an array
    if not os.path.exists(heatmapFile):
        raise ValueError('Data does not exist. Check naming convention')
    heatmap = io.readData(heatmapFile)

for mouse in samples:
Example 17
def _test():
    #%%
    import numpy as np
    import scipy.ndimage as ndi
    import ClearMap.DataProcessing.LargeData as ld
    import ClearMap.Visualization.Plot3d as p3d
    import ClearMap.DataProcessing.ConvolvePointList as cpl
    import ClearMap.ImageProcessing.Skeletonization.Topology3d as t3d
    import ClearMap.ImageProcessing.Skeletonization.SkeletonCleanUp as scu

    import ClearMap.ImageProcessing.Tracing.Connect as con
    reload(con)

    data = np.load('/home/ckirst/Desktop/data.npy')
    binary = np.load('/home/ckirst/Desktop/binarized.npy')
    skel = np.load('/home/ckirst/Desktop/skel.npy')
    #points = np.load('/home/ckirst/Desktop/pts.npy');

    data = np.copy(data, order='F')
    binary = np.copy(binary, order='F')
    skel = np.copy(skel, order='F')
    skel_copy = np.copy(skel, order='F')
    points = np.ravel_multi_index(np.where(skel), skel.shape, order='F')

    skel, points = scu.cleanOpenBranches(skel,
                                         skel_copy,
                                         points,
                                         length=3,
                                         clean=True)
    deg = cpl.convolve3DIndex(skel, t3d.n26, points)

    ends, isolated = con.findEndpoints(skel, points, border=25)
    special = np.sort(np.hstack([ends, isolated]))

    ends_xyz = np.array(np.unravel_index(ends, data.shape, order='F')).T
    isolated_xyz = np.array(np.unravel_index(isolated, data.shape,
                                             order='F')).T
    special_xyz = np.vstack([ends_xyz, isolated_xyz])

    #%%
    import ClearMap.ParallelProcessing.SharedMemoryManager as smm
    data_s = smm.asShared(data, order='F')
    binary_s = smm.asShared(binary.view('uint8'), order='F')
    skel_s = smm.asShared(skel.view('uint8'), order='F')

    smm.clean()
    res = con.addConnections(data_s,
                             binary_s,
                             skel_s,
                             points,
                             radius=20,
                             start_points=None,
                             add_to_skeleton=True,
                             add_to_mask=True,
                             verbose=True,
                             processes=4,
                             debug=False,
                             block_size=10)

    skel_s = skel_s.view(bool)
    binary_s = binary_s.view(bool)

    #%%
    mask_img = np.asarray(binary, dtype=int, order='A')
    mask_img[:] = mask_img + binary_s
    mask_img[:] = mask_img + skel

    data_img = np.copy(data, order='A')
    data_img[skel] = 120

    mask_img_f = np.reshape(mask_img, -1, order='A')
    data_img_f = np.reshape(data_img, -1, order='A')

    mask_img_f[res] = 7
    data_img_f[res] = 512

    mask_img_f[special] = 8
    data_img_f[special] = 150

    for d in [3, 4, 5]:
        mask_img_f[points[deg == d]] = d + 1

    try:
        con.viewer[0].setSource(mask_img)
        con.viewer[1].setSource(data_img)
    except:
        con.viewer = p3d.plot([mask_img, data_img])

    con.viewer[0].setMinMax([0, 8])
    con.viewer[1].setMinMax([24, 160])

    #%%
    mask = binary
    data_new = np.copy(data, order='A')
    data_new[skel] = 120

    skel_new = np.asarray(skel, dtype=int, order='A')
    skel_new[:] = skel_new + binary

    binary_new = np.copy(binary, order='A')
    qs = []
    for i, e in enumerate(special):
        print('------')
        print('%d / %d' % (i, len(special)))
        path, quality = con.connectPoint(data,
                                         mask,
                                         special,
                                         i,
                                         radius=25,
                                         skeleton=skel,
                                         tubeness=None,
                                         remove_local_mask=True,
                                         min_quality=15.0,
                                         verbose=True,
                                         maxSteps=15000,
                                         costPerDistance=1.0)

        #print path, quality
        if len(path) > 0:
            qs.append(quality * 1.0 / len(path))

            q = con.addPathToMask(skel_new, path, value=7)
            q = con.addPathToMask(data_new, path, value=512)
            binary_new = con.addDilatedPathToMask(binary_new,
                                                  path,
                                                  iterations=1)

    skel_new[:] = skel_new + binary_new
    q = con.addPathToMask(skel_new, special_xyz, value=6)
    for d in [3, 4, 5]:
        xyz = np.array(
            np.unravel_index(points[deg == d], data.shape, order='F')).T
        q = con.addPathToMask(skel_new, xyz, value=d)
    q = con.addPathToMask(data_new, special_xyz, value=150)

    try:
        con.viewer[0].setSource(skel_new)
        con.viewer[1].setSource(data_new)
    except:
        con.viewer = p3d.plot([skel_new, data_new])

    con.viewer[0].setMinMax([0, 8])
    con.viewer[1].setMinMax([24, 160])

    #%%
    import matplotlib.pyplot as plt
    plt.figure(1)
    plt.clf()
    #plt.plot(qs);
    plt.hist(qs)

    #%%
    i = 20
    i = 21
    i = 30
    i = 40
    r = 25
    center = np.unravel_index(ends[i], data.shape)
    print(center, data.shape)
    mask = binary
    path = con.tracePointToMask(data,
                                mask,
                                center,
                                radius=r,
                                points=special_xyz,
                                plot=True,
                                skel=skel,
                                binary=binary,
                                tubeness=None,
                                removeLocalMask=True,
                                maxSteps=None,
                                verbose=False,
                                costPerDistance=0.0)

    #%%

    nbs = ap.findNeighbours(ends, i, skel.shape, skel.strides, r)
    center = np.unravel_index(ends[i], skel.shape)

    nbs_xyz = np.array(np.unravel_index(nbs, skel.shape)).T
    dists = nbs_xyz - center
    dists = np.sum(dists * dists, axis=1)

    nb = np.argmin(dists)

    center = np.unravel_index(ends[i], data.shape)
    print(center, data.shape)
    mask = binary
    path = con.tracePointToNeighbor(data,
                                    mask,
                                    center,
                                    nbs_xyz[nb],
                                    radius=r,
                                    points=special_xyz,
                                    plot=True,
                                    skel=skel,
                                    binary=binary,
                                    tubeness=None,
                                    removeLocalMask=True,
                                    maxSteps=None,
                                    verbose=False,
                                    costPerDistance=0.0)

    #%%

    import ClearMap.ImageProcessing.Filter.FilterKernel as fkr
    dog = fkr.filterKernel('DoG', size=(13, 13, 13))
    dv.plot(dog)

    data_filter = ndi.correlate(np.asarray(data, dtype=float), dog)
    data_filter -= data_filter.min()
    data_filter = data_filter / 3.0
    #dv.dualPlot(data, data_filter);

    #%%add all paths
    reload(con)

    r = 25
    mask = binary
    data_new = data.copy()
    data_new[skel] = 120

    skel_new = np.asarray(skel, dtype=int)
    skel_new = skel_new + binary

    binary_new = binary.copy()

    for i, e in enumerate(special):
        center = np.unravel_index(e, data.shape)

        print(i, e, center)
        path = con.tracePointToMask(data,
                                    mask,
                                    center,
                                    radius=r,
                                    points=special_xyz,
                                    plot=False,
                                    skel=skel,
                                    binary=binary,
                                    tubeness=None,
                                    removeLocalMask=True,
                                    maxSteps=15000,
                                    costPerDistance=1.0)

        q = con.addPathToMask(skel_new, path, value=7)
        q = con.addPathToMask(data_new, path, value=512)
        binary_new = con.addDilatedPathToMask(binary_new, path, iterations=1)

    q = con.addPathToMask(skel_new, special_xyz, value=6)
    for d in [3, 4, 5]:
        xyz = np.array(np.unravel_index(points[deg == d], data.shape)).T
        q = con.addPathToMask(skel_new, xyz, value=d)
    q = con.addPathToMask(data_new, special_xyz, value=150)

    skel_new = skel_new + binary_new
    try:
        con.viewer[0].setSource(skel_new)
        con.viewer[1].setSource(data_new)
    except:
        con.viewer = dv.dualPlot(skel_new, data_new)

    con.viewer[0].setMinMax([0, 8])
    con.viewer[1].setMinMax([24, 160])

    #%%

    import ClearMap.ImageProcessing.Skeletonization.Skeletonize as skl

    skel_2 = skl.skeletonize3D(binary_new.copy())

    #%%

    np.save('/home/ckirst/Desktop/binarized_con.npy', binary_new)
    #%%

    # write image

    import ClearMap.IO.IO as io

    #r = np.asarray(128 * binary_new, dtype = 'uint8');
    #g = r.copy(); b = r.copy();
    #r[:] = r + 127 * skel_2[0];
    #g[:] = g - 128 * skel_2[0];
    #b[:] = b - 128 * skel_2[0];
    #img = np.stack((r,g,b), axis = 3)

    img = np.asarray(128 * binary_new, dtype='uint8')
    img[:] = img + 127 * skel_2[0]

    io.writeData('/home/ckirst/Desktop/3d.tif', img)
Example 18
def resampleDataInverse(sink, source = None, dataSizeSource = None, orientation = None, resolutionSource = (4.0625, 4.0625, 3), resolutionSink = (25, 25, 25), 
                        processingDirectory = None, processes = 1, cleanup = True, verbose = True, interpolation = 'linear', **args):
    """Resample data inversely to :func:`resampleData` routine
    
    Arguments:
        sink (str or None): image to be inversely resampled (=sink in :func:`resampleData`)
        source (str or array): destination for the inversely resampled image (=source in :func:`resampleData`)
        dataSizeSource (tuple or None): target size of the resampled image
        orientation (tuple): orientation specified by permutation and change in sign of (1,2,3)
        resolutionSource (tuple): resolution of the source image (in length per pixel)
        resolutionSink (tuple): resolution of the resampled image (in length per pixel)
        processingDirectory (str or None): directory in which to perform the resampling in parallel; if None, a temporary directory is created
        processes (int): number of processes to use for parallel resampling
        cleanup (bool): remove temporary files
        verbose (bool): display progress information
        interpolation (str): method to use for interpolating to the resampled image
    
    Returns:
        (array or str): data or file name of resampled image

    Notes: 
        * resolutions are assumed to be given for the axes of the intrinsic 
          orientation of the data and reference, i.e. as viewed by matplotlib or ImageJ
        * orientation: permutation of 1,2,3 with an optional sign, indicating which 
          axes map onto the reference axes; a negative sign indicates reversal 
          of that particular axis
        * only a minimal set of information to determine the resampling parameters 
          has to be given, e.g. dataSizeSource and dataSizeSink
    """    
    
    
    #orientation
    orientation = fixOrientation(orientation);
    
    #assume we can read data fully into memory
    resampledData = io.readData(sink);

    dataSizeSink = resampledData.shape;
    
    if isinstance(dataSizeSource, basestring):
        dataSizeSource = io.dataSize(dataSizeSource);

    dataSizeSource, dataSizeSink, resolutionSource, resolutionSink = resampleDataSize(dataSizeSource = dataSizeSource, dataSizeSink = dataSizeSink, 
                                                                                      resolutionSource = resolutionSource, resolutionSink = resolutionSink, orientation = orientation);

    #print (dataSizeSource, dataSizeSink, resolutionSource, resolutionSink )
    
    dataSizeSinkI = orientDataSizeInverse(dataSizeSink, orientation);
    
    
    #flip axes back and permute inversely
    if not orientation is None:
        if orientation[0] < 0:
            resampledData = resampledData[::-1, :, :];
        if orientation[1] < 0:
            resampledData = resampledData[:, ::-1, :]; 
        if orientation[2] < 0:
            resampledData = resampledData[:, :, ::-1];

        
        #reorient
        peri = inverseOrientation(orientation);
        peri = orientationToPermuation(peri);
        resampledData = resampledData.transpose(peri);
    
    # upscale in z
    interpolation = fixInterpolation(interpolation);
    
    resampledDataXY = numpy.zeros((dataSizeSinkI[0], dataSizeSinkI[1], dataSizeSource[2]), dtype = resampledData.dtype);    
    
    for i in range(dataSizeSinkI[0]):
        if verbose and i % 25 == 0:
            print "resampleDataInverse: processing %d/%d" % (i, dataSizeSinkI[0])

        #cv2.resize takes reverse order of sizes !
        resampledDataXY[i ,:, :] =  cv2.resize(resampledData[i,:,:], (dataSizeSource[2], dataSizeSinkI[1]), interpolation = interpolation);

    # upscale x, y in parallel
    
    if io.isFileExpression(source):
        files = source;
    else:
        if processingDirectory == None:
            processingDirectory = tempfile.mkdtemp();   
        files = os.path.join(sink[0], 'resample_\d{4}.tif');
    
    io.writeData(files, resampledDataXY);
    
    nZ = dataSizeSource[2];
    pool = multiprocessing.Pool(processes=processes);
    argdata = [];
    for i in range(nZ):
        argdata.append( (source, fl.fileExpressionToFileName(files, i), dataSizeSource, interpolation, i, nZ) );  
    pool.map(_resampleXYParallel, argdata);
    
    if io.isFileExpression(source):
        return source;
    else:
        data = io.convertData(files, source);
        
        if cleanup:
            shutil.rmtree(processingDirectory);
        
        return data;
Example 19
def resampleData(source, sink = None,  orientation = None, dataSizeSink = None, resolutionSource = (4.0625, 4.0625, 3), resolutionSink = (25, 25, 25), 
                 processingDirectory = None, processes = 1, cleanup = True, verbose = True, interpolation = 'linear', **args):
    """Resample data of source in resolution and orientation
    
    Arguments:
        source (str or array): image to be resampled
        sink (str or None): destination of resampled image
        orientation (tuple): orientation specified by permutation and change in sign of (1,2,3)
        dataSizeSink (tuple or None): target size of the resampled image
        resolutionSource (tuple): resolution of the source image (in length per pixel)
        resolutionSink (tuple): resolution of the resampled image (in length per pixel)
        processingDirectory (str or None): directory in which to perform the resampling in parallel; if None, a temporary directory is created
        processes (int): number of processes to use for parallel resampling
        cleanup (bool): remove temporary files
        verbose (bool): display progress information
        interpolation (str): method to use for interpolating to the resampled image
    
    Returns:
        (array or str): data or file name of resampled image

    Notes: 
        * resolutions are assumed to be given for the axes of the intrinsic 
          orientation of the data and reference, i.e. as viewed by matplotlib or ImageJ
        * orientation: permutation of 1,2,3 with an optional sign, indicating which 
          axes map onto the reference axes; a negative sign indicates reversal 
          of that particular axis
        * only a minimal set of information to determine the resampling parameters 
          has to be given, e.g. dataSizeSource and dataSizeSink
    """
        
    orientation = fixOrientation(orientation);
    
    if isinstance(dataSizeSink, basestring):
        dataSizeSink = io.dataSize(dataSizeSink);

    #orient actual resolutions onto reference resolution    
    dataSizeSource = io.dataSize(source);
        
    dataSizeSource, dataSizeSink, resolutionSource, resolutionSink = resampleDataSize(dataSizeSource = dataSizeSource, dataSizeSink = dataSizeSink, 
                                                                                      resolutionSource = resolutionSource, resolutionSink = resolutionSink, orientation = orientation);
    
    dataSizeSinkI = orientDataSizeInverse(dataSizeSink, orientation);
    
    #print dataSizeSource, dataSizeSink, resolutionSource, resolutionSink, dataSizeSinkI
    
     
    #rescale in x y in parallel
    if processingDirectory == None:
        processingDirectory = tempfile.mkdtemp();     
        
    interpolation = fixInterpolation(interpolation);
     
    nZ = dataSizeSource[2];
    pool = multiprocessing.Pool(processes=processes);
    argdata = [];
    for i in range(nZ):
        argdata.append( (source, os.path.join(processingDirectory, 'resample_%04d.tif' % i), dataSizeSinkI, interpolation, i, nZ, verbose) );  
        #print argdata[i]
    pool.map(_resampleXYParallel, argdata);
    
    #rescale in z
    fn = os.path.join(processingDirectory, 'resample_%04d.tif' % 0);
    data = io.readData(fn);
    zImage = numpy.zeros((dataSizeSinkI[0], dataSizeSinkI[1], nZ), dtype = data.dtype);    
    for i in range(nZ):
        if verbose and i % 10 == 0:
            print "resampleData; reading %d/%d" % (i, nZ);
        fn = os.path.join(processingDirectory, 'resample_%04d.tif' % i);
        zImage[:,:, i] = io.readData(fn);

    
    resampledData = numpy.zeros(dataSizeSinkI, dtype = zImage.dtype);

    for i in range(dataSizeSinkI[0]):
        if verbose and i % 25 == 0:
            print "resampleData: processing %d/%d" % (i, dataSizeSinkI[0])
        #resampledImage[:, iImage ,:] =  scipy.misc.imresize(zImage[:,iImage,:], [resizedZAxisSize, sagittalImageSize[1]] , interp = 'bilinear'); 
        #cv2.resize takes reverse order of sizes !
        resampledData[i ,:, :] =  cv2.resize(zImage[i,:,:], (dataSizeSinkI[2], dataSizeSinkI[1]), interpolation = interpolation);
        #resampledData[i ,:, :] =  cv2.resize(zImage[i,:, :], (dataSize[1], resizedZSize));
    

    #account for using (z,y,x) array representation -> (y,x,z)
    #resampledData = resampledData.transpose([1,2,0]);
    #resampledData = resampledData.transpose([2,1,0]);
    
    if cleanup:
        shutil.rmtree(processingDirectory);

    if not orientation is None:
        
        #reorient
        per = orientationToPermuation(orientation);
        resampledData = resampledData.transpose(per);
    
        #reverse orientation after permuting e.g. (-2,1) brings axis 2 to first axis and we can reorder there
        if orientation[0] < 0:
            resampledData = resampledData[::-1, :, :];
        if orientation[1] < 0:
            resampledData = resampledData[:, ::-1, :]; 
        if orientation[2] < 0:
            resampledData = resampledData[:, :, ::-1];
        
        #bring back from y,x,z to z,y,x
        #resampledImage = resampledImage.transpose([2,0,1]);
    if verbose:
        print "resampleData: resampled data size: " + str(resampledData.shape)  
    
    if sink == []:
        if io.isFileExpression(source):
            sink = os.path.split(source);
            sink = os.path.join(sink[0], 'resample_\d{4}.tif');
        elif isinstance(source, basestring):
            sink = source + '_resample.tif';
        else:
            raise RuntimeError('resampleData: automatic sink naming not supported for non string source!');
    
    return io.writeData(sink, resampledData);
def generate_p_value_maps(src):
    """ 
    generates p-value maps as per ClearMap/analysis.py
    #TODO: generalise function
    """
    #Load the data (heat maps generated previously )
    #make groups
    groupA = [
        os.path.join(flds, fld) for fld in os.listdir(flds)
        if conditions[os.path.basename(fld)] == "homecage_control"
    ]
    groupA.sort()
    groupB = [
        os.path.join(flds, fld) for fld in os.listdir(flds)
        if conditions[os.path.basename(fld)] == "CNO_control_no_reversal"
    ]
    groupB.sort()
    groupC = [
        os.path.join(flds, fld) for fld in os.listdir(flds)
        if conditions[os.path.basename(fld)] == "CNO_control_reversal"
    ]
    groupC.sort()
    groupD = [
        os.path.join(flds, fld) for fld in os.listdir(flds)
        if conditions[os.path.basename(fld)] == "DREADDs"
    ]
    groupD.sort()

    group_a = [xx + "/cells_heatmap_60um_erosion.tif" for xx in groupA]
    group_b = [xx + "/cells_heatmap_60um_erosion.tif" for xx in groupB]
    group_c = [xx + "/cells_heatmap_60um_erosion.tif" for xx in groupC]
    group_d = [xx + "/cells_heatmap_60um_erosion.tif" for xx in groupD]

    grp_a = stat.readDataGroup(group_a)
    grp_b = stat.readDataGroup(group_b)
    grp_c = stat.readDataGroup(group_c)
    grp_d = stat.readDataGroup(group_d)

    #Generated average and standard deviation maps
    ##############################################
    grp_aa = np.mean(grp_a, axis=0)
    grp_as = np.std(grp_a, axis=0)

    grp_ba = np.mean(grp_b, axis=0)
    grp_bs = np.std(grp_b, axis=0)

    grp_ca = np.mean(grp_c, axis=0)
    grp_cs = np.std(grp_c, axis=0)

    grp_da = np.mean(grp_d, axis=0)
    grp_ds = np.std(grp_d, axis=0)

    io.writeData(os.path.join(src, "group_a_mean.raw"),
                 rsp.sagittalToCoronalData(grp_aa))
    io.writeData(os.path.join(src, "group_a_std.raw"),
                 rsp.sagittalToCoronalData(grp_as))

    io.writeData(os.path.join(src, "group_b_mean.raw"),
                 rsp.sagittalToCoronalData(grp_ba))
    io.writeData(os.path.join(src, "group_b_std.raw"),
                 rsp.sagittalToCoronalData(grp_bs))

    io.writeData(os.path.join(src, "group_c_mean.raw"),
                 rsp.sagittalToCoronalData(grp_ca))
    io.writeData(os.path.join(src, "group_c_std.raw"),
                 rsp.sagittalToCoronalData(grp_cs))

    io.writeData(os.path.join(src, "group_d_mean.raw"),
                 rsp.sagittalToCoronalData(grp_da))
    io.writeData(os.path.join(src, "group_d_std.raw"),
                 rsp.sagittalToCoronalData(grp_ds))

    #Generate the p-values map
    ##########################
    #first comparison
    #pcutoff: only display pixels below this level of significance
    pvals, psign = stat.tTestVoxelization(grp_a.astype("float"),
                                          grp_d.astype("float"),
                                          signed=True,
                                          pcutoff=0.05)

    #color the p-values according to their sign (defined by the sign of the difference of the means between the 2 groups)
    pvalsc = stat.colorPValues(pvals, psign, positive=[0, 1], negative=[1, 0])
    io.writeData(os.path.join(src, "pvalues_homecage_control_vs_DREADDs.tif"),
                 rsp.sagittalToCoronalData(pvalsc.astype("float32")))

    #second comparison
    pvals, psign = stat.tTestVoxelization(grp_a.astype("float"),
                                          grp_b.astype("float"),
                                          signed=True,
                                          pcutoff=0.05)
    pvalsc = stat.colorPValues(pvals, psign, positive=[0, 1], negative=[1, 0])
    io.writeData(
        os.path.join(
            src, "pvalues_homecage_control_vs_CNO_control_no_reversal.tif"),
        rsp.sagittalToCoronalData(pvalsc.astype("float32")))

    #third comparison
    pvals, psign = stat.tTestVoxelization(grp_b.astype("float"),
                                          grp_c.astype("float"),
                                          signed=True,
                                          pcutoff=0.05)
    pvalsc = stat.colorPValues(pvals, psign, positive=[0, 1], negative=[1, 0])
    io.writeData(
        os.path.join(
            src,
            "pvalues_CNO_control_no_reversal_vs_CNO_control_reversal.tif"),
        rsp.sagittalToCoronalData(pvalsc.astype("float32")))

    #fourth comparison
    pvals, psign = stat.tTestVoxelization(grp_c.astype("float"),
                                          grp_d.astype("float"),
                                          signed=True,
                                          pcutoff=0.05)
    pvalsc = stat.colorPValues(pvals, psign, positive=[0, 1], negative=[1, 0])
    io.writeData(
        os.path.join(src, "pvalues_CNO_control_reversal_vs_DREADDs.tif"),
        rsp.sagittalToCoronalData(pvalsc.astype("float32")))
    c = stat.readDataGroup(ctrl_du_heatmaps)
    o = stat.readDataGroup(obv_du_heatmaps)
    d = stat.readDataGroup(dmn_du_heatmaps)

    ca = np.mean(c, axis=0)
    cstd = np.std(c, axis=0)

    oa = np.mean(o, axis=0)
    ostd = np.std(o, axis=0)

    da = np.mean(d, axis=0)
    dstd = np.std(d, axis=0)

    #write
    io.writeData(os.path.join(pvaldst, "dorsal_up/control_mean.raw"),
                 rsp.sagittalToCoronalData(ca))
    io.writeData(os.path.join(pvaldst, "dorsal_up/control_std.raw"),
                 rsp.sagittalToCoronalData(cstd))

    io.writeData(os.path.join(pvaldst, "dorsal_up/observers_mean.raw"),
                 rsp.sagittalToCoronalData(oa))
    io.writeData(os.path.join(pvaldst, "dorsal_up/observers_std.raw"),
                 rsp.sagittalToCoronalData(ostd))

    io.writeData(os.path.join(pvaldst, "dorsal_up/demonstrators_mean.raw"),
                 rsp.sagittalToCoronalData(da))
    io.writeData(os.path.join(pvaldst, "dorsal_up/demonstrators_std.raw"),
                 rsp.sagittalToCoronalData(dstd))

    #Generate the p-values map
    ##########################
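    # Sketch (not part of the original script): the listing stops here; judging from
    # the identical blocks above, this step presumably mirrors the earlier examples.
    # The compared groups and the output file name below are placeholders.
    pvals, psign = stat.tTestVoxelization(c.astype("float"),
                                          d.astype("float"),
                                          signed=True,
                                          pcutoff=0.05)
    pvalsc = stat.colorPValues(pvals, psign, positive=[0, 1], negative=[1, 0])
    io.writeData(os.path.join(pvaldst, "dorsal_up/pvalues.tif"),
                 rsp.sagittalToCoronalData(pvalsc.astype("float32")))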