def test_crosscorrImage(self):
        random.seed(42)
        ref = random.randn(25, 25)

        reg = Registration('crosscorr')

        im = shift(ref, [2, 0], mode='constant', order=0)
        imIn = ImagesLoader(self.sc).fromArrays(im)
        paramOut = reg.prepare(ref).fit(imIn).transformations[0].delta
        imOut = reg.prepare(ref).run(imIn).first()[1]
        assert_true(allclose(ref[:-2, :], imOut[:-2, :]))
        assert_true(allclose(paramOut, [2, 0]))

        im = shift(ref, [0, 2], mode='constant', order=0)
        imIn = ImagesLoader(self.sc).fromArrays(im)
        paramOut = reg.prepare(ref).fit(imIn).transformations[0].delta
        imOut = reg.prepare(ref).run(imIn).first()[1]
        assert_true(allclose(ref[:, :-2], imOut[:, :-2]))
        assert_true(allclose(paramOut, [0, 2]))

        im = shift(ref, [2, -2], mode='constant', order=0)
        imIn = ImagesLoader(self.sc).fromArrays(im)
        paramOut = reg.prepare(ref).fit(imIn).transformations[0].delta
        imOut = reg.prepare(ref).run(imIn).first()[1]
        assert_true(allclose(ref[:-2, 2:], imOut[:-2, 2:]))
        assert_true(allclose(paramOut, [2, -2]))

        im = shift(ref, [-2, 2], mode='constant', order=0)
        imIn = ImagesLoader(self.sc).fromArrays(im)
        paramOut = reg.prepare(ref).fit(imIn).transformations[0].delta
        imOut = reg.prepare(ref).run(imIn).first()[1]
        assert_true(allclose(ref[2:, :-2], imOut[2:, :-2]))
        assert_true(allclose(paramOut, [-2, 2]))
    def test_fromMultipleMultiTimepointTifs(self):
        imagePath = os.path.join(self.testResourcesDir, "multilayer_tif",
                                 "dotdotdot_lzw*.tif")

        tiffImages = ImagesLoader(self.sc).fromTif(imagePath, nplanes=1)
        assert_true(tiffImages._nrecords is None)
        assert_equals(6, tiffImages.nrecords)

        collectedTiffImages = tiffImages.collect()

        assert_equals(6, len(collectedTiffImages),
                      "Expected 6 images, got %d" % len(collectedTiffImages))
        expectedSums = [1140006, 1119161, 1098917, 1140006, 1119161, 1098917]
        expectedIdx = 0
        for idx, tiffAry in collectedTiffImages:
            assert_equals((70, 75), tiffAry.shape)
            assert_equals(expectedIdx, idx)
            assert_equals(expectedSums[idx], tiffAry.ravel().sum())
            expectedIdx += 1

        # 3 pages / file is not evenly divisible by 2 planes
        # note this will still log a big traceback, since the exception is in the executor,
        # not the driver. But this is expected behavior.
        assert_raises(
            Exception,
            ImagesLoader(self.sc).fromTif(imagePath, nplanes=2).count)
    def test_reference2d(self):
        """test default reference calculation in 2D
        """
        random.seed(42)

        im0 = random.rand(25, 25).astype('float')
        im1 = random.rand(25, 25).astype('float')
        im2 = random.rand(25, 25).astype('float')
        imIn = ImagesLoader(self.sc).fromArrays([im0, im1, im2])

        reg = Registration('crosscorr').prepare(imIn)
        assert_true(allclose(reg.reference, (im0 + im1 + im2) / 3))

        reg = Registration('crosscorr').prepare(imIn, startIdx=0, stopIdx=2)
        assert_true(allclose(reg.reference, (im0 + im1) / 2))

        reg = Registration('crosscorr').prepare(imIn, startIdx=1, stopIdx=2)
        assert_true(allclose(reg.reference, im1))

        reg = Registration('crosscorr').prepare(imIn, defaultNImages=1)
        assert_true(allclose(reg.reference, im1))

        imgs = [random.randn(25, 25).astype('float') for _ in xrange(27)]
        imIn = ImagesLoader(self.sc).fromArrays(imgs)
        reg = Registration('crosscorr').prepare(imIn)
        expected = mean(dstack(imgs[3:23]), axis=2)
        assert_true(allclose(expected, reg.reference))
Example n. 4
    def test_fromMultiTimepointStacks(self):
        ary = arange(16, dtype=dtypeFunc('uint8')).reshape((4, 2, 2))
        ary2 = arange(16, 32, dtype=dtypeFunc('uint8')).reshape((4, 2, 2))
        ary.tofile(os.path.join(self.outputdir, "test01.stack"))
        ary2.tofile(os.path.join(self.outputdir, "test02.stack"))

        image = ImagesLoader(self.sc).fromStack(self.outputdir, dtype="uint8", dims=(2, 2, 4), nplanes=2)
        collectedImage = image.collect()

        # we don't expect to have nrecords cached, since we get an unknown number of images per file
        assert_true(image._nrecords is None)
        assert_equals(4, image.nrecords)
        assert_equals(4, len(collectedImage))
        # check keys:
        assert_equals(0, collectedImage[0][0])
        assert_equals(1, collectedImage[1][0])
        assert_equals(2, collectedImage[2][0])
        assert_equals(3, collectedImage[3][0])
        # check values:
        assert_true(array_equal(ary[:2].T, collectedImage[0][1]))
        assert_true(array_equal(ary[2:].T, collectedImage[1][1]))
        assert_true(array_equal(ary2[:2].T, collectedImage[2][1]))
        assert_true(array_equal(ary2[2:].T, collectedImage[3][1]))

        # 3 planes does not divide 4
        assert_raises(ValueError, ImagesLoader(self.sc).fromStack, self.outputdir, dtype="uint8",
                      dims=(2, 2, 4), nplanes=3)
Example n. 5
    def test_crosscorr_image(self):

        random.seed(42)
        ref = random.randn(25, 25)

        im = shift(ref, [2, 0], mode='constant', order=0)
        imin = ImagesLoader(self.sc).fromArrays(im)
        paramout = Register('crosscorr').estimate(imin, ref)[0][1]
        imout = Register('crosscorr').transform(imin, ref).first()[1]
        assert (allclose(ref[:-2, :], imout[:-2, :]))
        assert (allclose(paramout, [2, 0]))

        im = shift(ref, [0, 2], mode='constant', order=0)
        imin = ImagesLoader(self.sc).fromArrays(im)
        paramout = Register('crosscorr').estimate(imin, ref)[0][1]
        imout = Register('crosscorr').transform(imin, ref).first()[1]
        assert (allclose(ref[:, :-2], imout[:, :-2]))
        assert (allclose(paramout, [0, 2]))

        im = shift(ref, [2, -2], mode='constant', order=0)
        imin = ImagesLoader(self.sc).fromArrays(im)
        paramout = Register('crosscorr').estimate(imin, ref)[0][1]
        imout = Register('crosscorr').transform(imin, ref).first()[1]
        assert (allclose(ref[:-2, 2:], imout[:-2, 2:]))
        assert (allclose(paramout, [2, -2]))

        im = shift(ref, [-2, 2], mode='constant', order=0)
        imin = ImagesLoader(self.sc).fromArrays(im)
        paramout = Register('crosscorr').estimate(imin, ref)[0][1]
        imout = Register('crosscorr').transform(imin, ref).first()[1]
        assert (allclose(ref[2:, :-2], imout[2:, :-2]))
        assert (allclose(paramout, [-2, 2]))
Example n. 6
    def loadImagesFromArray(self, values, npartitions=None):
        """
        Load Images data from a local array

        Parameters
        ----------
        values : list or ndarray
            A list of 2d or 3d numpy arrays,
            or a single 3d or 4d numpy array

        npartitions : positive int, optional, default = None
            Number of partitions for the RDD; if unspecified, the default
            parallelism is used.
        """
        from numpy import ndarray, asarray

        from thunder.rdds.fileio.imagesloader import ImagesLoader
        loader = ImagesLoader(self._sc)

        if isinstance(values, list):
            values = asarray(values)

        if isinstance(values, ndarray) and values.ndim > 2:
            values = list(values)

        if not npartitions:
            npartitions = self._sc.defaultParallelism

        return loader.fromArrays(values, npartitions=npartitions)
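A minimal usage sketch for loadImagesFromArray. This is not taken from the example above: the context object name `tsc`, the array shapes, and the partition count are illustrative assumptions.

from numpy import arange

# three 2d frames supplied as a list of arrays
frames = [arange(16).reshape((4, 4)) + i for i in range(3)]

# npartitions falls back to the Spark default parallelism when omitted
images = tsc.loadImagesFromArray(frames, npartitions=2)
print images.nrecords          # one record per 2d frame, so 3
print images.first()[1].shape  # (4, 4)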
Example n. 7
    def generate(self,
                 dims=(100, 200),
                 centers=5,
                 t=100,
                 margin=35,
                 sd=3,
                 noise=0.1,
                 npartitions=1,
                 seed=None):

        from scipy.ndimage.filters import gaussian_filter, gaussian_filter1d
        from skimage.draw import circle
        from thunder.rdds.fileio.imagesloader import ImagesLoader
        from thunder.extraction.source import SourceModel

        random.seed(seed)

        if len(dims) != 2:
            raise Exception("Can only generate for two-dimensional sources.")

        if size(centers) == 1:
            n = centers
            xcenters = (dims[1] -
                        margin) * random.random_sample(n) + margin / 2
            ycenters = (dims[0] -
                        margin) * random.random_sample(n) + margin / 2
            centers = zip(xcenters, ycenters)
        else:
            centers = asarray(centers)
            n = len(centers)

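        # draw one random time course per source, smooth it, clip it to [0, 1],
        # and rescale it to peak at 2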
        ts = [random.randn(t) for i in range(0, n)]
        ts = clip(asarray([gaussian_filter1d(vec, 5) for vec in ts]), 0, 1)
        for ii, tt in enumerate(ts):
            ts[ii] = (tt / tt.max()) * 2
        allframes = []
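        # build each frame as a sum of peak-normalized Gaussian blobs (one per
        # source) weighted by that source's time course, plus nonnegative noise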
        for tt in range(0, t):
            frame = zeros(dims)
            for nn in range(0, n):
                base = zeros(dims)
                base[centers[nn][0], centers[nn][1]] = 1
                img = gaussian_filter(base, sd)
                img = img / max(img)
                frame += img * ts[nn][tt]
            frame += clip(random.randn(dims[0], dims[1]) * noise, 0, inf)
            allframes.append(frame)

        def pointToCircle(center, radius):
            rr, cc = circle(center[0], center[1], radius)
            return array(zip(rr, cc))

        r = round(sd * 1.5)
        sources = SourceModel([pointToCircle(c[::-1], r) for c in centers])

        data = ImagesLoader(self.sc).fromArrays(allframes,
                                                npartitions).astype('float')
        if self.returnParams is True:
            return data, ts, sources
        else:
            return data
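A hedged usage sketch for the generate method above. The `gen` object name is an assumption (a sources-style generator exposing this method and a returnParams flag); the parameter values are illustrative only.

gen.returnParams = True
data, ts, sources = gen.generate(dims=(60, 60), centers=3, t=50,
                                 margin=20, sd=3, noise=0.1,
                                 npartitions=1, seed=42)
print data.nrecords   # one image per time point, so 50
print ts.shape        # one smoothed, clipped time course per source: (3, 50)
# sources is the SourceModel built from the circular regions drawn into the frames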
    def test_saveAndLoad(self):

        # test basic saving and loading functionality
        # new registration methods should add tests
        # for loading and saving

        random.seed(42)
        ref = random.randn(25, 25)

        im = shift(ref, [2, 0], mode='constant', order=0)
        im2 = shift(ref, [0, 2], mode='constant', order=0)
        imIn = ImagesLoader(self.sc).fromArrays([im, im2])
        reg = Registration('crosscorr')
        reg.prepare(ref)
        model1 = reg.fit(imIn)

        t = tempfile.mkdtemp()
        model1.save(t + '/test.json')
        # with open(t + '/test.json', 'r') as fp:
        #    print fp.read()
        model2 = Registration.load(t + '/test.json')
        # print model2

        out1 = model1.transform(imIn).first()[1]
        out2 = model2.transform(imIn).first()[1]

        assert_true(allclose(out1, out2))
Example n. 9
    def test_toSeriesWithPack(self):
        ary = arange(8, dtype=dtypeFunc('int16')).reshape((2, 4))

        image = ImagesLoader(self.sc).fromArrays(ary)
        series = image.toBlocks("150M").toSeries()

        seriesVals = series.collect()
        seriesAry = series.pack()
        seriesAry_xpose = series.pack(transpose=True)

        # check ordering of keys
        assert_equals((0, 0), seriesVals[0][0])  # first key
        assert_equals((1, 0), seriesVals[1][0])  # second key
        assert_equals((0, 1), seriesVals[2][0])
        assert_equals((1, 1), seriesVals[3][0])
        assert_equals((0, 2), seriesVals[4][0])
        assert_equals((1, 2), seriesVals[5][0])
        assert_equals((0, 3), seriesVals[6][0])
        assert_equals((1, 3), seriesVals[7][0])

        # check dimensions tuple matches numpy shape
        assert_equals(image.dims.count, series.dims.count)
        assert_equals(ary.shape, series.dims.count)

        # check that values are in Fortran-convention order
        collectedVals = array([kv[1] for kv in seriesVals],
                              dtype=dtypeFunc('int16')).ravel()
        assert_true(array_equal(ary.ravel(order='F'), collectedVals))

        # check that packing returns original array
        assert_true(array_equal(ary, seriesAry))
        assert_true(array_equal(ary.T, seriesAry_xpose))
Example n. 10
    def _run_tst_multitif(self, filename, expectedDtype):
        imagePath = os.path.join(self.testResourcesDir, "multilayer_tif",
                                 filename)
        tiffImages = ImagesLoader(self.sc).fromTif(imagePath).collect()

        expectedNum = 1
        expectedShape = (
            70, 75, 3
        )  # 3 concatenated pages, each with single luminance channel
        # 3 images have increasing #s of black dots, so lower luminance overall
        expectedSums = [1140006, 1119161, 1098917]
        expectedKey = 0

        assert_equals(
            expectedNum, len(tiffImages),
            "Expected %s images, got %d" % (expectedNum, len(tiffImages)))
        tiffImage = tiffImages[0]
        assert_equals(
            expectedKey, tiffImage[0],
            "Expected key %s, got %s" % (str(expectedKey), str(tiffImage[0])))
        assert_true(
            isinstance(tiffImage[1], ndarray),
            "Value type error; expected image value to be numpy ndarray, was "
            + str(type(tiffImage[1])))
        assert_equals(expectedDtype, str(tiffImage[1].dtype))
        assert_equals(expectedShape, tiffImage[1].shape)
        for channelidx in xrange(0, expectedShape[2]):
            assert_equals(expectedSums[channelidx],
                          tiffImage[1][:, :, channelidx].flatten().sum())
Example n. 11
    def setUp(self):
        super(TestBlockKeys, self).setUp()
        shape = (30, 30)
        arys = [ones(shape) for _ in range(0, 3)]
        data = ImagesLoader(self.sc).fromArrays(arys)
        self.blocks = data.toBlocks(size=(10, 10)).collect()
        self.keys = [k for k, v in self.blocks]
Example n. 12
    def test_toSeriesWithSplitsAndPack(self):
        ary = arange(8, dtype=dtype('int16')).reshape((4, 2))

        image = ImagesLoader(self.sc).fromArrays(ary)
        series = image.toSeries(splitsPerDim=(1, 2))

        seriesvals = series.collect()
        seriesary = series.pack()

        # check ordering of keys
        assert_equals((0, 0), seriesvals[0][0])  # first key
        assert_equals((1, 0), seriesvals[1][0])  # second key
        assert_equals((2, 0), seriesvals[2][0])
        assert_equals((3, 0), seriesvals[3][0])
        assert_equals((0, 1), seriesvals[4][0])
        assert_equals((1, 1), seriesvals[5][0])
        assert_equals((2, 1), seriesvals[6][0])
        assert_equals((3, 1), seriesvals[7][0])

        # check dimensions tuple matches numpy shape
        assert_equals(ary.shape, series.dims.count)

        # check that values are in Fortran-convention order
        collectedvals = array([kv[1] for kv in seriesvals],
                              dtype=dtype('int16')).ravel()
        assert_true(array_equal(ary.ravel(order='F'), collectedvals))

        # check that packing returns original array
        assert_true(array_equal(ary, seriesary))
Example n. 13
    def test_castToFloat(self):
        arys, shape, size = _generateTestArrays(2, 'uint8')
        imageData = ImagesLoader(self.sc).fromArrays(arys)
        catData = imageData.astype("smallfloat")

        assert_equals('float16', str(catData.dtype))
        assert_equals('float16', str(catData.first()[1].dtype))
Example n. 14
    def test_fromStackToSeriesWithPack(self):
        ary = arange(8, dtype=dtypeFunc('int16')).reshape((2, 4))
        filename = os.path.join(self.outputdir, "test.stack")
        ary.tofile(filename)

        image = ImagesLoader(self.sc).fromStack(filename, dims=(4, 2))
        strategy = SimpleBlockingStrategy.generateFromBlockSize(image, "150M")
        series = image.toBlocks(strategy).toSeries()

        seriesVals = series.collect()
        seriesAry = series.pack()

        # check ordering of keys
        assert_equals((0, 0), seriesVals[0][0])  # first key
        assert_equals((1, 0), seriesVals[1][0])  # second key
        assert_equals((2, 0), seriesVals[2][0])
        assert_equals((3, 0), seriesVals[3][0])
        assert_equals((0, 1), seriesVals[4][0])
        assert_equals((1, 1), seriesVals[5][0])
        assert_equals((2, 1), seriesVals[6][0])
        assert_equals((3, 1), seriesVals[7][0])

        # check dimensions tuple is reversed from numpy shape
        assert_equals(ary.shape[::-1], series.dims.count)

        # check that values are in original order
        collectedVals = array([kv[1] for kv in seriesVals],
                              dtype=dtypeFunc('int16')).ravel()
        assert_true(array_equal(ary.ravel(), collectedVals))

        # check that packing returns transpose of original array
        assert_true(array_equal(ary.T, seriesAry))
Example n. 15
    def test_subsample(self):
        narys = 3
        arys, sh, sz = _generateTestArrays(narys)
        sampFactors = [2, (2, 3, 3)]

        def subsamp(ary, factor):
            if not hasattr(factor, "__len__"):
                factor = [factor] * ary.ndim

            slices = [
                slice(0, ary.shape[i], factor[i]) for i in xrange(ary.ndim)
            ]
            return ary[slices]

        imageData = ImagesLoader(self.sc).fromArrays(arys)
        for sampFactor in sampFactors:
            subsampData = imageData.subsample(sampFactor)
            expectedArys = map(lambda ary: subsamp(ary, sampFactor), arys)
            subsampled = subsampData.collect()
            for actual, expected in zip(subsampled, expectedArys):
                assert_true(array_equal(expected, actual[1]))

            assert_equals(tuple(expectedArys[0].shape), subsampled[0][1].shape)
            assert_equals(tuple(expectedArys[0].shape),
                          subsampData._dims.count)
            assert_equals(str(arys[0].dtype), str(subsampled[0][1].dtype))
            assert_equals(str(subsampled[0][1].dtype), subsampData._dtype)
Example n. 16
    def test_toBlocksBySlices(self):
        narys = 3
        arys, sh, sz = _generate_test_arrays(narys)

        imagedata = ImagesLoader(self.sc).fromArrays(arys)

        test_params = [(1, 1, 1), (1, 1, 2), (1, 1, 3), (1, 2, 1), (1, 2, 2),
                       (1, 2, 3), (1, 3, 1), (1, 3, 2), (1, 3, 3), (2, 1, 1),
                       (2, 1, 2), (2, 1, 3), (2, 2, 1), (2, 2, 2), (2, 2, 3),
                       (2, 3, 1), (2, 3, 2), (2, 3, 3)]
        for bpd in test_params:
            blocks = imagedata._toBlocksBySplits(bpd).collect()

            expectednuniquekeys = reduce(mul, bpd)
            expectedvalsperkey = narys

            keystocounts = Counter([kv[0] for kv in blocks])
            assert_equals(expectednuniquekeys, len(keystocounts))
            assert_equals([expectedvalsperkey] * expectednuniquekeys,
                          keystocounts.values())

            gatheredary = None
            for _, block in blocks:
                if gatheredary is None:
                    gatheredary = zeros(block.origshape, dtype='int16')
                gatheredary[block.origslices] = block.values

            for i in xrange(narys):
                assert_true(array_equal(arys[i], gatheredary[i]))
Example n. 17
    def test_toSeriesWithInefficientSplitAndSortedPack(self):
        ary = arange(8, dtype=dtypeFunc('int16')).reshape((4, 2))

        image = ImagesLoader(self.sc).fromArrays(ary)
        series = image.toBlocks((2, 1), units="s").toSeries()

        seriesVals = series.collect()
        seriesAry = series.pack(sorting=True)

        # check ordering of keys
        assert_equals((0, 0), seriesVals[0][0])  # first key
        assert_equals((1, 0), seriesVals[1][0])  # second key
        assert_equals((0, 1), seriesVals[2][0])
        assert_equals((1, 1), seriesVals[3][0])
        # end of first block
        # beginning of second block
        assert_equals((2, 0), seriesVals[4][0])
        assert_equals((3, 0), seriesVals[5][0])
        assert_equals((2, 1), seriesVals[6][0])
        assert_equals((3, 1), seriesVals[7][0])

        # check dimensions tuple matches numpy shape
        assert_equals(ary.shape, series.dims.count)

        # check that values are in expected order
        collectedVals = array([kv[1] for kv in seriesVals],
                              dtype=dtypeFunc('int16')).ravel()
        assert_true(array_equal(ary[:2, :].ravel(order='F'),
                                collectedVals[:4]))  # first block
        assert_true(
            array_equal(ary[2:4, :].ravel(order='F'),
                        collectedVals[4:]))  # second block

        # check that packing returns original array (after sort)
        assert_true(array_equal(ary, seriesAry))
Example n. 18
    def loadImagesOCP(self,
                      bucketName,
                      resolution,
                      server='ocp.me',
                      startIdx=None,
                      stopIdx=None,
                      minBound=None,
                      maxBound=None):
        """
        Load Images from OCP (Open Connectome Project).

        The OCP is a web service for access to EM brain images and other neural image data.
        The web-service can be accessed at http://www.openconnectomeproject.org/.
        
        Parameters
        ----------
        bucketName : string
            Token name for the project in OCP. This name should exist on the server from which data is loaded.

        resolution : nonnegative int
            Resolution of the data in OCP.

        server : string, optional, default = 'ocp.me'
            Name of the OCP server with the specified token.

        startIdx : nonnegative int, optional, default = None
            Convenience parameter to read only a subset of input files. Uses python slice conventions
            (zero-based indexing with exclusive final position).

        stopIdx : nonnegative int, optional
            See startIdx.

        minBound, maxBound : tuple of nonnegative int, optional, default = None
            X,Y,Z bounds of the data to fetch from OCP. minBound contains (xMin, yMin, zMin) and
            maxBound contains (xMax, yMax, zMax).

        Returns
        -------
        data : thunder.rdds.Images
            An Images object wrapping an RDD of (int, numpy array) pairs.
        """

        from thunder.rdds.fileio.imagesloader import ImagesLoader
        loader = ImagesLoader(self._sc)

        # check that startIdx is smaller than or equal to stopIdx
        if startIdx is not None and stopIdx is not None and startIdx > stopIdx:
            raise Exception(
                "Error. startIdx {} is larger than stopIdx {}".format(
                    startIdx, stopIdx))
        data = loader.fromOCP(bucketName,
                              resolution=resolution,
                              server=server,
                              startIdx=startIdx,
                              stopIdx=stopIdx,
                              minBound=minBound,
                              maxBound=maxBound)

        return data
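A hedged usage sketch for loadImagesOCP. The token name, bounds, and the `tsc` context object below are placeholders, not real project values.

images = tsc.loadImagesOCP('myToken', resolution=1,
                           server='ocp.me',
                           startIdx=0, stopIdx=10,
                           minBound=(0, 0, 0), maxBound=(128, 128, 16))
print images.nrecords           # number of (int, ndarray) records fetched
print images.first()[1].shape   # shape depends on the requested bounds and resolution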
Example n. 19
    def test_mean(self):
        from test_utils import elementwiseMean
        arys, shape, size = _generateTestArrays(2, 'uint8')
        imageData = ImagesLoader(self.sc).fromArrays(arys)
        meanVal = imageData.mean()
        expected = elementwiseMean(arys).astype('float16')
        assert_true(allclose(expected, meanVal))
        assert_equals('float64', str(meanVal.dtype))
Example n. 20
    def test_reference_3d(self):

        random.seed(42)
        im0 = random.randn(25, 25, 3).astype('uint16')
        im1 = random.randn(25, 25, 3).astype('uint16')
        imin = ImagesLoader(self.sc).fromArrays([im0, im1])
        ref = Register.reference(imin)
        assert (allclose(ref, (im0 + im1) / 2))
Example n. 21
    def test_roundtripConvertToSeries(self):
        imagepath = findSourceTreeDir("utils/data/fish/tif-stack")

        images = ImagesLoader(self.sc).fromTif(imagepath)
        strategy = SimpleBlockingStrategy.generateFromBlockSize(images, blockSize=76 * 20)
        self._run_tst_roundtripConvertToSeries(images, strategy)
Example n. 22
    def test_toSeries(self):
        # create 3 arrays of 4x3x3 images (C-order), containing sequential integers
        narys = 3
        arys, sh, sz = _generateTestArrays(narys)

        imageData = ImagesLoader(self.sc).fromArrays(arys)
        series = imageData.toBlocks((4, 1, 1), units="s").toSeries().collect()

        self.evaluateSeries(arys, series, sz)
Example n. 23
    def test_stdev(self):
        from test_utils import elementwiseStdev
        arys, shape, size = _generateTestArrays(2, 'uint8')
        imageData = ImagesLoader(self.sc).fromArrays(arys)
        stdval = imageData.stdev()

        expected = elementwiseStdev([ary.astype('float16') for ary in arys])
        assert_true(allclose(expected, stdval))
        assert_equals('float64', str(stdval.dtype))
    def test_reference_3d(self):
        """ test default reference calculation in 3D
        """
        random.seed(42)
        im0 = random.randn(25, 25, 3).astype('float')
        im1 = random.randn(25, 25, 3).astype('float')
        imIn = ImagesLoader(self.sc).fromArrays([im0, im1])
        reg = Registration('crosscorr').prepare(imIn)
        assert_true(allclose(reg.reference, (im0 + im1) / 2))
Example n. 25
    def test_toSeries(self):
        # create 3 arrays of 4x3x3 images (C-order), containing sequential integers
        narys = 3
        arys, sh, sz = _generate_test_arrays(narys)

        imagedata = ImagesLoader(self.sc).fromArrays(arys)
        series = imagedata.toSeries(groupingDim=0).collect()

        self.evaluate_series(arys, series, sz)
Example n. 26
    def test_toTimeSeries(self):
        # create 3 arrays of 4x3x3 images (C-order), containing sequential integers
        narys = 3
        arys, sh, sz = _generateTestArrays(narys)

        imageData = ImagesLoader(self.sc).fromArrays(arys)
        series = imageData.toTimeSeries()

        assert (isinstance(series, TimeSeries))
Example n. 27
    def _run_tstSaveAsBinarySeries(self, testIdx, narys_, valDtype, groupingDim_):
        """Pseudo-parameterized test fixture, allows reusing existing spark context
        """
        paramStr = "(groupingdim=%d, valuedtype='%s')" % (groupingDim_, valDtype)
        arys, aryShape, arySize = _generateTestArrays(narys_, dtype_=valDtype)
        dims = aryShape[:]
        outdir = os.path.join(self.outputdir, "anotherdir%02d" % testIdx)

        images = ImagesLoader(self.sc).fromArrays(arys)

        slicesPerDim = [1]*arys[0].ndim
        slicesPerDim[groupingDim_] = arys[0].shape[groupingDim_]
        images.toBlocks(slicesPerDim, units="splits").saveAsBinarySeries(outdir)

        ndims = len(aryShape)
        # prevent padding to 4-byte boundaries: "=" specifies no alignment
        unpacker = struct.Struct('=' + 'h'*ndims + dtypeFunc(valDtype).char*narys_)

        def calcExpectedNKeys():
            tmpShape = list(dims[:])
            del tmpShape[groupingDim_]
            return prod(tmpShape)
        expectedNKeys = calcExpectedNKeys()

        def byrec(f_, unpacker_, nkeys_):
            rec = True
            while rec:
                rec = f_.read(unpacker_.size)
                if rec:
                    allRecVals = unpacker_.unpack(rec)
                    yield allRecVals[:nkeys_], allRecVals[nkeys_:]

        outFilenames = glob.glob(os.path.join(outdir, "*.bin"))
        assert_equals(dims[groupingDim_], len(outFilenames))
        for outFilename in outFilenames:
            with open(outFilename, 'rb') as f:
                nkeys = 0
                for keys, vals in byrec(f, unpacker, ndims):
                    nkeys += 1
                    assert_equals(narys_, len(vals))
                    for valIdx, val in enumerate(vals):
                        assert_equals(arys[valIdx][keys], val, "Expected %g, got %g, for test %d %s" %
                                      (arys[valIdx][keys], val, testIdx, paramStr))
                assert_equals(expectedNKeys, nkeys)

        confName = os.path.join(outdir, "conf.json")
        assert_true(os.path.isfile(confName))
        with open(os.path.join(outdir, "conf.json"), 'r') as fconf:
            import json
            conf = json.load(fconf)
            assert_equals(outdir, conf['input'])
            assert_equals(len(aryShape), conf['nkeys'])
            assert_equals(narys_, conf['nvalues'])
            assert_equals(valDtype, conf['valuetype'])
            assert_equals('int16', conf['keytype'])

        assert_true(os.path.isfile(os.path.join(outdir, 'SUCCESS')))
Example n. 28
    def test_variance(self):
        from test_utils import elementwise_var
        arys, shape, size = _generate_test_arrays(2, 'uint8')
        imagedata = ImagesLoader(self.sc).fromArrays(arys)
        varval = imagedata.variance()

        expected = elementwise_var([ary.astype('float16') for ary in arys])
        assert_true(allclose(expected, varval))
        assert_equals('float16', str(varval.dtype))
Example n. 29
    def test_fromTifWithMultipleFiles(self):
        imagepath = os.path.join(self.testresourcesdir, "singlelayer_tif", "dot*_lzw.tif")
        tifimages = ImagesLoader(self.sc).fromTif(imagepath).collect()

        expectednum = 3
        expectedshape = (70, 75, 4)  # 4 channel tif; RGBalpha
        expectedsums = [1282192, 1261328, 1241520]  # 3 images have increasing #s of black dots, so lower luminance overall
        expectedkeys = range(expectednum)
        self._evaluateMultipleImages(tifimages, expectednum, expectedshape, expectedkeys, expectedsums)
Example n. 30
    def test_sum(self):
        from numpy import add
        arys, shape, size = _generateTestArrays(2, 'uint8')
        imageData = ImagesLoader(self.sc).fromArrays(arys)
        sumVal = imageData.sum(dtype='uint32')

        arys = [ary.astype('uint32') for ary in arys]
        expected = reduce(add, arys)
        assert_true(array_equal(expected, sumVal))
        assert_equals('uint32', str(sumVal.dtype))