def test_from_array_bolt(eng):
    # Wrap a numpy array in a bolt array (distributed when eng is given),
    # then check that fromarray preserves shape, dims, and contents.
    original = arange(8).reshape((1, 2, 4))
    bolt = barray(original, context=eng) if eng is not None else barray(original)
    data = fromarray(bolt)
    assert allclose(data.shape, original.shape)
    assert allclose(data.dims, original.shape[1:])
    assert allclose(data.toarray(), original)
def test_from_array_bolt(eng):
    # Wrap a numpy array in a bolt array (distributed when eng is given),
    # then check that fromarray preserves shape, dims, and contents.
    original = arange(8).reshape((1, 2, 4))
    bolt = barray(original, context=eng) if eng is not None else barray(original)
    data = fromarray(bolt)
    assert allclose(data.shape, original.shape)
    assert allclose(data.dims, original.shape[1:])
    assert allclose(data.toarray(), original)
def centercorr(self, sigma_blur=0):
    """
    Correlate every pixel in a block to its central pixel,
    optionally blurring each image before computing the correlation.

    Parameters
    ----------
    sigma_blur : scalar, optional, default = 0
        Standard deviation of a per-image gaussian filter applied
        before correlating; 0 means no blurring.
    """
    # local imports, matching the style used by localcorr
    from numpy import array, tile, corrcoef, concatenate, isnan
    from scipy.ndimage import gaussian_filter
    from thunder.images.readers import fromarray, fromrdd

    nimages = self.shape[0]

    # blur each image independently so the filter never smooths
    # across the record (time) axis
    def restricted_gaussian_filter(x, sigma):
        return array([gaussian_filter(y, sigma) for y in x])

    if sigma_blur > 0:
        blurred = self.map(lambda x: restricted_gaussian_filter(x, sigma_blur))
    else:
        blurred = self

    # replace each image with a constant image holding the value of its
    # central pixel; '// 2 + 1' is integer division (was '/' which breaks
    # indexing under Python 3) -- NOTE(review): '+ 1' lands one past the
    # geometric center, confirm the offset is intended
    def copy_center(x):
        center = x[:, x.shape[1] // 2 + 1, x.shape[2] // 2 + 1]
        return tile(center, (x.shape[1], x.shape[2], 1)).transpose(2, 0, 1)

    blurred = blurred.map(copy_center)

    # union the central images with the originals to create an
    # Images object containing 2N images (where N is the original number
    # of images), ordered such that the first N images are the central ones
    if self.mode == 'spark':
        combined = self.values.concatenate(blurred.values)
        combined_images = fromrdd(combined.tordd())
    else:
        combined = concatenate((self.values, blurred.values), axis=0)
        combined_images = fromarray(combined)

    # correlate the first N (central) records with the last N (original) records
    series = combined_images.toseries()
    corr = series.map(
        lambda x: corrcoef(x[:nimages], x[nimages:])[0, 1]).toarray()
    corr[isnan(corr)] = 0

    # the center pixel trivially correlates with itself; replace its value
    # with the average of its four neighbors
    def center_filter(img):
        x = img.shape[0] // 2 + 1
        y = img.shape[1] // 2 + 1
        img[x, y] = (img[x + 1, y] + img[x - 1, y]
                     + img[x, y + 1] + img[x, y - 1]) / 4
        return img

    corr = array([center_filter(im) for im in corr])
    return corr
def tolocal(self):
    """
    Convert to local (numpy-backed) mode.

    Builds a new local-mode object from the full array; if the data
    are already local this logs a warning and makes a local copy.
    """
    from thunder.images.readers import fromarray

    if self.mode == 'local':
        # 'warning' replaces the deprecated 'warn' alias; removed dead 'pass'
        logging.getLogger('thunder').warning('images already in local mode')
    return fromarray(self.toarray())
def tolocal(self):
    """
    Convert to local (numpy-backed) mode.

    Builds a new local-mode object from the full array; if the data
    are already local this logs a warning and makes a local copy.
    """
    from thunder.images.readers import fromarray

    if self.mode == 'local':
        # 'warning' replaces the deprecated 'warn' alias; removed dead 'pass'
        logging.getLogger('thunder').warning('images already in local mode')
    return fromarray(self.toarray())
def tospark(self, engine=None):
    """
    Convert to distributed spark mode.

    Parameters
    ----------
    engine : SparkContext
        Context used to distribute the data; required.

    Raises
    ------
    ValueError
        If no SparkContext is provided.
    """
    from thunder.images.readers import fromarray

    if self.mode == 'spark':
        # 'warning' replaces the deprecated 'warn' alias; removed dead 'pass'
        logging.getLogger('thunder').warning('images already in spark mode')
    if engine is None:
        raise ValueError('Must provide a SparkContext')
    return fromarray(self.toarray(), engine=engine)
def tospark(self, engine=None):
    """
    Convert to distributed spark mode.

    Parameters
    ----------
    engine : SparkContext
        Context used to distribute the data; required.

    Raises
    ------
    ValueError
        If no SparkContext is provided.
    """
    from thunder.images.readers import fromarray

    if self.mode == 'spark':
        # 'warning' replaces the deprecated 'warn' alias; removed dead 'pass'
        logging.getLogger('thunder').warning('images already in spark mode')
    if engine is None:
        raise ValueError('Must provide a SparkContext')
    return fromarray(self.toarray(), engine=engine)
def localcorr(self, size=2):
    """
    Correlate every pixel in an image sequence to the average of its
    local neighborhood.

    This algorithm computes, for every pixel, the correlation coefficient
    between the sequence of values for that pixel, and the average of all
    pixels in a local neighborhood. It does this by blurring the image(s)
    with a uniform filter, and then correlating the original sequence
    with the blurred sequence.

    Parameters
    ----------
    size : int, optional, default = 2
        Size of the filter in pixels, used along every dimension.
    """
    # validate the actual parameter (previously checked the undefined
    # name 'neighborhood', which raised NameError instead of ValueError)
    if not isinstance(size, int):
        raise ValueError(
            'The size must be specified as an integer.')

    from thunder.images.readers import fromarray, fromrdd
    from numpy import corrcoef, concatenate

    nimages = self.shape[0]

    # spatially average the original image set over the specified neighborhood
    blurred = self.uniform_filter(size)

    # union the averaged images with the originals to create an
    # Images object containing 2N images (where N is the original number
    # of images), ordered such that the first N images are the averaged ones
    if self.mode == 'spark':
        combined = self.values.concatenate(blurred.values)
        combined_images = fromrdd(combined.tordd())
    else:
        combined = concatenate((self.values, blurred.values), axis=0)
        combined_images = fromarray(combined)

    # correlate the first N (averaged) records with the last N (original) records
    series = combined_images.toseries()
    corr = series.map(lambda x: corrcoef(x[:nimages], x[nimages:])[0, 1])

    return corr.toarray()
def localcorr(self, size=2):
    """
    Correlate every pixel in an image sequence to the average of its
    local neighborhood.

    This algorithm computes, for every pixel, the correlation coefficient
    between the sequence of values for that pixel, and the average of all
    pixels in a local neighborhood. It does this by blurring the image(s)
    with a uniform filter, and then correlating the original sequence
    with the blurred sequence.

    Parameters
    ----------
    size : int, optional, default = 2
        Size of the filter in pixels, used along every dimension.
    """
    # validate the actual parameter (previously checked the undefined
    # name 'neighborhood', which raised NameError instead of ValueError)
    if not isinstance(size, int):
        raise ValueError('The size must be specified as an integer.')

    from thunder.images.readers import fromarray, fromrdd
    from numpy import corrcoef, concatenate

    nimages = self.shape[0]

    # spatially average the original image set over the specified neighborhood
    blurred = self.uniform_filter(size)

    # union the averaged images with the originals to create an
    # Images object containing 2N images (where N is the original number
    # of images), ordered such that the first N images are the averaged ones
    if self.mode == 'spark':
        combined = self.values.concatenate(blurred.values)
        combined_images = fromrdd(combined.tordd())
    else:
        combined = concatenate((self.values, blurred.values), axis=0)
        combined_images = fromarray(combined)

    # correlate the first N (averaged) records with the last N (original) records
    series = combined_images.toseries()
    corr = series.map(lambda x: corrcoef(x[:nimages], x[nimages:])[0, 1])

    return corr.toarray()
def test_reshape_values(eng):
    original = fromarray(arange(72).reshape(2, 6, 6), engine=eng)
    arr = original.toarray()

    # reshapes that keep the leading (image) axis and the element count succeed
    for shape in [(2, 12, 3), (2, 4, 3, 3)]:
        assert allclose(arr.reshape(*shape), original.reshape(*shape).toarray())

    # must conserve the number of elements
    with pytest.raises(ValueError):
        original.reshape(2, 3, 6)

    # cannot change the number of images
    with pytest.raises(ValueError):
        original.reshape(4, 3, 6)

    # cannot create images with fewer than 2 dimensions
    with pytest.raises(ValueError):
        original.reshape(2, 36)

    # cannot create images with more than 3 dimensions
    with pytest.raises(ValueError):
        original.reshape(2, 2, 2, 3, 3)
def test_reshape_values(eng):
    original = fromarray(arange(72).reshape(2, 6, 6), engine=eng)
    arr = original.toarray()

    # reshapes that keep the leading (image) axis and the element count succeed
    for shape in [(2, 12, 3), (2, 4, 3, 3)]:
        assert allclose(arr.reshape(*shape), original.reshape(*shape).toarray())

    # must conserve the number of elements
    with pytest.raises(ValueError):
        original.reshape(2, 3, 6)

    # cannot change the number of images
    with pytest.raises(ValueError):
        original.reshape(4, 3, 6)

    # cannot create images with fewer than 2 dimensions
    with pytest.raises(ValueError):
        original.reshape(2, 36)

    # cannot create images with more than 3 dimensions
    with pytest.raises(ValueError):
        original.reshape(2, 2, 2, 3, 3)
def test_from_array_single(eng):
    # a 2d input is treated as a single image and gains a leading axis
    single = arange(8).reshape((2, 4))
    data = fromarray(single, engine=eng)
    assert allclose(data.shape, (1,) + single.shape)
    assert allclose(data.dims, single.shape)
    assert allclose(data.toarray(), single)
def test_from_array(eng):
    # a 3d input is interpreted as a stack of images along the first axis
    stack = arange(8).reshape((1, 2, 4))
    data = fromarray(stack, engine=eng)
    assert allclose(data.shape, stack.shape)
    assert allclose(data.dims, stack.shape[1:])
    assert allclose(data.toarray(), stack)
def test_from_array_single(eng):
    # a 2d input is treated as a single image and gains a leading axis
    single = arange(8).reshape((2, 4))
    data = fromarray(single, engine=eng)
    assert allclose(data.shape, (1,) + single.shape)
    assert allclose(data.dims, single.shape)
    assert allclose(data.toarray(), single)
def test_from_array(eng):
    # a 3d input is interpreted as a stack of images along the first axis
    stack = arange(8).reshape((1, 2, 4))
    data = fromarray(stack, engine=eng)
    assert allclose(data.shape, stack.shape)
    assert allclose(data.dims, stack.shape[1:])
    assert allclose(data.toarray(), stack)