def test_data_array_metadata(self):
    """Check that rescale_hq() scales the metadata along with the pixels."""
    depth = 2 ** 12
    img12 = numpy.zeros((1024, 512), dtype="uint16") + depth // 2
    watermark = 538
    # Draw a small rectangle of "watermark" values to track after scaling
    img12[20:40, 50:70] = watermark
    md = {
        model.MD_PIXEL_SIZE: (1e-6, 2e-5),
        model.MD_BINNING: (1, 1),
        model.MD_AR_POLE: (253.1, 65.1),
    }
    img12da = model.DataArray(img12, md)

    # Downscale by a factor of 2 on each dimension
    out = img.rescale_hq(img12da, (512, 256))
    self.assertEqual(out.shape, (512, 256))

    # The watermark should have moved to half the original coordinates
    self.assertEqual(out[15, 30], watermark)
    self.assertEqual(out[30, 60], depth // 2)

    # Scale-dependent metadata must be updated accordingly
    self.assertEqual(out.metadata[model.MD_PIXEL_SIZE], (2e-06, 4e-05))
    self.assertEqual(out.metadata[model.MD_BINNING], (2.0, 2.0))
    self.assertEqual(out.metadata[model.MD_AR_POLE], (126.55, 32.55))
def test_data_array_metadata(self):
    """Rescaling a DataArray must rescale pixel-size, binning and AR pole."""
    depth = 2 ** 12
    background = depth // 2
    data = numpy.zeros((1024, 512), dtype="uint16") + background
    watermark = 538
    data[20:40, 50:70] = watermark  # marked square, to locate after scaling
    metadata = {model.MD_PIXEL_SIZE: (1e-6, 2e-5),
                model.MD_BINNING: (1, 1),
                model.MD_AR_POLE: (253.1, 65.1)}
    da = model.DataArray(data, metadata)

    out = img.rescale_hq(da, (512, 256))
    self.assertEqual(out.shape, (512, 256))

    # Watermark ends up at half the original coordinates
    self.assertEqual(out[15, 30], watermark)
    self.assertEqual(out[30, 60], background)

    # Pixel size doubles, binning doubles, pole position halves
    self.assertEqual(out.metadata[model.MD_PIXEL_SIZE], (2e-06, 4e-05))
    self.assertEqual(out.metadata[model.MD_BINNING], (2.0, 2.0))
    self.assertEqual(out.metadata[model.MD_AR_POLE], (126.55, 32.55))
def _getPolarProjection(self, pos):
    """
    Return the polar projection of the image at the given position.
    pos (tuple of 2 floats): position (must be part of the ._sempos)
    returns DataArray: the polar projection
    """
    # Projections are cached in self._polar, keyed by position.
    if pos in self._polar:
        polard = self._polar[pos]
    else:
        # Compute the polar representation
        data = self._sempos[pos]
        try:
            if numpy.prod(data.shape) > (1280 * 1080):
                # AR conversion fails on very large images due to too much
                # memory consumed (> 2Gb). So, rescale + use a "degraded" type
                # that uses less memory. As the display size is small (compared
                # to the size of the input image), it shouldn't actually
                # affect much the output.
                # FIX: log messages previously misspelled "azymuthal".
                logging.info("AR image is very large %s, will convert to "
                             "azimuthal projection in reduced precision.",
                             data.shape)
                y, x = data.shape
                if y > x:
                    small_shape = 1024, int(round(1024 * x / y))
                else:
                    small_shape = int(round(1024 * y / x)), 1024
                # resize
                data = img.rescale_hq(data, small_shape)
                dtype = numpy.float16
            else:
                dtype = None  # just let the function use the best one

            # 2 x size of original image (on smallest axis) and at most
            # the size of a full-screen canvas
            size = min(min(data.shape) * 2, 1134)
            # TODO: First compute quickly a low resolution and then
            # compute a high resolution version.
            # TODO: could use the size of the canvas that will display
            # the image to save some computation time.
            bg_data = self.background.value
            if bg_data is None:
                # Simple version: remove the background value
                data0 = polar.ARBackgroundSubtract(data)
            else:
                data0 = img.Subtract(data, bg_data)  # metadata from data

            polard = polar.AngleResolved2Polar(data0, size, hole=False,
                                               dtype=dtype)
            self._polar[pos] = polard
        except Exception:
            logging.exception("Failed to convert to azimuthal projection")
            return data  # display it raw as fallback

    return polard
def _project2Polar(self, pos):
    """
    Return the polar projection of the image at the given position.
    pos (tuple of 2 floats): position (must be part of the ._sempos)
    returns DataArray: the polar projection
    """
    # Projections are cached in self._polar, keyed by position.
    if pos in self._polar:
        polard = self._polar[pos]
    else:
        # Compute the polar representation
        data = self._sempos[pos]
        try:
            if numpy.prod(data.shape) > (1280 * 1080):
                # AR conversion fails with very large images due to too much
                # memory consumed (> 2Gb). So, rescale + use a "degraded" type that
                # uses less memory. As the display size is small (compared
                # to the size of the input image, it shouldn't actually
                # affect much the output.
                logging.info("AR image is very large %s, will convert to "
                             "azimuthal projection in reduced precision.",
                             data.shape)
                y, x = data.shape
                # Shrink so the longest axis becomes 1024, keeping aspect ratio
                if y > x:
                    small_shape = 1024, int(round(1024 * x / y))
                else:
                    small_shape = int(round(1024 * y / x)), 1024
                # resize
                data = img.rescale_hq(data, small_shape)
                dtype = numpy.float16
            else:
                dtype = None  # just let the function use the best one
            # 2 x size of original image (on smallest axis) and at most
            # the size of a full-screen canvas
            size = min(min(data.shape) * 2, 1134)
            # TODO: First compute quickly a low resolution and then
            # compute a high resolution version.
            # TODO: could use the size of the canvas that will display
            # the image to save some computation time.
            bg_data = self.background.value
            if bg_data is None:
                # Simple version: remove the background value
                data0 = polar.ARBackgroundSubtract(data)
            else:
                data0 = img.Subtract(data, bg_data)  # metadata from data
            # Warning: allocates lot of memory, which will not be free'd until
            # the current thread is terminated.
            polard = polar.AngleResolved2Polar(data0, size, hole=False, dtype=dtype)
            # TODO: don't hold too many of them in cache (eg, max 3 * 1134**2)
            self._polar[pos] = polard
        except Exception:
            logging.exception("Failed to convert to azimuthal projection")
            return data  # display it raw as fallback
    return polard
def test_5d(self):
    """Rescale only the X/Y dimensions of a 5D (CTZYX) array."""
    # C=3, T=2, Z=2, Y=1024, X=512
    background = 58
    data = numpy.zeros((3, 2, 2, 1024, 512), dtype="uint8") + background
    out = img.rescale_hq(model.DataArray(data), (3, 2, 2, 512, 256))
    self.assertEqual(out.shape, (3, 2, 2, 512, 256))
def test_5d(self):
    # C=3, T=2, Z=2, Y=1024, X=512
    shape_in = (3, 2, 2, 1024, 512)
    shape_out = (3, 2, 2, 512, 256)
    arr = numpy.zeros(shape_in, dtype="uint8")
    arr += 58  # uniform background
    img_in = model.DataArray(arr)
    # Only the last two (Y, X) dimensions change
    out = img.rescale_hq(img_in, shape_out)
    self.assertEqual(out.shape, shape_out)
def test_smoothness(self):
    """Downscaling a checkerboard must average neighbouring pixels."""
    board = numpy.zeros((100, 100), dtype="uint8")
    # Checkerboard: alternating 0/255 pixels
    for row in range(100):
        for col in range(100):
            board[row, col] = ((row + col) % 2) * 255
    out = img.rescale_hq(board, (50, 50))
    # Each 2x2 cell holds two 0s and two 255s -> output pixel should be 128
    for col in range(10, 20):
        self.assertEqual(128, out[0, col])
def test_smoothness(self):
    """A chess-board pattern must downscale to a uniform grey."""
    ys, xs = numpy.indices((100, 100))
    # Alternating 0/255 pixels, built without an explicit double loop
    img_in = (((ys + xs) % 2) * 255).astype("uint8")
    out = img.rescale_hq(img_in, (50, 50))
    # If the scaling is smooth, the black/white pairs average to 128
    for i in range(10, 20):
        self.assertEqual(128, out[0, i])
def test_simple(self):
    """Plain 2D uint16 downscale: shape, dtype and content preserved."""
    background = 2 ** 12
    watermark = 538
    img12 = numpy.zeros((1024, 512), dtype="uint16") + background
    img12[20:40, 50:70] = watermark  # small marked square

    out = img.rescale_hq(img12, (512, 256))

    self.assertEqual(out.shape, (512, 256))
    self.assertEqual(out.dtype, img12.dtype)
    # Watermark must appear at half the original coordinates
    self.assertEqual(out[15, 30], watermark)
    self.assertEqual(out[30, 60], background)
def test_simple(self):
    """Basic downscale of a raw numpy array (no metadata)."""
    background = 2 ** 12
    watermark = 538
    data = numpy.full((1024, 512), background, dtype="uint16")
    # Marker rectangle so its new position can be verified
    data[20:40, 50:70] = watermark

    out = img.rescale_hq(data, (512, 256))

    self.assertEqual(out.shape, (512, 256))
    self.assertEqual(out.dtype, data.dtype)
    self.assertEqual(out[15, 30], watermark)   # marker, at scaled position
    self.assertEqual(out[30, 60], background)  # untouched background
def test_rgb(self):
    """
    Test downscaling an RGB in YXC format
    """
    # X=1024, Y=512
    img_in = numpy.zeros((512, 1024, 3), dtype="uint8") + 58
    # watermark: one distinct value per colour channel
    for c, val in enumerate((50, 100, 150)):
        img_in[246:266, 502:522, c] = val
    img_in = model.DataArray(img_in)
    img_in.metadata[model.MD_DIMS] = "YXC"

    out = img.rescale_hq(img_in, (256, 512, 3))
    self.assertEqual(out.shape, (256, 512, 3))
    self.assertEqual(out.dtype, img_in.dtype)
    # Check watermark. Should be no interpolation between color channels
    for c, val in enumerate((50, 100, 150)):
        self.assertEqual(val, out[128, 256, c])
def test_rgb(self):
    """Downscale a YXC RGB image; channels must not bleed into each other."""
    size = (512, 1024, 3)  # Y=512, X=1024, C=3
    data = numpy.full(size, 58, dtype="uint8")
    # Per-channel watermark values in a small rectangle
    data[246:266, 502:522, 0] = 50
    data[246:266, 502:522, 1] = 100
    data[246:266, 502:522, 2] = 150
    da = model.DataArray(data)
    da.metadata[model.MD_DIMS] = "YXC"

    out = img.rescale_hq(da, (256, 512, 3))

    self.assertEqual(out.shape, (256, 512, 3))
    self.assertEqual(out.dtype, da.dtype)
    # No interpolation across C: each channel keeps its own watermark value
    self.assertEqual(50, out[128, 256, 0])
    self.assertEqual(100, out[128, 256, 1])
    self.assertEqual(150, out[128, 256, 2])
def test_25d(self):
    """
    Test downscaling a 2.5D image (YXC, with C=14)
    """
    # X=1024, Y=512
    size = (512, 1024, 14)
    background = 58
    # FIX: numpy.float was a deprecated alias of the builtin float and was
    # removed in NumPy 1.24 (AttributeError on modern NumPy); use float.
    img_in = numpy.zeros(size, dtype=float) + background
    # watermark
    img_in[246:266, 502:522, 0] = 50
    img_in[246:266, 502:522, 1] = 100
    img_in[246:266, 502:522, 2] = 150
    img_in[246:266, 502:522, 3] = 255  # Alpha
    img_in = model.DataArray(img_in)
    img_in.metadata[model.MD_DIMS] = "YXC"

    out = img.rescale_hq(img_in, (256, 512, 14))
    self.assertEqual(out.shape, (256, 512, 14))
    self.assertEqual(out.dtype, img_in.dtype)
    # Check watermark. Should be no interpolation between color channels
    self.assertEqual(50, out[128, 256, 0])
    self.assertEqual(100, out[128, 256, 1])
    self.assertEqual(150, out[128, 256, 2])
    self.assertEqual(255, out[128, 256, 3])
def test_25d(self):
    """
    Test downscaling a 2.5D image (YXC, with C=14)
    """
    # X=1024, Y=512
    size = (512, 1024, 14)
    background = 58
    # FIX: numpy.float was a deprecated alias of the builtin float and was
    # removed in NumPy 1.24 (AttributeError on modern NumPy); use float.
    img_in = numpy.zeros(size, dtype=float) + background
    # watermark
    img_in[246:266, 502:522, 0] = 50
    img_in[246:266, 502:522, 1] = 100
    img_in[246:266, 502:522, 2] = 150
    img_in[246:266, 502:522, 3] = 255  # Alpha
    img_in = model.DataArray(img_in)
    img_in.metadata[model.MD_DIMS] = "YXC"

    out = img.rescale_hq(img_in, (256, 512, 14))
    self.assertEqual(out.shape, (256, 512, 14))
    self.assertEqual(out.dtype, img_in.dtype)
    # Check watermark. Should be no interpolation between color channels
    self.assertEqual(50, out[128, 256, 0])
    self.assertEqual(100, out[128, 256, 1])
    self.assertEqual(150, out[128, 256, 2])
    self.assertEqual(255, out[128, 256, 3])
def _project2Polar(self, pos): """ Return the polar projection of the image at the given position. pos (float, float, string or None): position (must be part of the ._pos) returns DataArray: the polar projection """ # Note: Need a copy of the link to the dict. If self._polar is reset while # still running this method, the dict might get new entries again, though it should be empty. polar = self._polar if pos in polar: polard = polar[pos] else: # Compute the polar representation data = self._pos[pos] try: # Get bg image, if existing. It must match the polarization (defaulting to MD_POL_NONE). bg_image = self._getBackground( data.metadata.get(MD_POL_MODE, MD_POL_NONE)) if bg_image is None: # Simple version: remove the background value data_bg_corr = angleres.ARBackgroundSubtract(data) else: data_bg_corr = img.Subtract(data, bg_image) # metadata from data if numpy.prod(data_bg_corr.shape) > (1280 * 1080): # AR conversion fails with very large images due to too much # memory consumed (> 2Gb). So, rescale + use a "degraded" type that # uses less memory. As the display size is small (compared # to the size of the input image, it shouldn't actually # affect much the output. logging.info( "AR image is very large %s, will convert to " "azimuthal projection in reduced precision.", data_bg_corr.shape) y, x = data_bg_corr.shape if y > x: small_shape = 1024, int(round(1024 * x / y)) else: small_shape = int(round(1024 * y / x)), 1024 # resize data_bg_corr = img.rescale_hq(data_bg_corr, small_shape) # 2 x size of original image (on smallest axis) and at most # the size of a full-screen canvas size = min(min(data_bg_corr.shape) * 2, 1134) # TODO: First compute quickly a low resolution and then # compute a high resolution version. # TODO: could use the size of the canvas that will display # the image to save some computation time. # Warning: allocates lot of memory, which will not be free'd until # the current thread is terminated. 
polard = angleres.AngleResolved2Polar(data_bg_corr, size, hole=False) # TODO: don't hold too many of them in cache (eg, max 3 * 1134**2) polar[pos] = polard except Exception: logging.exception("Failed to convert to azimuthal projection") return data # display it raw as fallback return polard
def _project2Polar(self, pos): """ Return the polar projection of the image at the given position. pos (float, float, string or None): position (must be part of the ._pos) returns DataArray: the polar projection """ # Note: Need a copy of the link to the dict. If self._polar is reset while # still running this method, the dict might get new entries again, though it should be empty. polar = self._polar if pos in polar: polard = polar[pos] else: # Compute the polar representation data = self._pos[pos] try: # Get bg image, if existing. It must match the polarization (defaulting to MD_POL_NONE). bg_image = self._getBackground(data.metadata.get(MD_POL_MODE, MD_POL_NONE)) if bg_image is None: # Simple version: remove the background value data_bg_corr = angleres.ARBackgroundSubtract(data) else: data_bg_corr = img.Subtract(data, bg_image) # metadata from data if numpy.prod(data_bg_corr.shape) > (1280 * 1080): # AR conversion fails with very large images due to too much # memory consumed (> 2Gb). So, rescale + use a "degraded" type that # uses less memory. As the display size is small (compared # to the size of the input image, it shouldn't actually # affect much the output. logging.info("AR image is very large %s, will convert to " "azimuthal projection in reduced precision.", data_bg_corr.shape) y, x = data_bg_corr.shape if y > x: small_shape = 1024, int(round(1024 * x / y)) else: small_shape = int(round(1024 * y / x)), 1024 # resize data_bg_corr = img.rescale_hq(data_bg_corr, small_shape) # 2 x size of original image (on smallest axis) and at most # the size of a full-screen canvas size = min(min(data_bg_corr.shape) * 2, 1134) # TODO: First compute quickly a low resolution and then # compute a high resolution version. # TODO: could use the size of the canvas that will display # the image to save some computation time. # Warning: allocates lot of memory, which will not be free'd until # the current thread is terminated. 
polard = angleres.AngleResolved2Polar(data_bg_corr, size, hole=False) # TODO: don't hold too many of them in cache (eg, max 3 * 1134**2) polar[pos] = polard except Exception: logging.exception("Failed to convert to azimuthal projection") return data # display it raw as fallback return polard