Пример #1
0
 def load_tilespec_cutout(self, x0, x1, y0, y1, z, w):
     '''Load a cutout from tilespecs.

     :param x0, x1, y0, y1: cutout bounds in full-resolution coordinates
     :param z: section (plane) index into self.kdtrees / self.ts
     :param w: mipmap level; the returned image is downscaled by 2**w
     :returns: the rendered cutout image (first element of the crop result)
     '''
     kdtree = self.kdtrees[z]
     assert isinstance(kdtree, KDTree)
     # Expand the query window by half a tile so tiles whose centers lie
     # just outside the cutout are still picked up by the nearest-center
     # query below.
     x0a = x0 - self.tile_width / 2
     x1a = x1 + self.tile_width / 2
     y0a = y0 - self.tile_height / 2
     y1a = y1 + self.tile_height / 2
     # Sample points every half tile.  int() is required: under true
     # division the quotient is a float, and np.linspace needs an integer
     # sample count.
     nx = int(2 * (x1a - x0a) / self.tile_width) + 1
     ny = int(2 * (y1a - y0a) / self.tile_height) + 1
     xr = np.vstack([np.linspace(x0a, x1a, nx)] * ny)
     yr = np.column_stack([np.linspace(y0a, y1a, ny)] * nx)
     coords = np.column_stack([xr.flatten(), yr.flatten()])
     # Nearest tile center for each sample point; deduplicate tile indices.
     d, idxs = kdtree.query(coords)
     idxs = np.unique(idxs)
     single_renderers = []
     for idx in idxs:
         ts = self.ts[z][idx]
         transformation_models = [Transforms.from_tilespec(ts_transform)
                                  for ts_transform in ts.get_transforms()]
         # Renamed from "renderer" so it no longer shadows the combined
         # renderer built after the loop.
         single_renderer = TilespecSingleTileRenderer(
             ts,
             transformation_models=transformation_models,
             compute_distances=False)
         single_renderers.append(single_renderer)
         if w > 0:
             # Scale down to the requested mipmap level.
             single_renderer.add_transformation(
                 AffineModel(m=np.eye(3) / 2.0**w))
     renderer = MultipleTilesRenderer(single_renderers,
                                      blend_type='AVERAGING',
                                      dtype=self.dtype)
     # Crop in mipmap-level coordinates; int() keeps the bounds integral
     # under true division.
     return renderer.crop(int(x0 / 2**w), int(y0 / 2**w), int(x1 / 2**w),
                          int(y1 / 2**w))[0]
Пример #2
0
    def load(self, x, y, z, w):
        '''
        @override

        Render the block at grid position (x, y) in plane z at mipmap
        level w.
        '''
        # Clamp z into the range of available planes.
        if z < self.min_z:
            z = self.min_z
        elif z > self.max_z:
            z = self.max_z

        if z not in self.kdtrees:
            return np.zeros(self.blocksize)

        x0 = x * self.blocksize[0]
        # Bug fix: the y origin must scale by the y block extent,
        # blocksize[1] (matching y1 below); it previously used
        # blocksize[0].
        y0 = y * self.blocksize[1]
        x1 = x0 + self.blocksize[0]
        y1 = y0 + self.blocksize[1]

        kdtree = self.kdtrees[z]
        assert isinstance(kdtree, KDTree)
        #
        # Look every "blocksize" within the kdtree for the closest center
        #
        # int() is required: under true division the quotient is a float,
        # and np.linspace needs an integer sample count.
        nx = int(2 * (x1 - x0) / self.tile_width) + 1
        ny = int(2 * (y1 - y0) / self.tile_height) + 1
        xr = np.vstack([np.linspace(x0, x1, nx)] * ny)
        yr = np.column_stack([np.linspace(y0, y1, ny)] * nx)
        coords = np.column_stack([xr.flatten(), yr.flatten()])
        d, idxs = kdtree.query(coords)
        idxs = np.unique(idxs)
        single_renderers = []
        for idx in idxs:
            ts = self.ts[z][idx]
            single_renderer = TilespecSingleTileRenderer(
                ts, compute_distances=False, mipmap_level=w)
            single_renderers.append(single_renderer)
            # Bracket the tilespec transforms (defined at full resolution)
            # with an upscale/downscale pair so they apply correctly at
            # mipmap level w.
            if w > 0:
                single_renderer.add_transformation(
                    AffineModel(m=np.eye(3) * 2.0**w))
            for ts_transform in ts.get_transforms():
                single_renderer.add_transformation(
                    Transforms.from_tilespec(ts_transform))
            if w > 0:
                single_renderer.add_transformation(
                    AffineModel(m=np.eye(3) / 2.0**w))
        renderer = MultipleTilesRenderer(single_renderers)
        # Crop in mipmap-level coordinates; int() keeps the bounds
        # integral under true division (the sibling tilespec-cutout
        # loader does the same).
        return renderer.crop(int(x0 / 2**w), int(y0 / 2**w),
                             int(x1 / 2**w), int(y1 / 2**w))[0]
 def test_04_02_crop_with_transform(self):
     '''Cropping after an x/y-swap affine returns the transposed window.'''
     rs = np.random.RandomState(12345)
     image = rs.uniform(high=255, size=(35, 42)).astype(np.uint8)
     renderer = STR(image, False, False)
     swap_xy = AffineModel(np.array([[0, 1, 0], [1, 0, 0], [0, 0, 1]]))
     renderer.add_transformation(swap_xy)
     img_out, start, mask = renderer.crop(5, 10, 20, 30)
     expected = image.transpose()[10:31, 5:21]
     np.testing.assert_array_equal(img_out, expected)
 def test_01_03_affine(self):
     '''An x/y-swap affine renders the transposed image.'''
     rs = np.random.RandomState(325)
     image = rs.uniform(high=255, size=(35, 42)).astype(np.uint8)
     renderer = STR(image, False, False)
     swap_xy = AffineModel(m=np.array([[0, 1, 0], [1, 0, 0], [0, 0, 1]]))
     renderer.add_transformation(swap_xy)
     img_out, start = renderer.render()
     self.assertSequenceEqual(img_out.shape, (42, 35))
     np.testing.assert_array_almost_equal(image, img_out.transpose())
Пример #5
0
 def compute_val_max(offsets):
     '''Return the maximum coordinate discrepancy between matched tiles.

     Applies the first transform of each tile in the module-level
     ``tiles`` and ``tiles2`` dicts to the point (1, 1) and returns the
     largest absolute per-axis difference over all tiles.

     :param offsets: unused; kept so the signature stays compatible with
         callers that pass an offsets vector (e.g. an optimizer).
     :returns: maximum absolute per-axis difference as a float
     '''
     worst = 0.0
     for key in tiles.keys():
         m1 = AffineModel()
         m1.set_from_modelspec(tiles[key]["transforms"][0]["dataString"])
         m2 = AffineModel()
         m2.set_from_modelspec(tiles2[key]["transforms"][0]["dataString"])
         # Compare where each model maps the probe point (1, 1).
         p1 = m1.apply(np.array([1.0, 1.0]))[0, ]
         p2 = m2.apply(np.array([1.0, 1.0]))[0, ]
         worst = max(worst, abs(p1[0] - p2[0]), abs(p1[1] - p2[1]))
     return worst
 def test_02_01_bbox(self):
     '''bbox reflects translation and axis-swap transforms.'''
     rs = np.random.RandomState(456)
     image = rs.uniform(high=255, size=(35, 42)).astype(np.uint8)
     cases = (
         (TranslationModel(np.array([14, 25])), [14, 55, 25, 59]),
         (AffineModel(np.array([[0, 1, 0], [1, 0, 0], [0, 0, 1]])),
          [0, 34, 0, 41]),
     )
     for transform, expected_bbox in cases:
         renderer = STR(image, False, False)
         renderer.add_transformation(transform)
         self.assertSequenceEqual(renderer.bbox, expected_bbox)
Пример #7
0
 def compute_val_sum(offsets):
     '''Return the summed rounded coordinate discrepancy between tiles.

     Applies the first transform of each tile in the module-level
     ``tiles`` and ``tiles2`` dicts to the point (1, 1), rounds the
     per-axis differences, and sums them over all tiles.  Tiles whose
     error exceeds 20 are printed for manual inspection.

     :param offsets: unused; kept so the signature stays compatible with
         callers that pass an offsets vector (e.g. an optimizer).
     :returns: total rounded discrepancy as a float
     '''
     total = 0.0
     for key in tiles.keys():
         m1 = AffineModel()
         m1.set_from_modelspec(tiles[key]["transforms"][0]["dataString"])
         m2 = AffineModel()
         m2.set_from_modelspec(tiles2[key]["transforms"][0]["dataString"])
         p1 = m1.apply(np.array([1.0, 1.0]))[0, ]
         p2 = m2.apply(np.array([1.0, 1.0]))[0, ]
         # Compute the per-tile error once instead of four times.
         err = round(abs(p1[0] - p2[0])) + round(abs(p1[1] - p2[1]))
         total += err
         if err > 20.0:
             # print() function form works on both Python 2 and 3; the
             # original print statements were Python-2-only syntax.
             print("Big error. " + str(key) + " " + str(err))
             print(tiles[key])
             print("")
     return total
Пример #8
0
    def load(self, x, y, z, w):
        '''
        @override
        '''
        # Copy so the cached per-plane renderer is never mutated.
        rendered = deepcopy(self.layer_renderer[z])
        if w > 0:
            # Downscale to mipmap level w.
            scale_down = AffineModel(m=np.eye(3) / 2.0**w)
            rendered.add_transformation(scale_down)
        return rendered
 def test_03_01_mask(self):
     '''A 45-degree rotation leaves the corners unmasked and the central
     row/column fully masked.  (Removed an unused RandomState local.)'''
     image = np.zeros((20, 20), np.uint8)
     renderer = STR(image, True, False)
     angle = np.pi / 4
     rotation = AffineModel(
         np.array([[np.cos(angle), -np.sin(angle), 0],
                   [np.sin(angle), np.cos(angle), 0],
                   [0, 0, 1]]))
     renderer.add_transformation(rotation)
     renderer.render()
     mask, start = renderer.fetch_mask()
     # Rotated square does not cover the corners of its bounding box...
     self.assertEqual(mask[0, 0], 0)
     self.assertEqual(mask[-1, -1], 0)
     # ...but fully covers the central row and column.
     self.assertTrue(np.all(mask[int(mask.shape[0] / 2)] == 1))
     self.assertTrue(np.all(mask[:, int(mask.shape[1] / 2)] == 1))
 def test_05_02_crop_with_distances_affine(self):
     '''Distance map of a rotated tile: zero near corners, ~1 mid-edge.'''
     rs = np.random.RandomState(12345)
     image = rs.uniform(high=255, size=(42, 42)).astype(np.uint8)
     renderer = STR(image, False, True)
     angle = np.pi / 4
     rotation = AffineModel(
         np.array([[np.cos(angle), -np.sin(angle), 0],
                   [np.sin(angle), np.cos(angle), 0],
                   [0, 0, 1]]))
     renderer.add_transformation(rotation)
     diag = int(42 * np.sqrt(2) + 1)
     img_out, (x, y), cropped_distances = renderer.crop_with_distances(
         -diag, 0, diag, diag)
     self.assertEqual(cropped_distances[1, 1], 0)
     self.assertEqual(cropped_distances[-2, -2], 0)
     mid_col = int(cropped_distances.shape[1] / 2)
     self.assertGreater(cropped_distances[1, mid_col], .5)
     self.assertLessEqual(cropped_distances[1, mid_col], 1.5)
Пример #11
0
 def __init__(self, other_renderer):
     '''Initialize with another renderer

     :param other_renderer: A renderer derived from SingleTileRendererBase
     '''
     super(AlphaTileRenderer, self).__init__(
         other_renderer.width, other_renderer.height, False, False)
     # Promote 2x3 affine matrices to full 3x3 homogeneous form before
     # wrapping them in AffineModel.
     # Fix: the tuple after "for ... in" must be parenthesized — the
     # bare form was Python-2-only syntax and is a SyntaxError in
     # Python 3.
     pre, post = [
         AffineModel(np.vstack([transform, [0, 0, 1]])
                     if transform.shape[0] == 2
                     else transform)
         for transform in (other_renderer.pre_non_affine_transform,
                           other_renderer.post_non_affine_transform)]
     self.add_transformation(pre)
     if other_renderer.non_affine_transform is not None:
         self.add_transformation(other_renderer.non_affine_transform)
         # NOTE(review): "post" is only applied when a non-affine
         # transform exists, mirroring the original code — confirm the
         # post transform really should be skipped otherwise.
         self.add_transformation(post)