Example #1
def synapses_for_bbox(self, shape, offset):
    """
    Returns { segid: [ ((x,y,z), swc_label), ... ] }
    where x,y,z are in voxel coordinates with the
    origin set to the bottom left corner of this cutout.
    """
    bbox = Bbox(offset, shape + offset) * vol.resolution
    center = bbox.center()
    diagonal = Vec(*(bbox.maxpt - center))
    # query a ball that circumscribes the bbox, then keep
    # only the centroids that actually fall inside it
    pts = [
        centroids[i, :]
        for i in kdtree.query_ball_point(center, diagonal.length())
    ]
    pts = [tuple(Vec(*pt, dtype=int)) for pt in pts if bbox.contains(pt)]

    synapses = defaultdict(list)
    for pt in pts:
        voxel_pt = Vec(*pt, dtype=np.float32) / vol.resolution - offset
        for label, swc_label in labelsmap[pt]:
            synapses[label].append((tuple(voxel_pt.astype(int)), swc_label))
    return synapses
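
This method depends on names from its enclosing scope that are not shown in the excerpt: vol (a CloudVolume), centroids (an array of synapse centroid coordinates), kdtree (a KD-tree built over those centroids, e.g. scipy's cKDTree, whose query_ball_point is used above), and labelsmap (a mapping from centroid tuples to (label, swc_label) pairs). A minimal sketch of a call, with hypothetical shape and offset values:

# hypothetical usage; `task` is an instance of the enclosing (unshown) class
shape = Vec(512, 512, 64)
offset = Vec(0, 0, 0)
synapses = task.synapses_for_bbox(shape, offset)
for segid, pairs in synapses.items():
    for (x, y, z), swc_label in pairs:
        print(segid, (x, y, z), swc_label)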
Example #2
class GrapheneMeshTask(RegisteredTask):
    def __init__(self, cloudpath, shape, offset, mip, **kwargs):
        """
    Convert all labels in the specified bounding box into meshes
    via marching cubes and quadratic edge collapse (github.com/seung-lab/zmesh).

    Required:
      shape: (sx,sy,sz) size of task
      offset: (x,y,z) offset from (0,0,0)
      cloudpath: neuroglancer/cloudvolume dataset path

    Optional:
      mip: (uint) level of the resolution pyramid to download segmentation from
      simplification_factor: (uint) try to reduce the number of triangles in the 
        mesh by this factor (but constrained by max_simplification_error)
      max_simplification_error: The maximum physical distance that
        simplification is allowed to move a triangle vertex by. 
      mesh_dir: which subdirectory to write the meshes to (overrides info file location)

      parallel_download: (uint: 1) number of processes to use during the segmentation download
      cache_control: (str: None) specify the cache-control header when uploading mesh files
      dust_threshold: (uint: None) don't bother meshing labels strictly smaller than this number of voxels.
      encoding: (str) 'precomputed' (default) or 'draco'
      draco_compression_level: (uint: 1) only applies to draco encoding
      progress: (bool: False) show progress bars for meshing 
      object_ids: (list of ints) if specified, only mesh these ids
      fill_missing: (bool: False) replace missing segmentation files with zeros instead of erroring
      timestamp: (int: None) (graphene only) use the segmentation existing at this
        UNIX timestamp.
    """
        super().__init__(cloudpath, shape, offset, mip, **kwargs)
        self.shape = Vec(*shape)
        self.offset = Vec(*offset)
        self.mip = int(mip)
        self.cloudpath = cloudpath
        self.layer_id = 2
        self.overlap_vx = 1
        self.options = {
            'cache_control': kwargs.get('cache_control', None),
            'draco_compression_level': kwargs.get('draco_compression_level',
                                                  1),
            'fill_missing': kwargs.get('fill_missing', False),
            'max_simplification_error': kwargs.get('max_simplification_error',
                                                   40),
            'simplification_factor': kwargs.get('simplification_factor', 100),
            'mesh_dir': kwargs.get('mesh_dir', None),
            'progress': kwargs.get('progress', False),
            'timestamp': kwargs.get('timestamp', None),
        }

    def execute(self):
        self.cv = CloudVolume(
            self.cloudpath,
            mip=self.mip,
            bounded=False,
            fill_missing=self.options['fill_missing'],
            mesh_dir=self.options['mesh_dir'],
        )

        if not self.cv.mesh.meta.is_sharded():
            raise ValueError("The mesh sharding parameter must be defined.")

        self.bounds = Bbox(self.offset, self.shape + self.offset)
        self.bounds = Bbox.clamp(self.bounds, self.cv.bounds)

        self.progress = bool(self.options['progress'])

        # zmesh scales mesh vertices by the voxel resolution (anisotropy),
        # so the output meshes are in physical units
        self.mesher = zmesh.Mesher(self.cv.resolution)

        # Marching cubes needs 1 voxel overlap to properly
        # stitch adjacent meshes.
        # data_bounds = self.bounds.clone()
        # data_bounds.maxpt += self.overlap_vx

        self.mesh_dir = self.get_mesh_dir()
        self.draco_encoding_settings = draco_encoding_settings(
            shape=(self.shape + self.overlap_vx),
            offset=self.offset,
            resolution=self.cv.resolution,
            compression_level=self.options['draco_compression_level'],
            create_metadata=True,
            uses_new_draco_bin_size=self.cv.meta.uses_new_draco_bin_size,
        )

        chunk_pos = self.cv.meta.point_to_chunk_position(self.bounds.center(),
                                                         mip=self.mip)

        img = mesh_graphene_remap.remap_segmentation(
            self.cv,
            chunk_pos.x,
            chunk_pos.y,
            chunk_pos.z,
            mip=self.mip,
            overlap_vx=self.overlap_vx,
            time_stamp=self.options['timestamp'],
            progress=self.progress,
        )

        if not np.any(img):
            return

        self.upload_meshes(self.compute_meshes(img))

    def get_mesh_dir(self):
        if self.options['mesh_dir'] is not None:
            return self.options['mesh_dir']
        elif 'mesh' in self.cv.info:
            return self.cv.info['mesh']
        else:
            raise ValueError(
                "The mesh destination is not present in the info file.")

    def compute_meshes(self, data):
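        # transpose: zmesh anticipates C-order arrays, while the
        # segmentation cutout arrives in Fortran order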
        data = data.T
        self.mesher.mesh(data)
        del data

        meshes = {}
        for obj_id in tqdm(self.mesher.ids(),
                           disable=(not self.progress),
                           desc="Mesh"):
            # remapped_id = component_map[obj_id]
            meshes[obj_id] = self.create_mesh(obj_id)

        return meshes

    def upload_meshes(self, meshes):
        if len(meshes) == 0:
            return

        reader = self.cv.mesh.readers[self.layer_id]

        shard_binary = reader.spec.synthesize_shard(meshes)
        # the shard filename is derived from the chunk position,
        # so any label inside this L2 chunk will do
        shard_filename = reader.get_filename(list(meshes.keys())[0])

        cf = CloudFiles(self.cv.cloudpath)
        cf.put(
            f"{self.get_mesh_dir()}/initial/{self.layer_id}/{shard_filename}",
            shard_binary,
            compress=None,
            content_type="application/octet-stream",
            cache_control="no-cache",
        )

    def create_mesh(self, obj_id):
        mesh = self.mesher.get_mesh(
            obj_id,
            simplification_factor=self.options['simplification_factor'],
            max_simplification_error=self.options['max_simplification_error'],
            # Graphene meshes were created before we fixed the offset problem
            # so unless otherwise specified, keep this set to False
            voxel_centered=False,
        )

        self.mesher.erase(obj_id)
        mesh.vertices[:] += self.bounds.minpt * self.cv.resolution

        mesh_binary = DracoPy.encode(mesh.vertices, mesh.faces,
                                     **self.draco_encoding_settings)

        return mesh_binary
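
A minimal usage sketch; the graphene cloudpath below is a placeholder, and in practice these tasks are typically generated in bulk and executed by taskqueue workers rather than run directly:

# hypothetical usage; the cloudpath is a placeholder
task = GrapheneMeshTask(
    cloudpath="graphene://https://example.com/segmentation/table/dummy",
    shape=(448, 448, 448),
    offset=(0, 0, 0),
    mip=2,
    mesh_dir="graphene_meshes",
    fill_missing=True,
    progress=True,
)
task.execute()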