Example #1
0
def create_quantized_affinity_tasks(taskqueue,
                                    src_layer,
                                    dest_layer,
                                    shape,
                                    fill_missing=False):
    """Enqueue QuantizeAffinitiesTask jobs covering the destination layer.

    taskqueue: queue object exposing insert() and wait()
    src_layer: path of the source affinity layer
    dest_layer: path of the destination quantized layer
    shape: per-task size in voxels (converted to a Vec)
    fill_missing: forwarded to each task; tolerate missing source chunks
    """
    shape = Vec(*shape)

    # Write the destination info file before any task needs it.
    info = create_quantized_affinity_info(src_layer, dest_layer, shape)
    destvol = CloudVolume(dest_layer, info=info)
    destvol.commit_info()

    create_downsample_scales(dest_layer, mip=0, ds_shape=shape)

    for startpt in tqdm(xyzrange(destvol.bounds.minpt, destvol.bounds.maxpt,
                                 shape),
                        desc="Inserting QuantizeAffinities Tasks"):
        task = QuantizeAffinitiesTask(
            source_layer_path=src_layer,
            dest_layer_path=dest_layer,
            shape=list(shape.clone()),
            offset=list(startpt.clone()),
            fill_missing=fill_missing,
        )
        # BUG FIX: the original body referenced the undefined name
        # `task_queue`; the parameter is spelled `taskqueue`, so every
        # call raised NameError on the first iteration.
        taskqueue.insert(task)
    taskqueue.wait('Uploading')
Example #2
0
def create_meshing_tasks(task_queue, layer_path, mip, shape=Vec(512, 512,
                                                                512)):
    """Enqueue one MeshTask per shape-sized grid cell of the volume,
    then record the run in the layer's provenance."""
    shape = Vec(*shape)
    max_simplification_error = 40

    vol = CloudVolume(layer_path, mip)

    # Make sure the info file advertises a mesh directory before tasks run.
    if 'mesh' not in vol.info:
        vol.info['mesh'] = 'mesh_mip_{}_err_{}'.format(
            mip, max_simplification_error)
        vol.commit_info()

    grid = xyzrange(vol.bounds.minpt, vol.bounds.maxpt, shape)
    for gridpt in tqdm(grid, desc="Inserting Mesh Tasks"):
        task_queue.insert(
            MeshTask(
                layer_path=layer_path,
                mip=vol.mip,
                shape=shape.clone(),
                offset=gridpt.clone(),
                max_simplification_error=max_simplification_error,
            ))
    task_queue.wait('Uploading MeshTasks')

    provenance_entry = {
        'method': {
            'task': 'MeshTask',
            'layer_path': layer_path,
            'mip': vol.mip,
            'shape': shape.tolist(),
        },
        'by': USER_EMAIL,
        'date': strftime('%Y-%m-%d %H:%M %Z'),
    }
    vol.provenance.processing.append(provenance_entry)
    vol.commit_provenance()
Example #3
0
def create_watershed_remap_tasks(task_queue,
                                 map_path,
                                 src_layer_path,
                                 dest_layer_path,
                                 shape=Vec(2048, 2048, 64)):
    """Enqueue WatershedRemapTask jobs covering the source volume and
    record the run in the destination layer's provenance."""
    shape = Vec(*shape)
    srcvol = CloudVolume(src_layer_path)

    create_downsample_scales(dest_layer_path, mip=0, ds_shape=shape)

    grid = xyzrange(srcvol.bounds.minpt, srcvol.bounds.maxpt, shape)
    for gridpt in tqdm(grid, desc="Inserting Remap Tasks"):
        task_queue.insert(
            WatershedRemapTask(
                map_path=map_path,
                src_path=src_layer_path,
                dest_path=dest_layer_path,
                shape=shape.clone(),
                offset=gridpt.clone(),
            ))
    task_queue.wait('Uploading Remap Tasks')

    destvol = CloudVolume(dest_layer_path)
    provenance_entry = {
        'method': {
            'task': 'WatershedRemapTask',
            'src': src_layer_path,
            'dest': dest_layer_path,
            'remap_file': map_path,
            'shape': list(shape),
        },
        'by': '*****@*****.**',
        'date': strftime('%Y-%m-%d %H:%M %Z'),
    }
    destvol.provenance.processing.append(provenance_entry)
    destvol.commit_provenance()
Example #4
0
def create_transfer_tasks(task_queue,
                          src_layer_path,
                          dest_layer_path,
                          chunk_size=None,
                          shape=Vec(2048, 2048, 64),
                          fill_missing=False,
                          translate=(0, 0, 0)):
    """Enqueue TransferTask jobs copying src_layer_path into
    dest_layer_path, creating the destination info file if needed,
    then record the run in the destination's provenance.

    chunk_size: destination chunk size; defaults to the source's mip 0
      chunk size when falsy.
    fill_missing / translate: forwarded to every task.
    """
    shape = Vec(*shape)
    translate = Vec(*translate)
    srcvol = CloudVolume(src_layer_path)

    if not chunk_size:
        chunk_size = srcvol.info['scales'][0]['chunk_sizes'][0]
    chunk_size = Vec(*chunk_size)

    try:
        destvol = CloudVolume(dest_layer_path)
    except Exception:  # no info file
        # Seed the destination with a copy of the source info, keeping
        # only mip 0 and the requested chunk size.
        destvol = CloudVolume(dest_layer_path,
                              info=copy.deepcopy(srcvol.info))
        destvol.info['scales'] = destvol.info['scales'][:1]
        destvol.info['scales'][0]['chunk_sizes'] = [chunk_size.tolist()]
        destvol.commit_info()

    create_downsample_scales(dest_layer_path,
                             mip=0,
                             ds_shape=shape,
                             preserve_chunk_size=True)

    bounds = srcvol.bounds.clone()
    for gridpt in tqdm(xyzrange(bounds.minpt, bounds.maxpt, shape),
                       desc="Inserting Transfer Tasks"):
        task_queue.insert(
            TransferTask(
                src_path=src_layer_path,
                dest_path=dest_layer_path,
                shape=shape.clone(),
                offset=gridpt.clone(),
                fill_missing=fill_missing,
                translate=translate,
            ))
    task_queue.wait('Uploading Transfer Tasks')

    destvol = CloudVolume(dest_layer_path)
    destvol.provenance.processing.append({
        'method': {
            'task': 'TransferTask',
            'src': src_layer_path,
            'dest': dest_layer_path,
            'shape': list(map(int, shape)),
            'fill_missing': fill_missing,
            'translate': list(map(int, translate)),
        },
        'by': USER_EMAIL,
        'date': strftime('%Y-%m-%d %H:%M %Z'),
    })
    destvol.commit_provenance()
Example #5
0
def create_fixup_quantize_tasks(task_queue, src_layer, dest_layer, shape,
                                points):
    """Requeue QuantizeAffinitiesTask jobs for the regions containing
    the given fixup points."""
    shape = Vec(*shape)
    srcvol = CloudVolume(src_layer, 0)

    fixup_offsets = compute_fixup_offsets(srcvol, points, shape)
    for fixup_offset in tqdm(fixup_offsets,
                             desc="Inserting Corrective Quantization Tasks"):
        task_queue.insert(
            QuantizeAffinitiesTask(
                source_layer_path=src_layer,
                dest_layer_path=dest_layer,
                shape=list(shape.clone()),
                offset=list(fixup_offset.clone()),
            ))
    task_queue.wait('Uploading')
Example #6
0
class FinelyDividedTaskIterator():
    """
  Parallelizes tasks that do not have overlap.

  Evenly splits tasks between processes without 
  regards to whether the dividing line lands in
  the middle of a slice. 
  """
    def __init__(self, bounds, shape):
        # bounds: a bbox-like object exposing minpt and size3()
        # shape: per-task grid cell size; converted to a Vec
        self.bounds = bounds
        self.shape = Vec(*shape)
        self.start = 0  # first task index (inclusive)
        self.end = num_tasks(bounds, shape)  # one past the last task index

    def __len__(self):
        """Number of tasks this iterator will yield."""
        return self.end - self.start

    def __getitem__(self, slc):
        """Return a copy of this iterator restricted to a slice of task
        indices, clamped to the current [start, end) window.

        BUG FIX: the original crashed with TypeError on open-ended
        slices such as itr[:5] or itr[5:], because slc.start / slc.stop
        are None there; treat them as 0 / len(self) respectively.
        """
        lo = 0 if slc.start is None else slc.start
        hi = len(self) if slc.stop is None else slc.stop
        itr = copy.deepcopy(self)
        itr.start = max(self.start + lo, self.start)
        itr.end = min(self.start + hi, self.end)
        return itr

    def __iter__(self):
        # Yield one task per grid cell in [start, end); offsets are
        # grid coordinates scaled by shape, anchored at bounds.minpt.
        for i in range(self.start, self.end):
            pt = self.to_coord(i)
            offset = pt * self.shape + self.bounds.minpt
            yield self.task(self.shape.clone(), offset.clone())

        self.on_finish()

    def to_coord(self, index):
        """Convert an index into a grid coordinate defined by the task shape."""
        sx, sy, sz = np.ceil(self.bounds.size3() / self.shape).astype(int)
        sxy = sx * sy
        z = index // sxy
        y = (index - (z * sxy)) // sx
        x = index - sx * (y + z * sy)
        return Vec(x, y, z)

    def task(self, shape, offset):
        # Subclasses produce the concrete task for one (shape, offset) cell.
        raise NotImplementedError()

    def on_finish(self):
        # Optional hook invoked after all tasks have been yielded.
        pass
Example #7
0
def create_transfer_tasks(task_queue,
                          src_layer_path,
                          dest_layer_path,
                          shape=Vec(2048, 2048, 64),
                          fill_missing=False,
                          translate=(0, 0, 0)):
    """Enqueue TransferTask jobs copying src_layer_path into
    dest_layer_path, then record the run in the destination's
    provenance."""
    shape = Vec(*shape)
    translate = Vec(*translate)
    srcvol = CloudVolume(src_layer_path)

    create_downsample_scales(dest_layer_path,
                             mip=0,
                             ds_shape=shape,
                             preserve_chunk_size=True)

    bounds = srcvol.bounds.clone()
    grid = xyzrange(bounds.minpt, bounds.maxpt, shape)
    for gridpt in tqdm(grid, desc="Inserting Transfer Tasks"):
        task_queue.insert(
            TransferTask(
                src_path=src_layer_path,
                dest_path=dest_layer_path,
                shape=shape.clone(),
                offset=gridpt.clone(),
                fill_missing=fill_missing,
                translate=translate,
            ))
    task_queue.wait('Uploading Transfer Tasks')

    destvol = CloudVolume(dest_layer_path)
    provenance_entry = {
        'method': {
            'task': 'TransferTask',
            'src': src_layer_path,
            'dest': dest_layer_path,
            'shape': list(map(int, shape)),
        },
        'by': '*****@*****.**',
        'date': strftime('%Y-%m-%d %H:%M %Z'),
    }
    destvol.provenance.processing.append(provenance_entry)
    destvol.commit_provenance()
Example #8
0
def create_boss_transfer_tasks(task_queue,
                               src_layer_path,
                               dest_layer_path,
                               shape=Vec(1024, 1024, 64)):
    """Enqueue BossTransferTask jobs covering the destination volume.

    Note: Weird errors with datatype changing to float64 when requesting
    2048,2048,64; 1024,1024,64 worked nicely though.
    """
    shape = Vec(*shape)
    destvol = CloudVolume(dest_layer_path)

    create_downsample_scales(dest_layer_path, mip=0, ds_shape=shape)

    grid = xyzrange(destvol.bounds.minpt, destvol.bounds.maxpt, shape)
    for gridpt in tqdm(grid, desc="Inserting Boss Transfer Tasks"):
        task_queue.insert(
            BossTransferTask(
                src_path=src_layer_path,
                dest_path=dest_layer_path,
                shape=shape.clone(),
                offset=gridpt.clone(),
            ))
    task_queue.wait('Uploading Boss Transfer Tasks')
Example #9
0
def create_meshing_tasks(task_queue, layer_path, mip, shape=Vec(512, 512,
                                                                512)):
    """Enqueue one MeshTask per shape-sized grid cell of the volume."""
    shape = Vec(*shape)
    max_simplification_error = 40

    vol = CloudVolume(layer_path, mip)

    # Advertise the mesh directory in the info file before any task runs.
    if 'mesh' not in vol.info:
        vol.info['mesh'] = 'mesh_mip_{}_err_{}'.format(
            mip, max_simplification_error)
        vol.commit_info()

    grid = xyzrange(vol.bounds.minpt, vol.bounds.maxpt, shape)
    for gridpt in tqdm(grid, desc="Inserting Mesh Tasks"):
        task_queue.insert(
            MeshTask(
                layer_path=layer_path,
                mip=vol.mip,
                shape=shape.clone(),
                offset=gridpt.clone(),
                max_simplification_error=max_simplification_error,
            ))
    task_queue.wait('Uploading MeshTasks')
Example #10
0
def create_luminance_levels_tasks(layer_path,
                                  levels_path=None,
                                  coverage_factor=0.01,
                                  shape=None,
                                  offset=(0, 0, 0),
                                  mip=0,
                                  bounds=None):
    """
  Compute per slice luminance level histogram and write them as
  $layer_path/levels/$z. Each z file looks like:

  {
    "levels": [ 0, 35122, 12, ... ], # 256 indices, index = luminance i.e. 0 is black, 255 is white 
    "patch_size": [ sx, sy, sz ], # metadata on how large the patches were
    "num_patches": 20, # metadata on
    "coverage_ratio": 0.011, # actual sampled area on this slice normalized by ROI size.
  }

  layer_path: source image to sample from
  levels_path: which path to write ./levels/ to (default: $layer_path)
  coverage_factor: what fraction of the image to sample

  offset & shape: Allows you to specify an ROI if much of
    the edges are black. Defaults to entire image.
  mip: int, which mip to work with, default maximum resolution
  """
    vol = CloudVolume(layer_path, mip=mip)

    # BUG FIX: was `shape == None`; identity comparison is correct for None.
    if shape is None:
        shape = vol.shape.clone()
        shape.z = 1  # one z-slice per task

    offset = Vec(*offset)
    zoffset = offset.clone()

    bounds = get_bounds(vol, bounds, shape, mip)
    protocol = vol.path.protocol

    class LuminanceLevelsTaskIterator(object):
        def __len__(self):
            # BUG FIX: __iter__ yields one task per z in the INCLUSIVE
            # range [minpt.z, maxpt.z], i.e. max - min + 1 tasks; the
            # original undercounted by one.
            return bounds.maxpt.z - bounds.minpt.z + 1

        def __iter__(self):
            for z in range(bounds.minpt.z, bounds.maxpt.z + 1):
                zoffset.z = z
                yield LuminanceLevelsTask(
                    src_path=layer_path,
                    levels_path=levels_path,
                    shape=shape,
                    offset=zoffset,
                    coverage_factor=coverage_factor,
                    mip=mip,
                )

            if protocol == 'boss':
                # BUG FIX: the original raised StopIteration here, which
                # PEP 479 (Python 3.7+) converts into a RuntimeError
                # inside a generator; a plain return ends it cleanly.
                return

            # BUG FIX: the original assigned to `vol` inside this
            # generator, making `vol` a local name; when the `try` below
            # failed, the `except` branch read `vol.info` before any
            # local assignment had succeeded, raising UnboundLocalError.
            # Using a distinct local name keeps the closure variable
            # `vol` (the source volume) readable.
            if levels_path:
                try:
                    prov_vol = CloudVolume(levels_path)
                except cloudvolume.exceptions.InfoUnavailableError:
                    prov_vol = CloudVolume(levels_path, info=vol.info)
            else:
                prov_vol = CloudVolume(layer_path, mip=mip)

            prov_vol.provenance.processing.append({
                'method': {
                    'task': 'LuminanceLevelsTask',
                    'src': layer_path,
                    'levels_path': levels_path,
                    'shape': Vec(*shape).tolist(),
                    'offset': Vec(*offset).tolist(),
                    'bounds': [bounds.minpt.tolist(),
                               bounds.maxpt.tolist()],
                    'coverage_factor': coverage_factor,
                    'mip': mip,
                },
                'by':
                OPERATOR_CONTACT,
                'date':
                strftime('%Y-%m-%d %H:%M %Z'),
            })
            prov_vol.commit_provenance()

    return LuminanceLevelsTaskIterator()