コード例 #1
0
ファイル: squeeze.py プロジェクト: yajivunev/gunpowder
    def provide(self, request):
        """Serve random raw data and constant labels, each padded with
        extra leading singleton dimensions (raw gets two, labels one)."""
        batch = gp.Batch()

        # RAW: random integer data covering the requested ROI.
        spec_raw = copy.deepcopy(self.array_spec_raw)
        spec_raw.roi = request[self.raw].roi
        shape_raw = spec_raw.roi.get_shape() / self.voxel_size
        batch[self.raw] = gp.Array(
            np.random.randint(0, 256, shape_raw, dtype=spec_raw.dtype),
            spec_raw)

        # Unsqueeze raw twice: two leading singleton dimensions.
        for _ in range(2):
            batch[self.raw].data = np.expand_dims(batch[self.raw].data,
                                                  axis=0)

        # LABELS: all-ones data covering the requested ROI.
        spec_labels = copy.deepcopy(self.array_spec_labels)
        spec_labels.roi = request[self.labels].roi
        shape_labels = spec_labels.roi.get_shape() / self.voxel_size
        batch[self.labels] = gp.Array(
            np.ones(shape_labels, dtype=spec_labels.dtype), spec_labels)

        # Unsqueeze labels once.
        batch[self.labels].data = np.expand_dims(batch[self.labels].data,
                                                 axis=0)

        return batch
コード例 #2
0
ファイル: train.py プロジェクト: pattonw/mouselight
    def provide(self, request):
        """Synthesize a (raw, gt) training pair from random skeletons.

        Seed points are drawn from random trees, grown into objects via a
        seeded watershed on a noise-perturbed distance transform, and the
        raw image is derived by assigning one gray value per object.
        """

        voxel_size = self.spec[self.raw].voxel_size
        # Work in a volume with a leading singleton dimension; it is
        # stripped again after the watershed ([0] below).
        shape = gp.Coordinate((1, ) + request[self.raw].roi.get_shape())

        # Smooth random field used below to perturb the distance transform.
        noise = np.abs(np.random.randn(*shape))
        smoothed_noise = gaussian_filter(noise, sigma=self.smoothness)

        seeds = np.zeros(shape, dtype=int)
        for i in range(self.n_objects):
            # The first object gets many more points than the rest —
            # presumably to act as a dominant/background object; TODO
            # confirm against skelerator semantics.
            if i == 0:
                num_points = 100
            else:
                num_points = self.points_per_skeleton
            points = np.stack(
                [
                    np.random.randint(0, shape[dim], num_points)
                    for dim in range(3)
                ],
                axis=1,
            )
            tree = skelerator.Tree(points)
            skeleton = skelerator.Skeleton(tree, [1, 1, 1],
                                           "linear",
                                           generate_graph=False)
            # Rasterize skeleton i into the seed volume with label i + 1.
            seeds = skeleton.draw(seeds, np.array([0, 0, 0]), i + 1)

        # Keep only seed voxels that are local maxima of a size-4 window.
        seeds[maximum_filter(seeds, size=4) != seeds] = 0
        # Distance to the nearest seed, perturbed by the smooth noise.
        seeds_dt = distance_transform_edt(seeds == 0) + 5.0 * smoothed_noise
        # Seeded watershed; drop the singleton dim and shift labels so the
        # smallest label becomes 0.
        gt_data = cwatershed(seeds_dt, seeds).astype(np.uint64)[0] - 1

        labels = np.unique(gt_data)

        # One evenly spaced gray value per object (float increments are
        # truncated to uint8 on assignment).
        raw_data = np.zeros_like(gt_data, dtype=np.uint8)
        value = 0
        for label in labels:
            raw_data[gt_data == label] = value
            value += 255.0 / self.n_objects

        spec = request[self.raw].copy()
        spec.voxel_size = (1, 1)
        raw = gp.Array(raw_data, spec)

        spec = request[self.gt].copy()
        spec.voxel_size = (1, 1)
        # Crop gt to the requested gt ROI, expressed relative to the raw
        # ROI's origin and converted to voxel units.
        gt_crop = (request[self.gt].roi -
                   request[self.raw].roi.get_begin()) / voxel_size
        gt_crop = gt_crop.to_slices()
        gt = gp.Array(gt_data[gt_crop], spec)

        batch = gp.Batch()
        batch[self.raw] = raw
        batch[self.gt] = gt

        return batch
コード例 #3
0
    def provide(self, request):
        """Serve crops of the precomputed M_PRED / D_PRED volumes for the
        requested ROI (converted from world units to voxel coordinates)."""
        roi = request[gp.ArrayKeys.M_PRED].roi
        slices = (roi / self.voxel_size).to_slices()

        batch = gp.Batch()
        batch.arrays[gp.ArrayKeys.M_PRED] = gp.Array(
            self.m_pred[slices],
            spec=gp.ArraySpec(roi=roi, voxel_size=self.voxel_size))
        # d_pred carries a leading channel dimension, so only the three
        # spatial axes are sliced.
        batch.arrays[gp.ArrayKeys.D_PRED] = gp.Array(
            self.d_pred[(slice(None),) + slices],
            spec=gp.ArraySpec(roi=roi, voxel_size=self.voxel_size))

        return batch
コード例 #4
0
ファイル: utils.py プロジェクト: funkelab/contraband
    def process(self, batch, request):
        """Extract matched point locations from the two point sets.

        Collects locations of nodes present in both graphs, converts them
        to voxel coordinates relative to the output ROI, and stores them
        (with a leading batch dimension) in the batch; also adds a batch
        dimension to both raw arrays and casts them to float32.
        """

        # Node ids present in both point sets.
        ids_0 = set([n.id for n in batch[self.points_0].nodes])
        ids_1 = set([n.id for n in batch[self.points_1].nodes])
        common_ids = ids_0.intersection(ids_1)

        locations_0 = []
        locations_1 = []
        # get list of only xy locations
        # locations are in voxels, relative to output roi
        points_roi = request[self.points_0].roi
        voxel_size = batch[self.raw_0].spec.voxel_size
        for i in common_ids:
            location_0 = np.array(batch[self.points_0].node(i).location)
            location_1 = np.array(batch[self.points_1].node(i).location)
            # Skip pairs where either endpoint lies outside the ROI.
            if not points_roi.contains(location_0):
                print(f"skipping point {i} at {location_0}")
                continue
            if not points_roi.contains(location_1):
                print(f"skipping point {i} at {location_1}")
                continue
            # World coordinates -> ROI-relative voxel coordinates
            # (in-place ops; assumes locations are float arrays — TODO
            # confirm node.location dtype).
            location_0 -= points_roi.get_begin()
            location_1 -= points_roi.get_begin()
            location_0 /= voxel_size
            location_1 /= voxel_size
            locations_0.append(location_0)
            locations_1.append(location_1)

        locations_0 = np.array(locations_0, dtype=np.float32)
        locations_1 = np.array(locations_1, dtype=np.float32)
        if self.is_2d:
            # Drop the leading coordinate in 2D mode.
            locations_0 = locations_0[:, 1:]
            locations_1 = locations_1[:, 1:]
        locations_0 = locations_0[np.newaxis]
        locations_1 = locations_1[np.newaxis]

        # create point location arrays (with batch dimension)
        batch[self.locations_0] = gp.Array(locations_0,
                                           self.spec[self.locations_0])
        batch[self.locations_1] = gp.Array(locations_1,
                                           self.spec[self.locations_1])

        # add batch dimension to raw
        batch[self.raw_0].data = batch[self.raw_0].data[np.newaxis, :]
        batch[self.raw_1].data = batch[self.raw_1].data[np.newaxis, :]

        # make sure raw is float32
        batch[self.raw_0].data = batch[self.raw_0].data.astype(np.float32)
        batch[self.raw_1].data = batch[self.raw_1].data.astype(np.float32)
コード例 #5
0
ファイル: utils.py プロジェクト: funkelab/contraband
    def provide(self, request):
        """Serve the stored point data.

        If ``self.points`` is an ``ArrayKey``, a random subset of
        ``self.num_points`` rows is returned as a dense array (with a
        leading batch dimension, optionally scaled); otherwise the points
        inside the requested ROI are returned as a ``gp.Graph``.  Labels,
        when available, are always provided as an array.
        """

        timing = Timing(self)
        timing.start()

        batch = gp.Batch()

        # If a Array is requested then we will randomly choose
        # the number of requested points
        if isinstance(self.points, gp.ArrayKey):
            points = np.random.choice(self.data.shape[0], self.num_points)
            data = self.data[points][np.newaxis]
            if self.scale is not None:
                data = data * self.scale
            if self.label_data is not None:
                labels = self.label_data[points]
            batch[self.points] = gp.Array(data, self.spec[self.points])

        else:
            # If a graph is requested we must select points within the
            # request ROI

            min_bb = request[self.points].roi.get_begin()
            max_bb = request[self.points].roi.get_end()

            logger.debug("Points source got request for %s",
                         request[self.points].roi)

            # BUGFIX: np.bool was a deprecated alias of the builtin and
            # was removed in NumPy 1.24 — use bool instead.
            point_filter = np.ones((self.data.shape[0], ), dtype=bool)
            for d in range(self.ndims):
                point_filter = np.logical_and(point_filter,
                                              self.data[:, d] >= min_bb[d])
                point_filter = np.logical_and(point_filter,
                                              self.data[:, d] < max_bb[d])

            points_data, labels = self._get_points(point_filter)
            logger.debug(f"Found {len(points_data)} points")
            points_spec = gp.GraphSpec(roi=request[self.points].roi.copy())
            batch.graphs[self.points] = gp.Graph(points_data, [], points_spec)

        # Labels will always be an Array
        if self.label_data is not None:
            batch[self.labels] = gp.Array(labels, self.spec[self.labels])

        timing.stop()
        batch.profiling_stats.add(timing)

        return batch
コード例 #6
0
    def provide(self, request):
        """Fill every requested array with ``self.func`` evaluated on the
        requested shape in voxel units."""

        timing = gp.profiling.Timing(self)
        timing.start()

        batch = gp.Batch()

        for array_key, request_spec in request.array_specs.items():
            logger.debug("Reading %s in %s...", array_key, request_spec.roi)

            # Convert the requested ROI from world units to voxels.
            roi_in_voxels = request_spec.roi / self.spec[array_key].voxel_size

            # The provided array inherits the source spec, restricted to
            # the requested ROI.
            spec = self.spec[array_key].copy()
            spec.roi = request_spec.roi

            batch.arrays[array_key] = gp.Array(
                self.func(roi_in_voxels.get_shape()), spec)

        logger.debug("done")

        timing.stop()
        batch.profiling_stats.add(timing)

        return batch
コード例 #7
0
    def process(self, batch, request):
        # Threshold the source array into a float32 {0, 1} mask covering
        # the same ROI as the source.
        source = batch[self.array]
        mask_data = (source.data > self.threshold).astype(np.float32)
        mask_spec = self.spec[self.mask].copy()
        mask_spec.roi = source.spec.roi
        batch[self.mask] = gp.Array(mask_data, mask_spec)

        return batch
コード例 #8
0
    def process(self, batch, request):
        # uint32 {0, 1} mask of voxels above the threshold; the mask spec
        # is a deep copy of the source spec with an adjusted dtype.
        source = batch[self.array]
        above_threshold = source.data > self.threshold
        mask_spec = copy.deepcopy(source.spec)
        mask_spec.dtype = np.uint32
        batch[self.mask] = gp.Array(above_threshold.astype(np.uint32),
                                    mask_spec)

        return batch
コード例 #9
0
    def process(self, batch, request):
        # Binarize the ground truth: any strictly positive voxel maps to 1,
        # everything else to 0 (as int32).
        binary_spec = batch[self.gt].spec.copy()
        binary_spec.dtype = np.int32

        binary_data = (batch[self.gt].data > 0).astype(np.int32)

        batch[self.gt_binary] = gp.Array(data=binary_data, spec=binary_spec)
コード例 #10
0
    def provide(self, request):
        """Provide an all-zero array for every requested array key."""

        batch = gp.Batch()
        for array_key, request_spec in request.array_specs.items():
            array_spec = self.spec[array_key].copy()
            array_spec.roi = request_spec.roi
            # BUGFIX: this was a Python 2 print statement, a SyntaxError
            # under Python 3.
            print("array_spec: ", array_spec.roi.get_shape())
            data = np.zeros(array_spec.roi.get_shape())
            batch.arrays[array_key] = gp.Array(data, array_spec)
        return batch
コード例 #11
0
    def process(self, batch, request):
        """Derive target and weight arrays from the ground truth.

        Runs the predictor's target/weight generation on the ground-truth
        and mask arrays and returns a new batch with both results cropped
        to their requested ROIs.
        """
        output = gp.Batch()

        gt_array = NumpyArray.from_gp_array(batch[self.gt_key])
        target_array = self.predictor.create_target(gt_array)
        mask_array = NumpyArray.from_gp_array(batch[self.mask_key])
        weight_array = self.predictor.create_weight(
            gt_array, target_array, mask=mask_array
        )

        # BUGFIX: copy the request specs before changing the voxel size —
        # mutating the request in place leaks the change to every other
        # node that still holds a reference to this request.
        request_spec = request[self.target_key].copy()
        request_spec.voxel_size = gt_array.voxel_size
        output[self.target_key] = gp.Array(target_array[request_spec.roi], request_spec)
        request_spec = request[self.weights_key].copy()
        request_spec.voxel_size = gt_array.voxel_size
        output[self.weights_key] = gp.Array(
            weight_array[request_spec.roi], request_spec
        )
        return output
コード例 #12
0
    def process(self, batch, request):
        # Nothing to do when the downstream request does not ask for the
        # derived array.
        if self.target_key not in request:
            return

        # Apply the configured function to the source data and publish the
        # result under the target key, restricted to the requested ROI.
        transformed = self.func(batch.arrays[self.source_key].data)

        target_spec = self.spec[self.target_key].copy()
        target_spec.roi = request[self.target_key].roi
        batch.arrays[self.target_key] = gp.Array(transformed, target_spec)
コード例 #13
0
    def process(self, batch, request):
        """Element-wise sum of ``self.array_keys``, stored under
        ``self.sum_array_key`` (only when that key was requested)."""

        if (self.sum_array_key not in request):
            return
        # BUGFIX: start from a copy — accumulating with ``+=`` directly on
        # the first array's data mutated that array in place for every
        # other consumer of this batch.
        sum_arr = batch.arrays[self.array_keys[0]].data.copy()
        for ak in self.array_keys[1:]:
            sum_arr += batch.arrays[ak].data
        spec = self.spec[self.sum_array_key].copy()
        spec.roi = request[self.sum_array_key].roi
        batch.arrays[self.sum_array_key] = gp.Array(sum_arr, spec)
コード例 #14
0
    def provide(self, request):
        """Synthesize a (raw, gt) pair of thin random skeleton strokes.

        Draws n_objects random two-point skeletons into a label volume,
        slightly dilates them, and derives raw as the noisy binary
        foreground of the labels.
        """

        voxel_size = self.spec[self.raw].voxel_size
        # Work in a volume with a leading singleton dimension; it is
        # stripped again below ([0]).
        shape = gp.Coordinate((1, ) + request[self.raw].roi.get_shape())

        gt_data = np.zeros(shape, dtype=int)
        for i in range(self.n_objects):
            # Two random endpoints per object.
            points = np.stack(
                [np.random.randint(0, shape[dim], 2) for dim in range(3)],
                axis=1)
            tree = skelerator.Tree(points)
            skeleton = skelerator.Skeleton(tree, [1, 1, 1],
                                           "linear",
                                           generate_graph=False)
            # Rasterize skeleton i with label i + 1.
            gt_data = skeleton.draw(gt_data, np.array([0, 0, 0]), i + 1)

        gt_data = gt_data[0].astype(np.uint64)
        # Dilate the one-voxel-wide strokes slightly.
        gt_data = maximum_filter(gt_data, size=2)

        # NOTE(review): `labels` is computed but never used below.
        labels = np.unique(gt_data)

        # Raw is the binary foreground plus gaussian noise, clipped to
        # [0, 1].
        raw_data = (gt_data > 0).astype(np.float32)
        raw_data = np.clip(
            raw_data + np.random.normal(scale=0.1, size=raw_data.shape), 0,
            1).astype(np.float32)

        spec = request[self.raw].copy()
        spec.voxel_size = (1, 1)
        raw = gp.Array(raw_data, spec)

        spec = request[self.gt].copy()
        spec.voxel_size = (1, 1)
        # Crop gt to the requested gt ROI, expressed relative to the raw
        # ROI's origin and converted to voxel units.
        gt_crop = (request[self.gt].roi -
                   request[self.raw].roi.get_begin()) / voxel_size
        gt_crop = gt_crop.to_slices()
        gt = gp.Array(gt_data[gt_crop], spec)

        batch = gp.Batch()
        batch[self.raw] = raw
        batch[self.gt] = gt

        return batch
コード例 #15
0
    def process(self, batch, request):
        # Signed distance transform squashed through tanh: positive inside
        # the foreground, negative inside the background.
        fg = batch[self.array].data.copy() > 0
        spec = batch[self.array].spec.copy()
        spec.dtype = np.float32

        not_fg = np.logical_not(fg)
        signed = np.tanh(ndimage.distance_transform_edt(fg) / self.scaling)
        signed[not_fg] = -np.tanh(
            ndimage.distance_transform_edt(not_fg) / self.scaling)[not_fg]

        batch[self.dt] = gp.Array(data=signed.astype(np.float32), spec=spec)
コード例 #16
0
ファイル: merge_channel.py プロジェクト: pattonw/neurolight
    def process(self, batch, request):
        # Stack foreground and background into a two-channel volume; the
        # new channel axis becomes dimension 0 and gets voxel size 1.
        fg_spec = self.spec[self.fg].copy()
        voxel_size = (1, ) + fg_spec.voxel_size
        stacked = np.stack([batch[self.fg].data, batch[self.bg].data],
                           axis=0)

        merged_spec = gp.ArraySpec(
            dtype=fg_spec.dtype,
            roi=Roi((0, 0, 0, 0), stacked.shape) * voxel_size,
            interpolatable=True,
            voxel_size=voxel_size)
        batch[self.raw] = gp.Array(data=stacked.astype(fg_spec.dtype),
                                   spec=merged_spec)
コード例 #17
0
 def process(self, batch, request):
     """Element-wise product of ``self.keys``, stored under
     ``self.target_key`` (only when that key was requested)."""
     if self.target_key not in request:
         return
     # BUGFIX: start from a copy — ``*=`` on the first array's data
     # mutated that array in place for every other consumer of this batch.
     prod_arr = batch.arrays[self.keys[0]].data.copy()
     for key in self.keys[1:]:
         prod_arr *= batch.arrays[key].data
     if self.target_spec.dtype is not None:
         # BUGFIX: astype returns a new array; the original discarded the
         # result, so the requested dtype was never applied.
         prod_arr = prod_arr.astype(self.target_spec.dtype)
     spec = self.spec[self.target_key].copy()
     spec.roi = request[self.target_key].roi
     spec.dtype = prod_arr.dtype
     batch.arrays[self.target_key] = gp.Array(prod_arr, spec)
コード例 #18
0
    def process(self, batch, request):
        # Collapse the channel axis: a voxel is foreground (1) if any
        # channel of the label array is positive.
        out_spec = batch[self.labels].spec.copy()
        out_spec.dtype = np.uint8

        collapsed = (batch[self.labels].data > 0).max(axis=0).astype(
            np.uint8)

        batch[self.labels_binary] = gp.Array(data=collapsed, spec=out_spec)
コード例 #19
0
    def process(self, batch, request):
        """Mark every voxel differing from ``self.target`` as True.

        Returns a new batch holding the boolean array under
        ``self.out_array``; returns None when the input array is missing
        from the batch.
        """
        outputs = gp.Batch()

        if self.in_array not in batch:
            return

        data = batch[self.in_array].data
        spec = batch[self.in_array].spec.copy()
        # BUGFIX: np.bool was a deprecated alias of the builtin and was
        # removed in NumPy 1.24 — use bool instead.
        spec.dtype = bool
        binarized = data != self.target
        outputs[self.out_array] = gp.Array(binarized, spec)

        return outputs
コード例 #20
0
 def process(self, batch, request):
     """Replace each label with a per-label intensity value.

     Every non-zero label is painted with one of ``self.intensities``
     (cycled in label order); overlapping assignments keep the maximum.
     """
     array = batch[self.array]
     intensity_data = np.zeros_like(array.data, dtype=np.float32)
     for i, label in enumerate(np.unique(array.data)):
         if label == 0:
             continue
         mask = array.data == label
         intensity_data[mask] = np.maximum(
             intensity_data[mask],
             self.intensities[i % len(self.intensities)])
     # BUGFIX: copy before changing the dtype — mutating array.spec in
     # place also altered the spec of the original array, which other
     # nodes may still reference.
     spec = array.spec.copy()
     spec.dtype = np.float32
     batch[self.array] = gp.Array(intensity_data, spec)
コード例 #21
0
    def process(self, batch, request):
        """Simulate cages in the raw volume and emit cage/density maps.

        Wraps raw and segmentation in brembow ``Volume`` objects, runs the
        cage simulation, and returns a new batch with the simulated raw
        plus cage and density maps cropped to their requested ROIs.  The
        exact semantics of ``simulate_random_cages`` (including whether it
        mutates ``simulated_raw`` in place) are not visible here — TODO
        confirm against brembow.
        """

        # get the raw and segmentation arrays from the current batch
        raw = batch[self.raw]
        seg = batch[self.seg]

        print(f"RAW: {raw}")
        print(f"SEG: {seg}")

        # simulate cages, return brembow volumes for raw, cages, and density
        simulated_raw = Volume(raw.data, raw.spec.voxel_size)
        cage_map, density_map = simulate_random_cages(
            simulated_raw, Volume(seg.data, seg.spec.voxel_size), self.cages,
            self.min_density, self.max_density, self.psf, True, True,
            self.no_cage_probability)

        # create array specs for new gunpowder arrays; maps derive from
        # the segmentation spec with adjusted dtypes
        raw_spec = batch[self.raw].spec.copy()
        cage_map_spec = batch[self.seg].spec.copy()
        cage_map_spec.dtype = np.uint64
        density_map_spec = batch[self.seg].spec.copy()
        density_map_spec.dtype = np.float32

        # create arrays and crop to requested size
        print(cage_map_spec)
        cage_map_array = gp.Array(data=cage_map, spec=cage_map_spec)
        cage_map_array = cage_map_array.crop(request[self.cage_map].roi)
        density_map_array = gp.Array(data=density_map, spec=density_map_spec)
        density_map_array = density_map_array.crop(
            request[self.density_map].roi)

        # create a new batch with processed arrays
        processed = gp.Batch()
        processed[self.raw] = gp.Array(data=simulated_raw.data, spec=raw_spec)
        processed[self.cage_map] = cage_map_array
        processed[self.density_map] = density_map_array

        return processed
コード例 #22
0
 def process(self, batch, request):
     # Gather per-block scores (array keys named like "SCORE_<block>"),
     # order them by block number 1..25, and emit them as a single
     # nonspatial array.
     scores_by_block = {}
     for key, array in batch.items():
         if "SCORE" in str(key):
             block_id = int(str(key).split("_")[1])
             scores_by_block[block_id] = array.data
     ordered_scores = [
         scores_by_block[b] for b in range(1, 26) if b in scores_by_block
     ]
     outputs = gp.Batch()
     outputs[self.output] = gp.Array(np.array(ordered_scores),
                                     gp.ArraySpec(nonspatial=True))
     return outputs
コード例 #23
0
    def process(self, batch, request):
        # Pick one of the input label arrays at random (weighted by
        # self.probabilities) and expose a copy of it as the output label.
        chosen = np.random.choice(self.input_labels, p=self.probabilities)
        chosen_data = batch.arrays[chosen].data.copy()

        out_spec = self.spec[self.output_label].copy()
        out_spec.roi = request[self.output_label].roi.copy()
        batch.arrays[self.output_label] = gp.Array(chosen_data, out_spec)

        # Crop every input array down to its requested ROI.
        for key in self.input_labels:
            batch.arrays[key] = batch.arrays[key].crop(request[key].roi)
コード例 #24
0
    def process(self, batch, request):
        # Upsample the source data by self.factor and publish it in a new
        # batch under the target key with the requested ROI.
        upsampled = rescale(batch.arrays[self.source].data, self.factor)

        target_spec = self.spec[self.target].copy()
        target_spec.roi = request[self.target].roi

        outputs = gp.Batch()
        outputs.arrays[self.target] = gp.Array(upsampled, target_spec)

        return outputs
コード例 #25
0
def evaluate_affs(pred_labels, gt_labels, return_results=False):
    """Score a predicted segmentation against ground truth with VOI.

    Returns a scores dict (the same metrics under "sample" and
    "average"); when return_results is True, also returns the uint64-cast
    label arrays used for the evaluation.
    """
    metrics = rand_voi(gt_labels.data, pred_labels.data)
    metrics["voi_sum"] = metrics["voi_split"] + metrics["voi_merge"]

    scores = {"sample": metrics, "average": metrics}

    if not return_results:
        return scores

    def _as_uint64(array):
        # Repackage with the original ROI/voxel size but uint64 data.
        return gp.Array(
            array.data.astype(np.uint64),
            gp.ArraySpec(roi=array.spec.roi,
                         voxel_size=array.spec.voxel_size))

    results = {
        "pred_labels": _as_uint64(pred_labels),
        "gt_labels": _as_uint64(gt_labels),
    }
    return scores, results
コード例 #26
0
    def process(self, batch, request):
        # Synthesize a raw image from the joined affinities: salt & pepper
        # noise, gaussian smoothing, then a contrast scale.
        affs = batch.arrays[self.joined_affinities].data.copy()

        noisy = random_noise(affs, 's&p', amount=self.sp)
        smoothed = gaussian(noisy, self.sigma)
        raw = smoothed * self.contrast

        raw_spec = self.spec[self.raw].copy()
        raw_spec.roi = request[self.raw].roi.copy()
        batch.arrays[self.raw] = gp.Array(raw, raw_spec)

        # Crop the affinities down to their requested ROI.
        batch.arrays[self.joined_affinities] = batch.arrays[
            self.joined_affinities].crop(request[self.joined_affinities].roi)
コード例 #27
0
    def process(self, batch, request):
        # Center-crop the input array down to the requested output shape.
        source = batch[self.input_array]
        out_shape = request[self.output_array].roi.get_shape()

        # Offset of the centered output ROI inside the input ROI.
        offset = (source.spec.roi.get_shape() - out_shape) / 2
        out_roi = gp.Roi(offset, out_shape)

        # Slice in voxel units.
        voxel_size = source.spec.voxel_size
        crop = tuple(
            slice(b, e) for b, e in zip(out_roi.get_begin() / voxel_size,
                                        out_roi.get_end() / voxel_size))

        out_spec = copy.deepcopy(source.spec)
        out_spec.roi = out_roi

        batch[self.output_array] = gp.Array(source.data[crop], out_spec)
コード例 #28
0
ファイル: dacapo_array_source.py プロジェクト: pattonw/dacapo
    def provide(self, request):
        """Serve a crop of the wrapped array, adding a channel dimension
        when the source has none and refusing NaN data."""
        timing_provide = Timing(self, "provide")
        timing_provide.start()

        spec = self.array_spec.copy()
        spec.roi = request[self.key].roi

        data = self.array[spec.roi]
        if "c" not in self.array.axes:
            # add a channel dimension
            data = np.expand_dims(data, 0)
        if np.isnan(data).any():
            raise ValueError("INPUT DATA CAN'T BE NAN")

        output = gp.Batch()
        output[self.key] = gp.Array(data, spec=spec)

        timing_provide.stop()
        output.profiling_stats.add(timing_provide)

        return output
コード例 #29
0
    def provide(self, request):
        """Provide a synthetic segmentation for every requested array.

        Shapes are padded up to even sizes before calling
        ``create_segmentation`` and the result is cropped back down to the
        requested shape.  Assumes 3D requests (the crop below indexes
        exactly three dimensions).
        """
        batch = gp.Batch()

        for (array_key, request_spec) in request.array_specs.items():

            array_spec = self.spec[array_key].copy()
            array_spec.roi = request_spec.roi
            shape = array_spec.roi.get_shape()

            # enlarge: round every odd dimension up to the next even size,
            # remembering the increment so it can be cropped off again
            lshape = list(shape)
            inc = [0] * len(shape)
            for i, s in enumerate(shape):
                if s % 2 != 0:
                    inc[i] += 1
                    lshape[i] += 1
            shape = gp.Coordinate(lshape)

            data = create_segmentation(
                shape=shape,
                n_objects=self.n_objects,
                points_per_skeleton=self.points_per_skeleton,
                interpolation=self.interpolation,
                smoothness=self.smoothness,
                noise_strength=self.noise_strength,
                seed=self.seed)
            # seed=np.random.randint(10000))
            segmentation = data["segmentation"]

            # crop the padding off again (more elegant & general way to do
            # this?)
            segmentation = segmentation[:lshape[0] - inc[0], :lshape[1] -
                                        inc[1], :lshape[2] - inc[2]]
            # segmentation = segmentation[:lshape_out[i] - inc[i] for i in range(len(shape))]

            batch.arrays[array_key] = gp.Array(segmentation, array_spec)
        # self.n +=1
        return batch
コード例 #30
0
    def process(self, batch, request):
        # AND the three affinity channels into one boolean volume: a voxel
        # is "joined" only where all three affinities equal 1.
        affinities = batch.arrays[self.input_affinities].data.copy()

        joined = np.logical_and(affinities[0] == 1, affinities[1] == 1)
        joined = np.logical_and(joined, affinities[2] == 1)

        # crop to requested ROI
        joined_roi = request[self.joined_affinities].roi.copy()
        joined = self.__crop_center(joined, joined_roi.get_shape())

        joined_spec = self.spec[self.joined_affinities].copy()
        joined_spec.roi = request[self.joined_affinities].roi.copy()
        batch.arrays[self.joined_affinities] = gp.Array(joined, joined_spec)

        # crop the raw input affinities down to their requested ROI too
        batch.arrays[self.input_affinities] = batch.arrays[
            self.input_affinities].crop(request[self.input_affinities].roi)