def test_without_placeholder(self):
        """A request containing only the graph key must fail, since
        ElasticAugment then has no array to infer a voxel size from, while
        the full request (points + labels) succeeds."""

        labels_key = ArrayKey("TEST_LABELS")
        points_key = GraphKey("TEST_POINTS")

        pipeline = (
            PointTestSource3D()
            + RandomLocation(ensure_nonempty=points_key)
            + ElasticAugment([10, 10, 10], [0.1, 0.1, 0.1], [0, 2.0 * math.pi])
            + Snapshot(
                {labels_key: "volumes/labels"},
                output_dir=self.path_to(),
                output_filename="elastic_augment_test{id}-{iteration}.hdf",
            )
        )

        size = Coordinate((40, 40, 40))

        with build(pipeline):
            for seed in range(2):
                points_only = BatchRequest(random_seed=seed)
                points_only.add(points_key, size)

                full_request = BatchRequest(random_seed=seed)
                full_request.add(points_key, size)
                full_request.add(labels_key, size)

                # No array to provide a voxel size to ElasticAugment
                with pytest.raises(PipelineRequestError):
                    pipeline.request_batch(points_only)
                batch = pipeline.request_batch(full_request)

                self.assertIn(labels_key, batch)
    def test_placeholder(self):
        """Requesting the labels array as a placeholder must leave the
        random state — and therefore the augmented point locations —
        identical to requesting it for real."""

        labels_key = ArrayKey("TEST_LABELS")
        points_key = GraphKey("TEST_POINTS")

        pipeline = (
            PointTestSource3D()
            + RandomLocation(ensure_nonempty=points_key)
            + ElasticAugment([10, 10, 10], [0.1, 0.1, 0.1], [0, 2.0 * math.pi])
            + Snapshot(
                {labels_key: "volumes/labels"},
                output_dir=self.path_to(),
                output_filename="elastic_augment_test{id}-{iteration}.hdf",
            )
        )

        size = Coordinate((40, 40, 40))

        with build(pipeline):
            for seed in range(2):
                with_placeholder = BatchRequest(random_seed=seed)
                with_placeholder.add(points_key, size)
                with_placeholder.add(labels_key, size, placeholder=True)

                without_placeholder = BatchRequest(random_seed=seed)
                without_placeholder.add(points_key, size)
                without_placeholder.add(labels_key, size)

                batch_a = pipeline.request_batch(with_placeholder)
                batch_b = pipeline.request_batch(without_placeholder)

                # identical seeds must yield identical point locations
                for a, b in zip(batch_a[points_key].nodes,
                                batch_b[points_key].nodes):
                    assert np.isclose(a.location, b.location).all()
# --- Example 3 ---
    def test_3d_basics(self):
        """Elastic-augment a 3D labels volume together with a point set and
        check that the points moved consistently with the voxels."""

        labels_key = ArrayKey("TEST_LABELS")
        points_key = GraphKey("TEST_POINTS")
        raster_key = ArrayKey("TEST_RASTER")

        pipeline = (
            PointTestSource3D()
            + ElasticAugment(
                [10, 10, 10],
                [0.1, 0.1, 0.1],
                [0, 2.0 * math.pi],  # rotate randomly
            )
            + RasterizeGraph(
                points_key,
                raster_key,
                settings=RasterizationSettings(radius=2, mode="peak"),
            )
            + Snapshot(
                {
                    labels_key: "volumes/labels",
                    raster_key: "volumes/raster",
                },
                dataset_dtypes={raster_key: np.float32},
                output_dir=self.path_to(),
                output_filename="elastic_augment_test{id}-{iteration}.hdf",
            )
        )

        roi = Roi((-20, -20, -20), (40, 40, 40))

        for _ in range(5):
            with build(pipeline):
                request = BatchRequest()
                request[labels_key] = ArraySpec(roi=roi)
                request[points_key] = GraphSpec(roi=roi)
                request[raster_key] = ArraySpec(roi=roi)

                batch = pipeline.request_batch(request)
                labels = batch[labels_key]
                points = batch[points_key]

                # the point at (0, 0, 0) should not have moved
                self.assertTrue(points.contains(0))

                data_roi = (labels.spec.roi -
                            labels.spec.roi.get_begin()) / labels.spec.voxel_size

                # points should have moved together with the voxels
                for point in points.nodes:
                    voxel = (point.location -
                             labels.spec.roi.get_begin()) / labels.spec.voxel_size
                    voxel = Coordinate(int(round(c)) for c in voxel)
                    if data_roi.contains(voxel):
                        self.assertEqual(labels.data[voxel], point.id)
# --- Example 4 ---
def train():
    """Train an affinity-prediction network on FIBSEM data with gunpowder.

    Configures a caffe Adam solver, assembles HDF5 and DVID data sources
    with augmentation, affinity generation and label balancing, then runs
    the training loop for 500k iterations.

    NOTE(review): this uses a legacy gunpowder API (``VolumeTypes``,
    ``add_volume_request``, ``gunpowder.caffe``) — confirm against the
    pinned gunpowder version before touching behavior.
    """

    gunpowder.set_verbose(False)

    # 3D nearest-neighbour affinity neighbourhood from malis
    affinity_neighborhood = malis.mknhood3d()

    # caffe solver: Adam with inverse-decay learning-rate policy,
    # snapshotting every 10k iterations
    solver_parameters = gunpowder.caffe.SolverParameters()
    solver_parameters.train_net = 'net.prototxt'
    solver_parameters.base_lr = 1e-4
    solver_parameters.momentum = 0.95
    solver_parameters.momentum2 = 0.999
    solver_parameters.delta = 1e-8
    solver_parameters.weight_decay = 0.000005
    solver_parameters.lr_policy = 'inv'
    solver_parameters.gamma = 0.0001
    solver_parameters.power = 0.75
    solver_parameters.snapshot = 10000
    solver_parameters.snapshot_prefix = 'net'
    solver_parameters.type = 'Adam'
    solver_parameters.resume_from = None
    solver_parameters.train_state.add_stage('euclid')

    # declare what every training batch must contain; raw uses the larger
    # input shape, all targets use the network output shape
    request = BatchRequest()
    request.add_volume_request(VolumeTypes.RAW, constants.input_shape)
    request.add_volume_request(VolumeTypes.GT_LABELS, constants.output_shape)
    request.add_volume_request(VolumeTypes.GT_MASK, constants.output_shape)
    request.add_volume_request(VolumeTypes.GT_AFFINITIES, constants.output_shape)
    request.add_volume_request(VolumeTypes.LOSS_SCALE, constants.output_shape)

    data_providers = list()
    fibsem_dir = "/groups/turaga/turagalab/data/FlyEM/fibsem_medulla_7col"
    for volume_name in ("tstvol-520-1-h5",):
        h5_filepath = "./{}.h5".format(volume_name)
        path_to_labels = os.path.join(fibsem_dir, volume_name, "groundtruth_seg.h5")
        # read the ground-truth shape so the generated mask matches it
        with h5py.File(path_to_labels, "r") as f_labels:
            mask_shape = f_labels["main"].shape
        # build a local container file: raw and labels are external links,
        # the mask is an all-ones dataset of the same shape as the labels
        with h5py.File(h5_filepath, "w") as h5:
            h5['volumes/raw'] = h5py.ExternalLink(os.path.join(fibsem_dir, volume_name, "im_uint8.h5"), "main")
            h5['volumes/labels/neuron_ids'] = h5py.ExternalLink(path_to_labels, "main")
            h5.create_dataset(
                name="volumes/labels/mask",
                dtype="uint8",
                shape=mask_shape,
                fillvalue=1,
            )
        data_providers.append(
            gunpowder.Hdf5Source(
                h5_filepath,
                datasets={
                    VolumeTypes.RAW: 'volumes/raw',
                    VolumeTypes.GT_LABELS: 'volumes/labels/neuron_ids',
                    VolumeTypes.GT_MASK: 'volumes/labels/mask',
                },
                resolution=(8, 8, 8),
            )
        )
    # second source: DVID server with an eroded ground-truth mask ROI
    dvid_source = DvidSource(
        hostname='slowpoke3',
        port=32788,
        uuid='341',
        raw_array_name='grayscale',
        gt_array_name='groundtruth',
        gt_mask_roi_name="seven_column_eroded7_z_lt_5024",
        resolution=(8, 8, 8),
    )
    data_providers.extend([dvid_source])
    # per-provider chain: random location, rejection of mostly-masked
    # batches, raw-intensity normalization
    data_providers = tuple(
        provider +
        RandomLocation() +
        Reject(min_masked=0.5) +
        Normalize()
        for provider in data_providers
    )

    # create a batch provider by concatenation of filters
    batch_provider = (
        data_providers +
        RandomProvider() +
        ElasticAugment([20, 20, 20], [0, 0, 0], [0, math.pi / 2.0]) +
        SimpleAugment(transpose_only_xy=False) +
        GrowBoundary(steps=2, only_xy=False) +
        AddGtAffinities(affinity_neighborhood) +
        BalanceAffinityLabels() +
        SplitAndRenumberSegmentationLabels() +
        IntensityAugment(0.9, 1.1, -0.1, 0.1, z_section_wise=False) +
        PreCache(
            request,
            cache_size=11,
            num_workers=10) +
        Train(solver_parameters, use_gpu=0) +
        Typecast(volume_dtypes={
            VolumeTypes.GT_LABELS: np.dtype("uint32"),
            VolumeTypes.GT_MASK: np.dtype("uint8"),
            VolumeTypes.LOSS_SCALE: np.dtype("float32"),
        }, safe=True) +
        Snapshot(every=50, output_filename='batch_{id}.hdf') +
        PrintProfilingStats(every=50)
    )

    n = 500000
    print("Training for", n, "iterations")

    # training happens as a side effect of the Train node while batches are
    # pulled through the pipeline
    with gunpowder.build(batch_provider) as pipeline:
        for i in range(n):
            pipeline.request_batch(request)

    print("Finished")
# --- Example 5 ---
    def test_fast_transform_no_recompute(self):
        """Check the fast points transform without recomputing missing points.

        With ``use_fast_points_transform=True`` and
        ``recompute_missing_points=False`` the fast pipeline may drop points
        (at least one is expected to go missing), but every point it does
        return must lie within distance 1 of the dense reference transform,
        and the fast path must be faster on average.
        """
        test_labels = ArrayKey("TEST_LABELS")
        test_points = GraphKey("TEST_POINTS")
        test_raster = ArrayKey("TEST_RASTER")
        fast_pipeline = (DensePointTestSource3D() + ElasticAugment(
            [10, 10, 10],
            [0.1, 0.1, 0.1],
            [0, 2.0 * math.pi],
            use_fast_points_transform=True,
            recompute_missing_points=False,
        ) + RasterizeGraph(
            test_points,
            test_raster,
            settings=RasterizationSettings(radius=2, mode="peak"),
        ))

        reference_pipeline = (
            DensePointTestSource3D() +
            ElasticAugment([10, 10, 10], [0.1, 0.1, 0.1], [0, 2.0 * math.pi]) +
            RasterizeGraph(
                test_points,
                test_raster,
                settings=RasterizationSettings(radius=2, mode="peak"),
            ))

        timings = []
        for i in range(5):
            # seed chosen specifically to make this test fail
            seed = i + 15

            with build(fast_pipeline):

                request_roi = Roi((0, 0, 0), (40, 40, 40))

                request = BatchRequest(random_seed=seed)
                request[test_labels] = ArraySpec(roi=request_roi)
                request[test_points] = GraphSpec(roi=request_roi)
                request[test_raster] = ArraySpec(roi=request_roi)

                t1_fast = time.time()
                batch = fast_pipeline.request_batch(request)
                t2_fast = time.time()
                points_fast = {
                    node.id: node
                    for node in batch[test_points].nodes
                }

            with build(reference_pipeline):

                request_roi = Roi((0, 0, 0), (40, 40, 40))

                request = BatchRequest(random_seed=seed)
                request[test_labels] = ArraySpec(roi=request_roi)
                request[test_points] = GraphSpec(roi=request_roi)
                request[test_raster] = ArraySpec(roi=request_roi)

                t1_ref = time.time()
                batch = reference_pipeline.request_batch(request)
                t2_ref = time.time()
                points_reference = {
                    node.id: node
                    for node in batch[test_points].nodes
                }

            timings.append((t2_fast - t1_fast, t2_ref - t1_ref))
            missing = 0
            for point_id, reference_point in points_reference.items():
                if point_id not in points_fast:
                    missing += 1
                    continue
                diff = reference_point.location - points_fast[point_id].location
                self.assertAlmostEqual(
                    np.linalg.norm(diff),
                    0,
                    delta=1,
                    # bug fix: the original swapped the two locations in the
                    # message — "returned" is the fast result, "expected" the
                    # reference
                    msg="fast transform returned location {} but expected {} "
                    "for point {}".format(
                        points_fast[point_id].location,
                        reference_point.location,
                        point_id,
                    ),
                )

            # fast path must win on mean wall-clock time, and must actually
            # have dropped at least one point
            t_fast, t_ref = [np.mean(x) for x in zip(*timings)]
            self.assertLess(t_fast, t_ref)
            self.assertGreater(missing, 0)
# --- Example 6 ---
    def test_random_seed(self):
        """Repeated builds of the same pipeline with a fixed ``random_seed``
        must produce identical augmented point sets, and points must move
        consistently with the label voxels in every run."""

        test_labels = ArrayKey('TEST_LABELS')
        test_points = GraphKey('TEST_POINTS')
        test_raster = ArrayKey('TEST_RASTER')

        pipeline = (
            PointTestSource3D() + ElasticAugment(
                [10, 10, 10],
                [0.1, 0.1, 0.1],
                # [0, 0, 0], # no jitter
                [0, 2.0 * math.pi]) +  # rotate randomly
            # [math.pi/4, math.pi/4]) + # rotate by 45 deg
            # [0, 0]) + # no rotation
            RasterizeGraph(test_points,
                           test_raster,
                           settings=RasterizationSettings(radius=2,
                                                          mode='peak')) +
            Snapshot(
                {
                    test_labels: 'volumes/labels',
                    test_raster: 'volumes/raster'
                },
                dataset_dtypes={test_raster: np.float32},
                output_dir=self.path_to(),
                output_filename='elastic_augment_test{id}-{iteration}.hdf'))

        # one (id, location) tuple-of-tuples per run, compared at the end
        batch_points = []
        for _ in range(5):

            with build(pipeline):

                request_roi = Roi((-20, -20, -20), (40, 40, 40))

                # fixed seed: every run must yield the same augmentation
                request = BatchRequest(random_seed=10)
                request[test_labels] = ArraySpec(roi=request_roi)
                request[test_points] = GraphSpec(roi=request_roi)
                request[test_raster] = ArraySpec(roi=request_roi)
                batch = pipeline.request_batch(request)
                labels = batch[test_labels]
                points = batch[test_points]
                batch_points.append(
                    tuple((node.id, tuple(node.location))
                          for node in points.nodes))

                # the point at (0, 0, 0) should not have moved
                data = {node.id: node for node in points.nodes}
                self.assertTrue(0 in data)

                # labels ROI in voxel coordinates (offset removed)
                labels_data_roi = (
                    labels.spec.roi -
                    labels.spec.roi.get_begin()) / labels.spec.voxel_size

                # points should have moved together with the voxels
                for node in points.nodes:
                    loc = node.location - labels.spec.roi.get_begin()
                    loc = loc / labels.spec.voxel_size
                    loc = Coordinate(int(round(x)) for x in loc)
                    if labels_data_roi.contains(loc):
                        self.assertEqual(labels.data[loc], node.id)

        # every run must have produced exactly the same point data
        for point_data in zip(*batch_points):
            self.assertEqual(len(set(point_data)), 1)
# --- Example 7 ---
    def test_3d_basics(self):
        """Elastic-augment a 3D labels volume together with a graph and
        check that the graph nodes moved consistently with the voxels."""

        labels_key = ArrayKey("TEST_LABELS")
        graph_key = GraphKey("TEST_GRAPH")
        raster_key = ArrayKey("TEST_RASTER")

        pipeline = (
            GraphTestSource3D()
            + ElasticAugment(
                [10, 10, 10],
                [0.1, 0.1, 0.1],
                [0, 2.0 * math.pi],  # rotate randomly
            )
            + RasterizePoints(
                graph_key,
                raster_key,
                settings=RasterizationSettings(radius=2, mode="peak"),
            )
            + Snapshot(
                {
                    labels_key: "volumes/labels",
                    raster_key: "volumes/raster",
                },
                dataset_dtypes={raster_key: np.float32},
                output_dir=self.path_to(),
                output_filename="elastic_augment_test{id}-{iteration}.hdf",
            )
        )

        roi = Roi((-20, -20, -20), (40, 40, 40))

        for _ in range(5):
            with build(pipeline):
                request = BatchRequest()
                request[labels_key] = ArraySpec(roi=roi)
                request[graph_key] = GraphSpec(roi=roi)
                request[raster_key] = ArraySpec(roi=roi)

                batch = pipeline.request_batch(request)
                labels = batch[labels_key]
                graph = batch[graph_key]

                # the node with id 0 must still be present; its location may
                # have moved slightly, so only the id is checked here
                self.assertIn(0, [node.id for node in graph.nodes])

                data_roi = (labels.spec.roi -
                            labels.spec.roi.get_begin()) / labels.spec.voxel_size

                # graph should have moved together with the voxels
                for node in graph.nodes:
                    voxel = (node.location -
                             labels.spec.roi.get_begin()) / labels.spec.voxel_size
                    voxel = Coordinate(int(round(c)) for c in voxel)
                    if data_roi.contains(voxel):
                        self.assertEqual(labels.data[voxel], node.id)
# --- Example 8 ---
def visualize_original_elastic_augment(args=None):
    """Visualize the effect of ElasticAugment on a sample volume in Paintera.

    Runs the augmentation pipeline on one HDF5 raw volume and shows the
    un-augmented snapshot and the augmented batch side by side in a Paintera
    viewer. Requires a JVM (jnius) and the payntera/imglyb bindings; blocks
    until the viewer window is closed.
    """

    data_providers = []
    data_dir = '/groups/saalfeld/home/hanslovskyp/experiments/quasi-isotropic/data'
    # NOTE(review): only the last assignment takes effect — samples A and B
    # look like alternatives left in for quick switching; confirm intent
    file_pattern = 'sample_A_padded_20160501-2-additional-sections-fixed-offset.h5'
    file_pattern = 'sample_B_padded_20160501-2-additional-sections-fixed-offset.h5'
    file_pattern = 'sample_C_padded_20160501-2-additional-sections-fixed-offset.h5'

    for data in glob.glob(os.path.join(data_dir, file_pattern)):
        h5_source = Hdf5Source(data, datasets={
            RAW: 'volumes/raw',
        })
        data_providers.append(h5_source)

    input_resolution = (360, 36, 36)
    output_resolution = Coordinate((120, 108, 108))
    offset = (13640, 10932, 10932)

    output_shape = Coordinate((12, 100, 100)) * output_resolution
    output_offset = (13320 + 3600, 32796 + 36 + 10800, 32796 + 36 + 10800)

    # extra context around the output ROI so the augmentation has room
    overhang = Coordinate((360, 108, 108)) * 16

    input_shape = output_shape + overhang * 2
    input_offset = Coordinate(output_offset) - overhang

    output_roi = Roi(offset=output_offset, shape=output_shape)
    input_roi = Roi(offset=input_offset, shape=input_shape)

    augmentations = (
        # SimpleAugment(transpose_only=[1,2]),
        # ElasticAugmentNonMatchingVoxelSize(control_point_spacing=(1, 1, 1), jitter_sigma=(0.0, 3.0, 3.0), rotation_interval=(0, np.pi/2.0)),
        ElasticAugment(
            control_point_spacing=(4, 40, 40),
            #jitter_sigma=(0, 1 * 2 * 36, 0 * 36),
            jitter_sigma=(0, 2, 2),
            rotation_interval=(2 * np.pi / 8, 0 * 2 * np.pi),
            subsample=1), )

    # run the pipeline: returns the augmented batch and an un-augmented
    # snapshot for comparison
    batch, snapshot = fuse.util.run_augmentations(
        data_providers=data_providers,
        roi=lambda key: input_roi.copy(),
        augmentations=augmentations,
        keys=(RAW, ),
        voxel_size=lambda key: input_resolution)

    args = get_parser().parse_args() if args is None else args
    # JVM options must be set before the JVM is started by jnius
    jnius_config.add_options('-Xmx{}'.format(args.max_heap_size))

    # deferred imports: these start/require the JVM, so they must come after
    # jnius_config above
    import payntera.jfx
    import imglyb
    from jnius import autoclass

    payntera.jfx.init_platform()

    PainteraBaseView = autoclass(
        'org.janelia.saalfeldlab.paintera.PainteraBaseView')
    viewer = PainteraBaseView.defaultView()
    pbv = viewer.baseView
    scene, stage = payntera.jfx.start_stage(viewer.paneWithStatus.getPane())
    payntera.jfx.invoke_on_jfx_application_thread(
        lambda: pbv.orthogonalViews().setScreenScales([0.3, 0.1, 0.03]))

    # add both the snapshot and the augmented batch as viewer sources
    keys_to_show = (RAW, )
    snapshot_states = add_to_viewer(
        snapshot,
        keys=keys_to_show,
        name=lambda key: '%s-snapshot' % key.identifier)
    states = add_to_viewer(batch, keys=keys_to_show)

    viewer.keyTracker.installInto(scene)
    scene.addEventFilter(
        autoclass('javafx.scene.input.MouseEvent').ANY, viewer.mouseTracker)

    # keep the Python process alive while the viewer window is open
    while stage.isShowing():
        time.sleep(0.1)
# --- Example 9 ---
# NOTE(review): script-level fragment, apparently truncated from a larger
# visualization script — it relies on names defined earlier in the original
# (output_shape, data_providers, input_resolution, RAW, args) that are not
# visible here.
output_offset = (13320 + 3600, 32796 + 36 + 10800, 32796 + 36 + 10800)

# extra context around the output ROI so the augmentation has room
overhang = Coordinate((360, 108, 108)) * 16

input_shape = output_shape + overhang * 2
input_offset = Coordinate(output_offset) - overhang

output_roi = Roi(offset=output_offset, shape=output_shape)
input_roi = Roi(offset=input_offset, shape=input_shape)

augmentations = (
    # SimpleAugment(transpose_only=[1,2]),
    # ElasticAugmentNonMatchingVoxelSize(control_point_spacing=(1, 1, 1), jitter_sigma=(0.0, 3.0, 3.0), rotation_interval=(0, np.pi/2.0)),
    ElasticAugment(
        control_point_spacing=(4, 40, 40),
        #jitter_sigma=(0, 1 * 2 * 36, 0 * 36),
        jitter_sigma=(0, 2, 2),
        rotation_interval=(2 * np.pi / 8, 0 * 2 * np.pi),
        subsample=1), )

# run the pipeline: returns the augmented batch and an un-augmented snapshot
batch, snapshot = fuse.util.run_augmentations(
    data_providers=data_providers,
    roi=lambda key: input_roi.copy(),
    augmentations=augmentations,
    keys=(RAW, ),
    voxel_size=lambda key: input_resolution)

# JVM options must be set before the JVM is started by jnius
jnius_config.add_options('-Xmx{}'.format(args.max_heap_size))

# deferred imports: these start/require the JVM
import payntera.jfx
import imglyb
from jnius import autoclass