Example #1
    def test_merge_basics(self):
        voxel_size = (1, 1, 1)
        GraphKey("PRESYN")
        ArrayKey("GT_LABELS")
        graphsource = GraphTestSource(voxel_size)
        arraysource = ArrayTestSoure(voxel_size)
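        # MergeProvider exposes both upstream sources as a single provider,
        # so downstream nodes can request the graph and array keys together.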
        pipeline = (graphsource, arraysource) + MergeProvider() + RandomLocation()
        window_request = Coordinate((50, 50, 50))
        with build(pipeline):
            # Check basic merging.
            request = BatchRequest()
            request.add(GraphKeys.PRESYN, window_request)
            request.add(ArrayKeys.GT_LABELS, window_request)
            batch_res = pipeline.request_batch(request)
            self.assertTrue(ArrayKeys.GT_LABELS in batch_res.arrays)
            self.assertTrue(GraphKeys.PRESYN in batch_res.graphs)

            # Check that request of only one source also works.
            request = BatchRequest()
            request.add(GraphKeys.PRESYN, window_request)
            batch_res = pipeline.request_batch(request)
            self.assertFalse(ArrayKeys.GT_LABELS in batch_res.arrays)
            self.assertTrue(GraphKeys.PRESYN in batch_res.graphs)

        # Check that building fails when two sources provide the same key.
        arraysource2 = ArrayTestSoure(voxel_size)
        pipeline_fail = (arraysource, arraysource2) + MergeProvider() + RandomLocation()
        with self.assertRaises(PipelineSetupError):
            with build(pipeline_fail):
                pass
Example #2
    def test_output(self):

        cropped_roi_raw = Roi((400, 40, 40), (1000, 100, 100))
        cropped_roi_presyn = Roi((800, 80, 80), (800, 80, 80))

        GraphKey("PRESYN")

        pipeline = (
            ExampleSourceCrop()
            + Crop(ArrayKeys.RAW, cropped_roi_raw)
            + Crop(GraphKeys.PRESYN, cropped_roi_presyn)
        )

        with build(pipeline):

            self.assertTrue(pipeline.spec[ArrayKeys.RAW].roi == cropped_roi_raw)
            self.assertTrue(pipeline.spec[GraphKeys.PRESYN].roi == cropped_roi_presyn)

        pipeline = ExampleSourceCrop() + Crop(
            ArrayKeys.RAW,
            fraction_negative=(0.25, 0, 0),
            fraction_positive=(0.25, 0, 0),
        )
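        # fraction_negative / fraction_positive remove the given fraction of the
        # ROI from the lower / upper end of each dimension instead of using a fixed ROI.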
        expected_roi_raw = Roi((650, 20, 20), (900, 180, 180))

        with build(pipeline):

            logger.info(pipeline.spec[ArrayKeys.RAW].roi)
            logger.info(expected_roi_raw)
            self.assertTrue(pipeline.spec[ArrayKeys.RAW].roi == expected_roi_raw)
Example #3
    def test_output(self):
        meta_base = self.path_to('tf_graph')

        ArrayKey('A')
        ArrayKey('B')
        ArrayKey('C')
        ArrayKey('GRADIENT_A')

        # create model meta graph file and get input/output names
        (a, b, c, optimizer, loss) = self.create_meta_graph(meta_base)

        source = ExampleTensorflowTrainSource()
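        # Train runs one optimizer step per requested batch and writes a
        # checkpoint every `save_every` iterations, which is what allows
        # resuming further below.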
        train = Train(
            meta_base,
            optimizer=optimizer,
            loss=loss,
            inputs={a: ArrayKeys.A, b: ArrayKeys.B},
            outputs={c: ArrayKeys.C},
            gradients={a: ArrayKeys.GRADIENT_A},
            save_every=100)
        pipeline = source + train

        request = BatchRequest({
            ArrayKeys.A: ArraySpec(roi=Roi((0, 0), (2, 2))),
            ArrayKeys.B: ArraySpec(roi=Roi((0, 0), (2, 2))),
            ArrayKeys.C: ArraySpec(roi=Roi((0, 0), (2, 2))),
            ArrayKeys.GRADIENT_A: ArraySpec(roi=Roi((0, 0), (2, 2))),
        })

        # train for a couple of iterations
        with build(pipeline):

            batch = pipeline.request_batch(request)

            self.assertAlmostEqual(batch.loss, 9.8994951)

            gradient_a = batch.arrays[ArrayKeys.GRADIENT_A].data
            self.assertTrue(gradient_a[0, 0] < gradient_a[0, 1])
            self.assertTrue(gradient_a[0, 1] < gradient_a[1, 0])
            self.assertTrue(gradient_a[1, 0] < gradient_a[1, 1])

            for i in range(200-1):
                loss1 = batch.loss
                batch = pipeline.request_batch(request)
                loss2 = batch.loss
                self.assertLess(loss2, loss1)

        # resume training
        with build(pipeline):

            for i in range(100):
                loss1 = batch.loss
                batch = pipeline.request_batch(request)
                loss2 = batch.loss
                self.assertLess(loss2, loss1)
Example #4
def test_transpose():
    voxel_size = Coordinate((20, 20))
    graph_key = GraphKey("GRAPH")
    array_key = ArrayKey("ARRAY")
    graph = Graph(
        [Node(id=1, location=np.array([450, 550]))],
        [],
        GraphSpec(roi=Roi((100, 200), (800, 600))),
    )
    data = np.zeros([40, 30])
    data[17, 17] = 1
    array = Array(
        data, ArraySpec(roi=Roi((100, 200), (800, 600)),
                        voxel_size=voxel_size))

    default_pipeline = (
        (GraphSource(graph_key, graph), ArraySource(array_key, array)) +
        MergeProvider() + SimpleAugment(
            mirror_only=[], transpose_only=[0, 1], transpose_probs=[0, 0]))
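    # transpose_probs of zero means the default pipeline never transposes;
    # the pipeline below, with probs of one, transposes every batch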

    transpose_pipeline = (
        (GraphSource(graph_key, graph), ArraySource(array_key, array)) +
        MergeProvider() + SimpleAugment(
            mirror_only=[], transpose_only=[0, 1], transpose_probs=[1, 1]))

    request = BatchRequest()
    request[graph_key] = GraphSpec(roi=Roi((400, 500), (200, 300)))
    request[array_key] = ArraySpec(roi=Roi((400, 500), (200, 300)))
    with build(default_pipeline):
        expected_location = [450, 550]
        batch = default_pipeline.request_batch(request)

        assert len(list(batch[graph_key].nodes)) == 1
        node = list(batch[graph_key].nodes)[0]
        assert all(np.isclose(node.location, expected_location))
        node_voxel_index = Coordinate(
            (node.location - batch[array_key].spec.roi.get_offset()) /
            voxel_size)
        assert (
            batch[array_key].data[node_voxel_index] == 1
        ), f"Node at {np.where(batch[array_key].data == 1)} not {node_voxel_index}"

    with build(transpose_pipeline):
        expected_location = [410, 590]
        batch = transpose_pipeline.request_batch(request)

        assert len(list(batch[graph_key].nodes)) == 1
        node = list(batch[graph_key].nodes)[0]
        assert all(np.isclose(node.location, expected_location))
        node_voxel_index = Coordinate(
            (node.location - batch[array_key].spec.roi.get_offset()) /
            voxel_size)
        assert (
            batch[array_key].data[node_voxel_index] == 1
        ), f"Node at {np.where(batch[array_key].data == 1)} not {node_voxel_index}"
Example #5
    def test_mirror(self):
        test_graph = GraphKey("TEST_GRAPH")

        pipeline = TestSource() + SimpleAugment(
            mirror_only=[0, 1, 2], transpose_only=[]
        )

        request = BatchRequest()
        request[GraphKeys.TEST_GRAPH] = GraphSpec(roi=Roi((0, 20, 33), (100, 100, 120)))
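        # per dimension: the node's original coordinate and its mirrored counterpart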
        possible_loc = [[50, 49], [70, 29], [100, 86]]
        with build(pipeline):
            seen_mirrored = False
            for i in range(100):
                batch = pipeline.request_batch(request)

                assert len(list(batch[GraphKeys.TEST_GRAPH].nodes)) == 1
                node = list(batch[GraphKeys.TEST_GRAPH].nodes)[0]
                logging.debug(node.location)
                assert all(
                    [
                        node.location[dim] in possible_loc[dim] 
                        for dim in range(3)
                    ]
                )
                seen_mirrored = seen_mirrored or any(
                    [node.location[dim] == possible_loc[dim][1] for dim in range(3)]
                )
                assert Roi((0, 20, 33), (100, 100, 120)).contains(batch[GraphKeys.TEST_GRAPH].spec.roi)
                assert batch[GraphKeys.TEST_GRAPH].spec.roi.contains(node.location)
            assert seen_mirrored
Example #6
def test_embedding_pipeline(
    tmpdir, aux_task, blend_mode, fusion_pipeline, train_embedding, snapshot_every
):
    setup_config = DEFAULT_CONFIG
    setup_config["FUSION_PIPELINE"] = fusion_pipeline
    setup_config["TRAIN_EMBEDDING"] = train_embedding
    setup_config["SNAPSHOT_EVERY"] = snapshot_every
    setup_config["TENSORBOARD_LOG_DIR"] = tmpdir
    setup_config["SNAPSHOT_DIR"] = tmpdir
    setup_config["SNAPSHOT_FILE_NAME"] = "test_snapshot"
    setup_config["MATCHING_FAILURES_DIR"] = None
    setup_config["BLEND_MODE"] = blend_mode
    setup_config["AUX_TASK"] = aux_task
    voxel_size = Coordinate(setup_config["VOXEL_SIZE"])
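    # shapes in the config are given in voxels; multiply by voxel_size to get world units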
    output_size = Coordinate(setup_config["OUTPUT_SHAPE"]) * voxel_size
    input_size = Coordinate(setup_config["INPUT_SHAPE"]) * voxel_size
    pipeline, raw, output, inputs = embedding_pipeline(
        setup_config, get_test_data_sources
    )
    request = BatchRequest()
    request.add(raw, input_size)
    request.add(output, output_size)
    for key in inputs:
        request.add(key, output_size)
    with build(pipeline):
        batch = pipeline.request_batch(request)
        assert output in batch
        assert raw in batch
Example #7
    def test_without_placeholder(self):

        test_labels = ArrayKey("TEST_LABELS")
        test_points = GraphKey("TEST_POINTS")

        pipeline = (
            PointTestSource3D() + RandomLocation(ensure_nonempty=test_points) +
            ElasticAugment([10, 10, 10], [0.1, 0.1, 0.1], [0, 2.0 * math.pi]) +
            Snapshot(
                {test_labels: "volumes/labels"},
                output_dir=self.path_to(),
                output_filename="elastic_augment_test{id}-{iteration}.hdf",
            ))

        with build(pipeline):
            for i in range(2):

                request_size = Coordinate((40, 40, 40))

                request_a = BatchRequest(random_seed=i)
                request_a.add(test_points, request_size)

                request_b = BatchRequest(random_seed=i)
                request_b.add(test_points, request_size)
                request_b.add(test_labels, request_size)

                # No array to provide a voxel size to ElasticAugment
                with pytest.raises(PipelineRequestError):
                    pipeline.request_batch(request_a)
                batch_b = pipeline.request_batch(request_b)

                self.assertIn(test_labels, batch_b)
Example #8
    def test_placeholder(self):

        test_labels = ArrayKey("TEST_LABELS")
        test_points = GraphKey("TEST_POINTS")

        pipeline = (
            PointTestSource3D() + RandomLocation(ensure_nonempty=test_points) +
            ElasticAugment([10, 10, 10], [0.1, 0.1, 0.1], [0, 2.0 * math.pi]) +
            Snapshot(
                {test_labels: "volumes/labels"},
                output_dir=self.path_to(),
                output_filename="elastic_augment_test{id}-{iteration}.hdf",
            ))

        with build(pipeline):
            for i in range(2):

                request_size = Coordinate((40, 40, 40))

                request_a = BatchRequest(random_seed=i)
                request_a.add(test_points, request_size)
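                # the placeholder makes request_a cover the same ROIs as request_b,
                # so both receive the same augmentation without requesting the labels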
                request_a.add(test_labels, request_size, placeholder=True)

                request_b = BatchRequest(random_seed=i)
                request_b.add(test_points, request_size)
                request_b.add(test_labels, request_size)

                batch_a = pipeline.request_batch(request_a)
                batch_b = pipeline.request_batch(request_b)

                points_a = batch_a[test_points].nodes
                points_b = batch_b[test_points].nodes

                for a, b in zip(points_a, points_b):
                    assert all(np.isclose(a.location, b.location))
Example #9
    def test_with_edge(self):
        graph_with_edge = GraphKey("TEST_GRAPH_WITH_EDGE")
        array_with_edge = ArrayKey("RASTERIZED_EDGE")

        pipeline = GraphTestSourceWithEdge() + RasterizeGraph(
            GraphKeys.TEST_GRAPH_WITH_EDGE,
            ArrayKeys.RASTERIZED_EDGE,
            ArraySpec(voxel_size=(1, 1, 1)),
            settings=RasterizationSettings(0.5),
        )

        with build(pipeline):
            request = BatchRequest()
            roi = Roi((0, 0, 0), (10, 10, 10))

            request[GraphKeys.TEST_GRAPH_WITH_EDGE] = GraphSpec(roi=roi)
            request[ArrayKeys.RASTERIZED_EDGE] = ArraySpec(roi=roi)

            batch = pipeline.request_batch(request)

            rasterized = batch.arrays[ArrayKeys.RASTERIZED_EDGE].data
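            # a radius of 0.5 rasterizes the edge as a single-voxel-wide line of 10 voxels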

            assert (
                rasterized.sum() == 10
            ), f"rasterized has ones at: {np.where(rasterized==1)}"
Example #10
    def test_square(self):
        test_graph = GraphKey("TEST_GRAPH")
        test_array1 = ArrayKey("TEST_ARRAY1")
        test_array2 = ArrayKey("TEST_ARRAY2")

        pipeline = (
            (ArrayTestSource(), TestSource())
            + MergeProvider()
            + Pad(test_array1, None)
            + Pad(test_array2, None)
            + Pad(test_graph, None)
            + SimpleAugment(mirror_only=[1, 2], transpose_only=[1, 2])
        )

        request = BatchRequest()
        request[GraphKeys.TEST_GRAPH] = GraphSpec(roi=Roi((0, 50, 65), (100, 100, 100)))
        request[ArrayKeys.TEST_ARRAY1] = ArraySpec(roi=Roi((0, 0, 15), (100, 200, 200)))
        request[ArrayKeys.TEST_ARRAY2] = ArraySpec(roi=Roi((0, 50, 65), (100, 100, 100)))

        with build(pipeline):
            for i in range(100):
                batch = pipeline.request_batch(request)
                assert len(list(batch[GraphKeys.TEST_GRAPH].nodes)) == 1

                for (array_key, array) in batch.arrays.items():
                    assert batch.arrays[array_key].data.shape == batch.arrays[array_key].spec.roi.get_shape()
Example #11
    def test_voxel_size(self):

        locations = [[0, 0, 0], [91, 20, 20], [42, 24, 57]]

        pipeline = (
            ExampleSourceSpecifiedLocation(roi=Roi((0, 0, 0), (100, 100, 100)),
                                           voxel_size=(5, 2, 2)) +
            SpecifiedLocation(
                locations, choose_randomly=False, extra_data=None,
                jitter=None))

        with build(pipeline):

            batch = pipeline.request_batch(
                BatchRequest({
                    ArrayKeys.RAW:
                    ArraySpec(roi=Roi((0, 0, 0), (20, 20, 20)))
                }))
            # the first location is skipped: centering the request on it would fall outside the source ROI
            # second should start at [80/5, 10/2, 10/2] = [16, 5, 5]
            self.assertEqual(batch.arrays[ArrayKeys.RAW].data[0, 0, 0], 40255)

            batch = pipeline.request_batch(
                BatchRequest({
                    ArrayKeys.RAW:
                    ArraySpec(roi=Roi((0, 0, 0), (20, 20, 20)))
                }))
            # third should start at [30/5, 14/2, 48/2] = [6, 7, 23]
            self.assertEqual(batch.arrays[ArrayKeys.RAW].data[0, 0, 0], 15374)
Example #12
    def test_ensure_center_non_zero(self):
        path = Path(self.path_to("test_swc_source.swc"))

        # write test swc
        self._write_swc(path, self._toy_swc_points().to_nx_graph())

        # read arrays
        swc = PointsKey("SWC")
        img = ArrayKey("IMG")
        pipeline = (SwcFileSource(
            path, [swc], [PointsSpec(roi=Roi((0, 0, 0), (11, 11, 11)))]) +
                    RandomLocation(ensure_nonempty=swc, ensure_centered=True) +
                    RasterizeSkeleton(
                        points=swc,
                        array=img,
                        array_spec=ArraySpec(
                            interpolatable=False,
                            dtype=np.uint32,
                            voxel_size=Coordinate((1, 1, 1)),
                        ),
                    ))

        request = BatchRequest()
        request.add(img, Coordinate((5, 5, 5)))
        request.add(swc, Coordinate((5, 5, 5)))

        with build(pipeline):
            batch = pipeline.request_batch(request)

            data = batch[img].data
            g = batch[swc]
            assert g.num_vertices() > 0

            self.assertNotEqual(data[tuple(np.array(data.shape) // 2)], 0)
Example #13
    def test_precache(self):

        logging.getLogger("gunpowder.torch.nodes.predict").setLevel(
            logging.INFO)

        a = ArrayKey("A")
        pred = ArrayKey("PRED")

        model = ExampleModel()

        reference_request = BatchRequest()
        reference_request[a] = ArraySpec(roi=Roi((0, 0), (7, 7)))
        reference_request[pred] = ArraySpec(roi=Roi((1, 1), (5, 5)))

        source = ExampleTorchTrain2DSource()
        predict = Predict(
            model=model,
            inputs={"a": a},
            outputs={0: pred},
            array_specs={pred: ArraySpec()},
        )
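        # PreCache keeps up to cache_size batches ready, filled by num_workers background workers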
        pipeline = source + predict + PreCache(cache_size=3, num_workers=2)

        request = BatchRequest({
            a: ArraySpec(roi=Roi((0, 0), (17, 17))),
            pred: ArraySpec(roi=Roi((0, 0), (15, 15))),
        })

        # run a single prediction request through the pre-cached pipeline
        with build(pipeline):

            batch = pipeline.request_batch(request)
            assert pred in batch
Example #14
    def test_relabel_components(self):
        path = Path(self.path_to("test_swc_source.swc"))

        # write test swc
        self._write_swc(path, self._toy_swc_points().to_nx_graph())

        # read arrays
        swc = GraphKey("SWC")
        source = SwcFileSource(path, [swc])

        with build(source):
            batch = source.request_batch(
                BatchRequest({swc:
                              GraphSpec(roi=Roi((0, 1, 5), (11, 10, 1)))}))

        temp_g = batch.points[swc]
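        # assign a distinct "component" attribute to the nodes of each connected component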
        temp_g.relabel_connected_components()

        previous_label = None
        ccs = list(temp_g.connected_components)
        self.assertEqual(len(ccs), 3)
        for cc in ccs:
            self.assertEqual(len(cc), 10)
            label = None
            for point_id in cc:
                if label is None:
                    label = temp_g.node(point_id).attrs["component"]
                    self.assertNotEqual(label, previous_label)
                self.assertEqual(
                    temp_g.node(point_id).attrs["component"], label)
            previous_label = label
Example #15
    def __iter__(self):
        with gp.build(self._pipeline):
            teardown = False
            while not teardown:
                batch = self._pipeline.request_batch(self._request)
                yield batch
                teardown = yield
        yield None
Example #16
    def test_context(self):
        d_pred = gp.ArrayKeys.D_PRED
        m_pred = gp.ArrayKeys.M_PRED
        presyn = gp.PointsKeys.PRESYN
        postsyn = gp.PointsKeys.POSTSYN

        outdir = tempfile.mkdtemp()

        voxel_size = gp.Coordinate((10, 10, 10))
        size = (200, 200, 200)
        # Check that the score of the entire cube is measured, even though the
        # cube around the border point lies partially outside the request ROI.
        context = 40
        shape = gp.Coordinate(size) / voxel_size
        m_predar = np.zeros(shape, dtype=np.float32)
        outsidepoint = gp.Coordinate((13, 13, 13))
        borderpoint = (4, 4, 4)
        m_predar[3:5, 3:5, 3:5] = 1
        m_predar[outsidepoint] = 1

        d_predar = np.ones((3, shape[0], shape[1], shape[2])) * 0

        pipeline = (TestSource(m_predar, d_predar, voxel_size=voxel_size) +
                    ExtractSynapses(m_pred,
                                    d_pred,
                                    presyn,
                                    postsyn,
                                    out_dir=outdir,
                                    settings=parameters,
                                    context=context) +
                    gp.PrintProfilingStats())

        request = gp.BatchRequest()

        roi = gp.Roi((40, 40, 40), (80, 80, 80))

        request[presyn] = gp.PointsSpec(roi=roi)
        request[postsyn] = gp.PointsSpec(roi=roi)
        with gp.build(pipeline):
            batch = pipeline.request_batch(request)

        synapsefile = os.path.join(outdir, "40", "40", "40.npz")
        with np.load(synapsefile) as data:
            data = dict(data)

        self.assertTrue(len(data['ids']) == 1)
        self.assertEqual(data['scores'][0], 2.0**3)  # Size of the cube.
        for ii in range(len(voxel_size)):
            self.assertEqual(data['positions'][0][0][ii],
                             borderpoint[ii] * voxel_size[ii])

        for ii in range(len(voxel_size)):
            self.assertEqual(data['positions'][0][1][ii],
                             borderpoint[ii] * voxel_size[ii] + 0)
        shutil.rmtree(outdir)
Example #17
def test_realistic_valid_examples(example, use_gurobi):
    penalty_attr = "penalty"
    location_attr = "location"
    example_dir = Path(__file__).parent / "mouselight_examples" / "valid" / example

    consensus = PointsKey("CONSENSUS")
    skeletonization = PointsKey("SKELETONIZATION")
    matched = PointsKey("MATCHED")
    matched_with_fallback = PointsKey("MATCHED_WITH_FALLBACK")

    inf_roi = Roi(Coordinate((None,) * 3), Coordinate((None,) * 3))

    request = BatchRequest()
    request[matched] = PointsSpec(roi=inf_roi)
    request[matched_with_fallback] = PointsSpec(roi=inf_roi)
    request[consensus] = PointsSpec(roi=inf_roi)

    pipeline = (
        (
            GraphSource(example_dir / "graph.obj", [skeletonization]),
            GraphSource(example_dir / "tree.obj", [consensus]),
        )
        + MergeProvider()
        + TopologicalMatcher(
            skeletonization,
            consensus,
            matched,
            expected_edge_len=10,
            match_distance_threshold=76,
            max_gap_crossing=48,
            use_gurobi=use_gurobi,
            location_attr=location_attr,
            penalty_attr=penalty_attr,
        )
        + TopologicalMatcher(
            skeletonization,
            consensus,
            matched_with_fallback,
            expected_edge_len=10,
            match_distance_threshold=76,
            max_gap_crossing=48,
            use_gurobi=use_gurobi,
            location_attr=location_attr,
            penalty_attr=penalty_attr,
            with_fallback=True,
        )
    )

    with build(pipeline):
        batch = pipeline.request_batch(request)
        consensus_ccs = list(batch[consensus].connected_components)
        matched_with_fallback_ccs = list(batch[matched_with_fallback].connected_components)
        matched_ccs = list(batch[matched].connected_components)

        assert len(matched_ccs) == len(consensus_ccs)
Example #18
def test_filter_components():
    raw = GraphKey("RAW")

    pipeline = TestSource() + FilterComponents(raw, 100,
                                               Coordinate((10, 10, 10)))

    request_no_fallback = BatchRequest()
    request_no_fallback[raw] = GraphSpec(roi=Roi((0, 0, 0), (20, 20, 20)))

    with build(pipeline):
        batch = pipeline.request_batch(request_no_fallback)
        assert raw in batch
        assert len(list(batch[raw].connected_components)) == 1

    request_fallback = BatchRequest()
    request_fallback[raw] = GraphSpec(roi=Roi((20, 20, 20), (20, 20, 20)))

    with build(pipeline):
        batch = pipeline.request_batch(request_fallback)
        assert raw in batch
        assert len(list(batch[raw].connected_components)) == 0
Example #19
    def test_3d(self):

        test_graph = GraphKey("TEST_GRAPH")
        graph_spec = GraphSpec(roi=Roi((0, 0, 0), (5, 5, 5)))
        test_array = ArrayKey("TEST_ARRAY")
        array_spec = ArraySpec(
            roi=Roi((0, 0, 0), (5, 5, 5)), voxel_size=Coordinate((1, 1, 1))
        )
        test_array2 = ArrayKey("TEST_ARRAY2")
        array2_spec = ArraySpec(
            roi=Roi((0, 0, 0), (5, 5, 5)), voxel_size=Coordinate((1, 1, 1))
        )

        snapshot_request = BatchRequest()
        snapshot_request.add(test_graph, Coordinate((5, 5, 5)))

        pipeline = ExampleSource(
            [test_graph, test_array, test_array2], [graph_spec, array_spec, array2_spec]
        ) + Snapshot(
            {
                test_graph: "graphs/graph",
                test_array: "volumes/array",
                test_array2: "volumes/array2",
            },
            output_dir=str(self.test_dir),
            every=2,
            additional_request=snapshot_request,
            output_filename="snapshot.hdf",
        )

        snapshot_file_path = Path(self.test_dir, "snapshot.hdf")

        with build(pipeline):

            request = BatchRequest()
            roi = Roi((0, 0, 0), (5, 5, 5))

            request[test_array] = ArraySpec(roi=roi)
            request[test_array2] = ArraySpec(roi=roi)

            pipeline.request_batch(request)

            assert snapshot_file_path.exists()
            f = h5py.File(snapshot_file_path, "r")
            assert f["volumes/array"] is not None
            assert f["graphs/graph-ids"] is not None

            snapshot_file_path.unlink()
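            # with every=2, this second request is not written, so no new file appears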

            pipeline.request_batch(request)

            assert not snapshot_file_path.exists()
Example #20
    def test_multi_transpose(self):
        test_graph = GraphKey("TEST_GRAPH")
        test_array1 = ArrayKey("TEST_ARRAY1")
        test_array2 = ArrayKey("TEST_ARRAY2")
        point = np.array([50, 70, 100])

        transpose_dims = [0, 1, 2]
        pipeline = (ArrayTestSource(),
                    ExampleSource()) + MergeProvider() + SimpleAugment(
                        mirror_only=[], transpose_only=transpose_dims)

        request = BatchRequest()
        offset = (0, 20, 33)
        request[GraphKeys.TEST_GRAPH] = GraphSpec(
            roi=Roi(offset, (100, 100, 120)))
        request[ArrayKeys.TEST_ARRAY1] = ArraySpec(
            roi=Roi((0, 0, 0), (100, 200, 300)))
        request[ArrayKeys.TEST_ARRAY2] = ArraySpec(
            roi=Roi((0, 100, 250), (100, 100, 50)))

        # Create all possible permutations of the transpose dims
        transpose_combinations = list(permutations(transpose_dims, 3))
        possible_loc = np.zeros((len(transpose_combinations), 3))

        # Transpose points in all possible ways
        for i, comb in enumerate(transpose_combinations):
            possible_loc[i] = point[np.array(comb)]

        with build(pipeline):
            seen_transposed = False
            seen_node = False
            for i in range(100):
                batch = pipeline.request_batch(request)

                if len(list(batch[GraphKeys.TEST_GRAPH].nodes)) == 1:
                    seen_node = True
                    node = list(batch[GraphKeys.TEST_GRAPH].nodes)[0]

                    assert node.location in possible_loc

                    seen_transposed = seen_transposed or any(
                        [node.location[dim] != point[dim] for dim in range(3)])
                    assert Roi((0, 20, 33), (100, 100, 120)).contains(
                        batch[GraphKeys.TEST_GRAPH].spec.roi)
                    assert batch[GraphKeys.TEST_GRAPH].spec.roi.contains(
                        node.location)

                for (array_key, array) in batch.arrays.items():
                    assert batch.arrays[array_key].data.shape == batch.arrays[
                        array_key].spec.roi.get_shape()
            assert seen_transposed
            assert seen_node
Example #21
    def test_output_basics(self):
        d_pred = gp.ArrayKeys.D_PRED
        m_pred = gp.ArrayKeys.M_PRED
        presyn = gp.PointsKeys.PRESYN
        postsyn = gp.PointsKeys.POSTSYN

        voxel_size = gp.Coordinate((10, 10, 10))
        size = (200, 200, 200)
        context = 40
        shape = gp.Coordinate(size) / voxel_size
        m_predar = np.zeros(shape, dtype=np.float32)
        insidepoint = gp.Coordinate((10, 10, 10))
        outsidepoint = gp.Coordinate((15, 15, 15))
        m_predar[insidepoint] = 1
        m_predar[outsidepoint] = 1

        d_predar = np.ones((3, shape[0], shape[1], shape[2])) * 10

        outdir = tempfile.mkdtemp()

        pipeline = (TestSource(m_predar, d_predar, voxel_size=voxel_size) +
                    ExtractSynapses(m_pred,
                                    d_pred,
                                    presyn,
                                    postsyn,
                                    out_dir=outdir,
                                    settings=parameters,
                                    context=context))

        request = gp.BatchRequest()

        roi = gp.Roi((40, 40, 40), (80, 80, 80))

        request[presyn] = gp.PointsSpec(roi=roi)
        request[postsyn] = gp.PointsSpec(roi=roi)
        with gp.build(pipeline):
            batch = pipeline.request_batch(request)
        print(outdir, "outdir")
        synapsefile = os.path.join(outdir, "40", "40", "40.npz")
        with np.load(synapsefile) as data:
            data = dict(data)

        self.assertTrue(len(data['ids']) == 1)
        self.assertEqual(data['scores'][0], 1.0)  # Size of the cube.
        for ii in range(len(voxel_size)):
            self.assertEqual(data['positions'][0][1][ii],
                             insidepoint[ii] * voxel_size[ii])

        for ii in range(len(voxel_size)):
            self.assertEqual(data['positions'][0][0][ii],
                             insidepoint[ii] * voxel_size[ii] + 10)
        shutil.rmtree(outdir)
Example #22
    def save_embs(self):

        pipeline, request, predictions = self.make_pipeline()

        with gp.build(pipeline):
            try:
                shutil.rmtree(
                    os.path.join(self.curr_log_dir,
                                 self.dataset + '_embs.zarr'))
            except OSError:
                pass

            pipeline.request_batch(gp.BatchRequest())
Example #23
    def test_pipeline3(self):
        array_key = ArrayKey("TEST_ARRAY")
        points_key = GraphKey("TEST_POINTS")
        voxel_size = Coordinate((1, 1))
        spec = ArraySpec(voxel_size=voxel_size, interpolatable=True)

        hdf5_source = Hdf5Source(self.fake_data_file, {array_key: "testdata"},
                                 array_specs={array_key: spec})
        csv_source = CsvPointsSource(
            self.fake_points_file,
            points_key,
            GraphSpec(roi=Roi(shape=Coordinate((100, 100)), offset=(0, 0))),
        )

        request = BatchRequest()
        shape = Coordinate((60, 60))
        request.add(array_key, shape, voxel_size=Coordinate((1, 1)))
        request.add(points_key, shape)
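
        # ShiftAugment randomly slips and shifts individual sections taken along shift_axis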

        shift_node = ShiftAugment(prob_slip=0.2,
                                  prob_shift=0.2,
                                  sigma=5,
                                  shift_axis=0)
        pipeline = ((hdf5_source, csv_source) + MergeProvider() +
                    RandomLocation(ensure_nonempty=points_key) + shift_node)
        with build(pipeline) as b:
            batch = b.request_batch(request)
            # print(batch[points_key])

        target_vals = [
            self.fake_data[point[0]][point[1]] for point in self.fake_points
        ]
        result_data = batch[array_key].data
        result_points = list(batch[points_key].nodes)
        result_vals = [
            result_data[int(point.location[0])][int(point.location[1])]
            for point in result_points
        ]

        for result_val in result_vals:
            self.assertTrue(
                result_val in target_vals,
                msg=
                "result value {} at points {} not in target values {} at points {}"
                .format(
                    result_val,
                    list(result_points),
                    target_vals,
                    self.fake_points,
                ),
            )