Example #1
    def test_unclip(self):
        self._handler.apply(NeuronClipping().clipAll(True))
        self._handler.apply(NeuronClipping().unclip(
                range(1, 501), [0.5] * 500, [1.0] * 500))
        # This result must match the result from test_clip
        image_compare.capture_and_compare(
            self.view, self._prefix + "simple_clip.png")
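All of these examples appear to rely on the same fixture: a scene object handler (self._handler) that accepts NeuronClipping operations, a view (self.view) and an engine (self.engine) used for snapshots, a reference-image prefix (self._prefix), and an image_compare helper module. The exact setup depends on the RTNeuron test harness; the skeleton below is only a hedged sketch of the shape these tests assume, with the scene-loading details and import paths marked as assumptions.

    import os
    import unittest

    import image_compare                          # helper assumed to ship with the test suite
    from rtneuron import RepresentationMode       # assumed import location
    from rtneuron.sceneops import NeuronClipping  # assumed import location

    class NeuronClippingTestCase(unittest.TestCase):
        # Hypothetical fixture: the real setUp builds an engine, a view and a
        # handler for a scene whose morphology appears to have ~500 sections
        # (the examples clip section ids 0-499).
        def setUp(self):
            self.engine = None    # engine driving the rendering loop
            self.view = None      # view from which snapshots are captured
            self._handler = None  # scene object handler taking NeuronClipping
            self._prefix = ""     # prefix for reference images such as "simple_clip.png"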
Example #2
    def test_multiple_clip_1(self):
        clipping = NeuronClipping()
        for i in range(500):
            clipping.clip([i], [0.0], [0.5])
        self._handler.apply(clipping)
        image_compare.capture_and_compare(
            self.view, self._prefix + "simple_clip.png")
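This test is expected to render the same image as test_clip (Examples #9 and #10), which issues a single batched clip call. As an illustration only, and not RTNeuron code, the toy model below shows why accumulating per-section clips ends up in the same clip state as one call covering all sections.

    # Toy model, for illustration only: the clip state is a dict mapping each
    # section id to the list of clipped (start, end) fractions.
    def toy_clip(state, sections, starts, ends):
        for sec, start, end in zip(sections, starts, ends):
            state.setdefault(sec, []).append((start, end))
        return state

    batched = toy_clip({}, range(500), [0.0] * 500, [0.5] * 500)

    one_by_one = {}
    for i in range(500):
        toy_clip(one_by_one, [i], [0.0], [0.5])

    # Both routes end with the same ranges clipped on every section.
    assert batched == one_by_one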
Example #3
    def test_soma_mode(self):
        self._handler.attributes.mode = RepresentationMode.SOMA
        self._handler.update()
        self.engine.frame()
        tmp = image_compare.capture_temporary(self.view)

        self._handler.apply(NeuronClipping().clipAll(False))
        try:
            image_compare.capture_and_compare(self.view,
                                              tmp,
                                              prepend_sample_path=False)
        finally:
            os.remove(tmp)

        self._handler.attributes.mode = RepresentationMode.NO_DISPLAY
        self._handler.update()
        self.engine.frame()
        tmp = image_compare.capture_temporary(self.view)

        self._handler.apply(NeuronClipping().clipAll(True))
        try:
            image_compare.capture_and_compare(self.view,
                                              tmp,
                                              prepend_sample_path=False)
        finally:
            os.remove(tmp)
Example #4
    def test_multiple_unclip_2(self):
        self._handler.apply(NeuronClipping().clipAll(True))
        # Apply each unclip as a separate operation; the end result must still
        # match the reference image used by test_clip.
        for i in range(1, 501):
            self._handler.apply(NeuronClipping().unclip([i], [0.5], [1.0]))
        image_compare.capture_and_compare(
            self.view, self._prefix + "simple_clip.png")
Example #5
    def test_unclip_all(self):
        tmp = image_compare.capture_temporary(self.view)

        self._handler.apply(NeuronClipping().clipAll(True))
        self._handler.apply(NeuronClipping().unclipAll())
        try:
            image_compare.capture_and_compare(
                self.view, tmp, prepend_sample_path=False)
        finally:
            os.remove(tmp)
Example #6
    def test_clip_and_unclip_are_perfect_opposites(self):
        from random import random
        starts = [random() for i in range(500)]
        ends = [s + random() * (1 - s) for s in starts]

        tmp = image_compare.capture_temporary(self.view)
        self._handler.apply(NeuronClipping().clip(range(500), starts, ends))
        self._handler.apply(NeuronClipping().unclip(range(500), starts, ends))
        try:
            image_compare.capture_and_compare(
                self.view, tmp, prepend_sample_path=False)
        finally:
            os.remove(tmp)
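This test depends on clip and unclip being exact inverses when given identical ranges that were fully visible beforehand. The sketch below is a standalone toy of that interval arithmetic, not RTNeuron's implementation: subtracting a range from a section's visible intervals and then adding the same range back restores the original state.

    from random import random

    def subtract(intervals, start, end):
        # Remove [start, end] from a list of disjoint (lo, hi) intervals.
        result = []
        for lo, hi in intervals:
            if end <= lo or start >= hi:
                result.append((lo, hi))      # no overlap, keep as is
                continue
            if lo < start:
                result.append((lo, start))   # keep the part before the cut
            if end < hi:
                result.append((end, hi))     # keep the part after the cut
        return result

    def add(intervals, start, end):
        # Insert [start, end] and merge any overlapping intervals.
        merged = []
        for lo, hi in sorted(intervals + [(start, end)]):
            if merged and lo <= merged[-1][1]:
                merged[-1] = (merged[-1][0], max(merged[-1][1], hi))
            else:
                merged.append((lo, hi))
        return merged

    starts = [random() for _ in range(500)]
    ends = [s + random() * (1 - s) for s in starts]

    visible = {s: [(0.0, 1.0)] for s in range(500)}
    clipped = {s: subtract(visible[s], starts[s], ends[s]) for s in range(500)}
    restored = {s: add(clipped[s], starts[s], ends[s]) for s in range(500)}

    # Unclipping the exact same ranges restores full visibility.
    assert restored == visible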
Example #7
    def test_clip_soma_interval(self):
        # The soma is clipped by any range
        self._handler.apply(NeuronClipping().clipAll())
        self._handler.apply(NeuronClipping().clip([0], [0], [0.1]))
        tmp = image_compare.capture_temporary(self.view)

        self._handler.attributes.mode = RepresentationMode.NO_DISPLAY
        self._handler.update()
        try:
            image_compare.capture_and_compare(
                self.view, tmp, prepend_sample_path=False)
        finally:
            os.remove(tmp)
Example #8
    def test_clip_chain_1(self):
        tmp = image_compare.capture_temporary(self.view)
        self._handler.apply(
            NeuronClipping().clipAll()
                            .unclip(range(500), [0] * 500, [1.0] * 500))
        try:
            image_compare.capture_and_compare(
                self.view, tmp, prepend_sample_path=False)
        finally:
            os.remove(tmp)
Example #9
    def test_clip(self):
        self._handler.apply(NeuronClipping().clip(
                range(500), [0.0] * 500, [0.5] * 500))

        # Rendering at least one frame to make sure the scene is not empty for
        # the snapshot.
        self.engine.frame()
        self.engine.waitFrame()

        image_compare.capture_and_compare(
            self.view, self._prefix + "simple_clip.png")
Example #10
    def test_clip(self):
        self._handler.apply(NeuronClipping().clip(range(500), [0.0] * 500,
                                                  [0.5] * 500))

        # Rendering a couple of frames to ensure that the clipping operations
        # have taken effect before the snapshot.
        for i in range(2):
            self.engine.frame()
            self.engine.waitFrame()

        image_compare.capture_and_compare(self.view,
                                          self._prefix + "simple_clip.png")
Example #11
    def test_badlengths(self):
        clipping = NeuronClipping()
        self.assertRaises(ValueError, NeuronClipping.clip, clipping,
                          [0], [0], [0, 1])
        self.assertRaises(ValueError, NeuronClipping.clip, clipping,
                          [0], [0, 1], [0])
        self.assertRaises(ValueError, NeuronClipping.clip, clipping,
                          [0, 1], [0], [0])
        self.assertRaises(ValueError, NeuronClipping.unclip, clipping,
                          [0], [0], [0, 1])
        self.assertRaises(ValueError, NeuronClipping.unclip, clipping,
                          [0], [0, 1], [0])
        self.assertRaises(ValueError, NeuronClipping.unclip, clipping,
                          [0, 1], [0], [0])
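The only behaviour exercised here is argument validation: clip and unclip must raise ValueError whenever the three argument lists disagree in length. A minimal sketch of that kind of check (not necessarily how NeuronClipping implements it) is:

    def check_lengths(sections, starts, ends):
        # Mirrors the contract the test asserts: mismatched lengths are an error.
        if not (len(sections) == len(starts) == len(ends)):
            raise ValueError(
                "sections, starts and ends must have the same length")

    check_lengths([0], [0], [0])       # fine
    # check_lengths([0], [0], [0, 1])  # would raise ValueError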
Example #12
    def test_clip_axon(self):
        sections = [int(x) for x in self._morphology.section_ids(
            [brain.neuron.SectionType.axon])]
        length = len(sections)
        self._handler.apply(NeuronClipping().clip(
                sections, [0] * length, [1] * length))
        tmp = image_compare.capture_temporary(self.view)

        self._handler.attributes.mode = RepresentationMode.NO_AXON
        self._handler.update()
        try:
            image_compare.capture_and_compare(
                self.view, tmp, prepend_sample_path=False)
        finally:
            os.remove(tmp)
Example #13
    def test_clip_chain_2(self):
        self._handler.apply(
            NeuronClipping().clip(range(500), [0] * 500, [0.75] * 500)
                            .unclip(range(1, 501), [0.5] * 500, [0.75] * 500))
        image_compare.capture_and_compare(
            self.view, self._prefix + "simple_clip.png")