Example 1
class TestSingleTypeCompilation(unittest.TestCase):
    """
    Check if the scaffold can create a single cell type.
    """
    @classmethod
    def setUpClass(self):
        super(TestSingleTypeCompilation, self).setUpClass()
        config = JSONConfig(file=single_neuron_config)
        self.scaffold = Scaffold(config)
        self.scaffold.compile_network()

    def test_placement_statistics(self):
        self.assertEqual(self.scaffold.statistics.cells_placed["test_cell"], 4)
        self.assertEqual(self.scaffold.get_cell_total(), 4)

    def test_network_cache(self):
        pass
        # TODO: Implement a check that the network cache contains the right amount of placed cells

    def test_hdf5_cells(self):
        pass
        # TODO: Implement a check that the hdf5 file contains the right datasets under 'cells'

    def test_from_hdf5(self):
        scaffold_copy = from_hdf5(self.scaffold.output_formatter.file)
        for key in self.scaffold.statistics.cells_placed:
            self.assertEqual(
                scaffold_copy.statistics.cells_placed[key],
                self.scaffold.statistics.cells_placed[key],
            )
        self.assertEqual(scaffold_copy.get_cell_total(), 4)
        self.assertRaises(OSError, from_hdf5, "doesntexist")
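The test_hdf5_cells placeholder above could be filled in along these lines. This is only a sketch: it borrows the cells/placement/<type> dataset layout exercised in Example 7, assumes h5py is available, and treats the exact group path for this network as an assumption.

    def test_hdf5_cells(self):
        # Sketch: verify the compiled file exposes the expected datasets.
        # The "cells/placement/test_cell" path mirrors Example 7 and is assumed here.
        with h5py.File(self.scaffold.output_formatter.file, "r") as f:
            group = f["cells/placement/test_cell"]
            self.assertIn("identifiers", group)
            self.assertIn("positions", group)
            self.assertEqual(group["positions"].shape, (4, 3))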
Example 2
class TestEntities(unittest.TestCase):
    @classmethod
    def setUpClass(self):
        import nest

        nest.ResetKernel()
        super(TestEntities, self).setUpClass()
        config = JSONConfig(file=minimal_config_entities)
        self.scaffold = Scaffold(config)
        self.scaffold.compile_network()
        hdf_config = _from_hdf5("minimal_entities.hdf5")
        self.scaffold_fresh = Scaffold(hdf_config,
                                       from_file="minimal_entities.hdf5")

    def test_placement_statistics(self):
        self.assertEqual(self.scaffold.statistics.cells_placed["entity_type"],
                         100)

    @unittest.skipIf(
        importlib.util.find_spec("nest") is None, "NEST is not importable.")
    def test_creation_in_nest(self):

        f = h5py.File("minimal_entities.hdf5", "r")
        ids = list(f["entities"]["entity_type"])
        self.assertEqual(ids, list(range(100)))
        f.close()

        # Try to load the network directly from the hdf5 file
        nest_adapter = self.scaffold_fresh.get_simulation("test")
        simulator = nest_adapter.prepare()
        nest_adapter.simulate(simulator)
        nest_ids = nest_adapter.entities["entity_type"].nest_identifiers
        self.assertEqual(list(nest_ids), list(range(1, 101)))
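Outside of the test runner, the entities dataset written by compile_network() can be inspected directly with h5py. A minimal sketch that relies only on the file name and dataset path used in the test above:

import h5py

with h5py.File("minimal_entities.hdf5", "r") as f:
    ids = list(f["entities"]["entity_type"])
print(len(ids), ids[:5])  # expected: 100 entity ids, starting at 0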
Example 3
class TestSingleNeuronTypeSetup(unittest.TestCase):
    def setUp(self):
        config = JSONConfig(file=single_neuron_config)
        self.scaffold = Scaffold(config)
        self.scaffold.compile_network()
        self.nest_adapter = self.scaffold.configuration.simulations[
            "test_single_neuron"]
        self.nest_adapter.reset()

    def tearDown(self):
        self.nest_adapter.delete_lock()

    def test_single_neuron(self):
        self.scaffold.run_simulation("test_single_neuron")
        test_cell_model = self.nest_adapter.cell_models["test_cell"]
        self.assertEqual(test_cell_model.nest_identifiers, list(range(1, 5)))

        test_neuron_status = self.nest_adapter.nest.GetStatus(
            test_cell_model.nest_identifiers)
        self.assertEqual(test_neuron_status[0]["t_ref"], 1.5)
        self.assertEqual(test_neuron_status[0]["C_m"], 7.0)
        self.assertEqual(test_neuron_status[0]["V_th"], -41.0)
        self.assertEqual(test_neuron_status[0]["V_reset"], -70.0)
        self.assertEqual(test_neuron_status[0]["E_L"], -62.0)
        self.assertEqual(test_neuron_status[0]["I_e"], 0.0)
Example 4
    def test_spoofing(self):
        """
        Assert that fake detailed connections can be made
        """
        config = JSONConfig(file=_config)
        scaffold = Scaffold(config)
        scaffold.compile_network()
        original_connections = len(
            scaffold.cell_connections_by_tag["connection"])
        sd = SpoofDetails()
        sd.presynaptic = "from_cell"
        sd.postsynaptic = "to_cell"
        sd.scaffold = scaffold
        # Raise an error because there are no morphologies registered for the
        # cell types.
        with self.assertRaises(
                MorphologyDataError,
                msg="Missing morphologies during spoofing not caught."):
            sd.after_connectivity()
        # Add some morphologies
        setattr(
            config.cell_types["from_cell"].morphology,
            "detailed_morphologies",
            {"names": ["GranuleCell"]},
        )
        setattr(
            config.cell_types["to_cell"].morphology,
            "detailed_morphologies",
            {"names": ["GranuleCell"]},
        )
        # Run the spoofing again
        sd.after_connectivity()
        cs = scaffold.get_connectivity_set("connection")
        scaffold.compile_output()
        # Use the intersection property. It throws an error should the detailed
        # information be missing
        try:
            i = cs.intersections
            for inter in i:
                fl = inter.from_compartment.labels
                tl = inter.to_compartment.labels
                self.assertIn("axon", fl,
                              "From compartment type is not an axon")
                self.assertIn("dendrites", tl,
                              "To compartment type is not a dendrite")
            self.assertNotEqual(len(i), 0, "Empty intersection data")
            self.assertEqual(len(i), original_connections,
                             "Different number of spoofed connections")
        except MissingMorphologyError:
            self.fail("Could not find the intersection data on spoofed set")
        # Set both types to relays and try spoofing again
        config.cell_types["from_cell"].relay = True
        config.cell_types["to_cell"].relay = True
        with self.assertRaises(MorphologyError,
                               msg="Did not catch double relay spoofing!"):
            sd.after_connectivity()
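The two identical setattr blocks above could be folded into a small helper on the test class. This is only a refactoring sketch; _spoof_morphology is a hypothetical name and the attributes are the ones already used in the test:

    def _spoof_morphology(self, cell_type, name="GranuleCell"):
        # Attach a detailed morphology entry so spoofing can resolve compartments.
        setattr(cell_type.morphology, "detailed_morphologies", {"names": [name]})

    # Usage inside test_spoofing:
    #   self._spoof_morphology(config.cell_types["from_cell"])
    #   self._spoof_morphology(config.cell_types["to_cell"])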
Example 5
    def test_representatives(self):
        """
        Test that 1 cell per non-relay cell model is chosen.
        """
        from patch import p

        config = JSONConfig(double_nn_config)
        scaffold = Scaffold(config)
        scaffold.compile_network()
        adapter = scaffold.create_adapter("neuron")
        adapter.h = p
        adapter.load_balance()
        device = adapter.devices["test_representatives"]
        device.initialise_targets()
        targets = adapter.devices["test_representatives"].get_targets()
        self.assertEqual(
            1,
            len(targets),
            "Targetting type `representatives` did not return the correct amount of representatives.",
        )
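Rather than hard-coding the expected count of 1, the number of representatives could be derived from the configuration. A sketch, assuming cell_types iterates like a dict and that cell types expose the same relay flag used in the spoofing example:

        # Sketch: one representative is expected per non-relay cell type.
        expected = sum(
            1 for ct in config.cell_types.values() if not getattr(ct, "relay", False)
        )
        self.assertEqual(expected, len(targets))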
Example 6
class TestMultiInstance(unittest.TestCase):
    @classmethod
    def setUpClass(self):
        super(TestMultiInstance, self).setUpClass()
        import nest

        self.nest = nest
        config = JSONConfig(file=single_neuron_config)
        self.scaffold = Scaffold(config)
        self.scaffold.compile_network()
        self.hdf5 = self.scaffold.output_formatter.file
        self.nest_adapter_0 = self.scaffold.get_simulation(
            "test_single_neuron")
        # When another test errors, the lock might remain, and all locking tests fail
        self.nest_adapter_0.delete_lock()
        self.nest_adapter_1 = self.scaffold.create_adapter(
            "test_single_neuron")
        self.nest_adapter_2 = self.scaffold.create_adapter(
            "test_single_neuron")
        self.nest_adapter_multi_1 = self.scaffold.create_adapter(
            "test_single_neuron")
        self.nest_adapter_multi_1.enable_multi("first")
        self.nest_adapter_multi_1b = self.scaffold.create_adapter(
            "test_single_neuron")
        self.nest_adapter_multi_1b.enable_multi("first")
        self.nest_adapter_multi_2 = self.scaffold.create_adapter(
            "test_single_neuron")
        self.nest_adapter_multi_2.enable_multi("second")

    def tearDown(self):
        # Clean up any remaining locks to keep the test functions independent.
        # Otherwise a chain reaction of failures is evoked.
        self.nest_adapter_0.delete_lock()

    def test_single_instance_unwanted_usage(self):
        # Test that an AdapterError is raised when trying to unlock an unlocked adapter
        self.assertRaises(AdapterError, self.nest_adapter_0.release_lock)
        # Test whether the scaffold throws an AdapterError when the same
        # adapter is prepared twice.
        self.nest_adapter_0.prepare()
        self.assertRaises(AdapterError, self.nest_adapter_0.prepare)

        self.nest_adapter_0.release_lock()
        self.nest_adapter_0.reset()

    def test_single_instance_single_lock(self):
        self.nest_adapter_1.reset()
        # Lock kernel. Prepare adapter and thereby lock NEST kernel
        self.nest_adapter_1.prepare()
        lock_data = self.nest_adapter_1.read_lock()
        self.assertEqual(lock_data["multi"], False)
        self.assertEqual(self.nest_adapter_1.multi, False)
        self.assertEqual(self.nest_adapter_1.has_lock, True)

        # Release lock.
        self.nest_adapter_1.release_lock()
        self.assertEqual(self.nest_adapter_1.read_lock(), None)
        self.assertEqual(self.nest_adapter_1.has_lock, False)
        self.nest_adapter_1.reset()

    def test_multi_instance_single_lock(self):
        # Test that a 2nd single-instance adapter can't manage a locked kernel.
        self.nest_adapter_1.prepare()

        self.assertRaises(KernelLockedError, self.nest_adapter_2.prepare)
        self.assertEqual(self.nest_adapter_2.is_prepared, False)

        self.nest_adapter_1.release_lock()
        self.nest_adapter_1.reset()
        self.nest_adapter_2.reset()

    def test_single_instance_multi_lock(self):
        self.nest_adapter_multi_1.reset()
        # Test functionality of the multi lock.
        self.nest_adapter_multi_1.prepare()
        lock_data = self.nest_adapter_multi_1.read_lock()
        self.assertEqual(self.nest_adapter_multi_1.suffix, "first")
        self.assertEqual(lock_data["multi"], True)
        self.assertEqual(lock_data["suffixes"][0],
                         self.nest_adapter_multi_1.suffix)
        self.assertEqual(self.nest_adapter_multi_1.multi, True)
        self.assertEqual(self.nest_adapter_multi_1.is_prepared, True)
        self.assertEqual(self.nest_adapter_multi_1.has_lock, True)

        self.nest_adapter_multi_1.release_lock()
        self.assertEqual(self.nest_adapter_multi_1.multi, True)
        self.assertEqual(self.nest_adapter_multi_1.has_lock, False)
        self.nest_adapter_multi_1.reset()
        self.assertEqual(self.nest_adapter_multi_1.is_prepared, False)

    def test_multi_instance_multi_lock(self):
        # Test functionality of the multi lock.
        self.nest_adapter_multi_1.prepare()
        # Test that we have 1 lock.
        lock_data = self.nest_adapter_multi_1.read_lock()
        # Check multi instance multi lock
        self.nest_adapter_multi_2.cell_models["test_cell"].parameters[
            "t_ref"] = 3.0
        self.nest_adapter_multi_2.prepare()
        # Check that we have 2 locks
        lock_data = self.nest_adapter_multi_1.read_lock()
        self.assertEqual(len(lock_data["suffixes"]), 2)

        # Test that we set the right parameters on the right classes.
        try:
            params = self.nest.GetDefaults("test_cell_first")
        except Exception as e:
            self.fail("First suffixed NEST models not found")
        try:
            params = self.nest.GetDefaults("test_cell_second")
        except Exception as e:
            self.fail("Second suffixed NEST models not found")

        # Test that the adapters have the correct nest_identifiers
        id1 = self.nest_adapter_multi_1.cell_models[
            "test_cell"].nest_identifiers
        id2 = self.nest_adapter_multi_2.cell_models[
            "test_cell"].nest_identifiers
        self.assertEqual(id1, [1, 2, 3, 4])
        self.assertEqual(id2, [5, 6, 7, 8])

        # Test that the adapter nodes have the right model
        self.assertTrue(
            all(
                map(
                    lambda x: str(x["model"]) == "test_cell_first",
                    self.nest.GetStatus(id1),
                )))
        self.assertTrue(
            all(
                map(
                    lambda x: str(x["model"]) == "test_cell_second",
                    self.nest.GetStatus(id2),
                )))

        # Test that the adapter nodes have the right differential parameter t_ref
        self.assertTrue(
            all(map(lambda x: x["t_ref"] == 1.5, self.nest.GetStatus(id1))))
        self.assertTrue(
            all(map(lambda x: x["t_ref"] == 3.0, self.nest.GetStatus(id2))))

        # Check duplicate suffixes
        self.assertRaises(SuffixTakenError, self.nest_adapter_multi_1b.prepare)

        self.nest_adapter_multi_1.release_lock()
        self.nest_adapter_multi_1.reset()
        # Test that we have 1 lock again after release.
        lock_data = self.nest_adapter_multi_1.read_lock()
        self.assertEqual(lock_data["suffixes"][0], "second")
        self.nest_adapter_multi_2.release_lock()
        self.nest_adapter_multi_2.reset()
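Taken together, these tests exercise a simple lock life-cycle on the shared NEST kernel. A condensed usage sketch, using only the adapter calls that appear in this class and assuming scaffold is a compiled Scaffold as in setUpClass:

adapter = scaffold.create_adapter("test_single_neuron")
adapter.enable_multi("my_suffix")  # optional: share the kernel under a suffix
adapter.prepare()                  # acquires the kernel lock
lock_data = adapter.read_lock()    # e.g. {"multi": True, "suffixes": ["my_suffix"]}
adapter.release_lock()             # drop this adapter's claim on the kernel
adapter.reset()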
Example 7
class TestPlacementSets(unittest.TestCase):
    """
    Check if the scaffold can create a single cell type.
    """
    @classmethod
    def setUpClass(self):
        super(TestPlacementSets, self).setUpClass()
        test_setup.prep_morphologies()
        config = JSONConfig(file=double_neuron_config)
        self.scaffold = Scaffold(config)
        self.scaffold.compile_network()

    def test_hdf5_structure(self):
        with h5py.File(self.scaffold.output_formatter.file, "r") as h:
            for key in ["from", "to"]:
                group = h["cells/placement/" + key + "_cell"]
                self.assertTrue(
                    "identifiers" in group,
                    "Identifiers dataset missing for the " + key + "_cell",
                )
                self.assertTrue(
                    "positions" in group,
                    "Positions dataset missing for the " + key + "_cell",
                )
                self.assertEqual(
                    group["positions"].shape,
                    (4, 3),
                    "Incorrect position dataset size for the " + key + "_cell",
                )
                self.assertTrue(
                    group["positions"].dtype == np.float64,
                    "Incorrect position dataset dtype ({}) for the ".format(
                        group["positions"].dtype) + key + "_cell",
                )
                self.assertEqual(
                    group["identifiers"].shape,
                    (2, ),
                    "Incorrect or noncontinuous identifiers dataset size for the "
                    + key + "_cell",
                )
                self.assertTrue(
                    group["identifiers"].dtype == np.int32,
                    "Incorrect identifiers dataset dtype ({}) for the ".format(
                        group["identifiers"].dtype) + key + "_cell",
                )

    def test_placement_set_properties(self):
        for key in ["from", "to"]:
            cell_type = self.scaffold.get_cell_type(key + "_cell")
            ps = PlacementSet(self.scaffold.output_formatter, cell_type)
            self.assertEqual(
                ps.identifiers.shape,
                (4, ),
                "Incorrect identifiers shape for " + key + "_cell",
            )
            self.assertEqual(
                ps.positions.shape,
                (4, 3),
                "Incorrect positions shape for " + key + "_cell",
            )
            self.assertRaises(DatasetNotFoundError, lambda: ps.rotations)
            self.assertEqual(type(ps.cells[0]), Cell,
                             "PlacementSet.cells did not return Cells")
            self.assertEqual(
                ps.cells[1].id,
                1 if key == "from" else 5,
                "PlacementSet.cells identifiers incorrect",
            )
            self.assertEqual(
                ps.cells[1].position.shape,
                (3, ),
                "PlacementSet.cells positions wrong shape",
            )
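The PlacementSet attributes checked above (identifiers, positions, cells) also support plain iteration outside of a test. A minimal sketch, assuming scaffold is a compiled Scaffold as in setUpClass:

cell_type = scaffold.get_cell_type("from_cell")
ps = PlacementSet(scaffold.output_formatter, cell_type)
print(ps.identifiers.shape, ps.positions.shape)  # (4,) and (4, 3)
for cell in ps.cells:
    print(cell.id, cell.position)  # position is a (3,) array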
Example 8
class TestFiberIntersection(unittest.TestCase):
    @classmethod
    def setUpClass(self):
        super(TestFiberIntersection, self).setUpClass()
        # Make sure the MR exists
        test_setup.prep_morphologies()
        # The scaffold has only the Granular layer (100x100x150) with 20 GrCs
        # and 1 GoC placed, as specified in the config file
        self.config = JSONConfig(file=fiber_transform_config)
        # Define the quiver field so that it also covers voxels outside the
        # scaffold volume; each voxel in the volume has vol_res = 25 um
        self.quivers_field = np.zeros(shape=(3, 80, 80, 80))
        # Fake quiver, oriented like the original fibers
        basic_quiver = np.array([0, 1.0, 0.0])
        self.quivers_field[0, :] = basic_quiver[0]
        self.quivers_field[1, :] = basic_quiver[1]
        self.quivers_field[2, :] = basic_quiver[2]
        self.config.connection_types[
            "parallel_fiber_to_golgi_bended"].transformation.quivers = self.quivers_field
        self.config.connection_types[
            "parallel_fiber_to_golgi_bended"].transformation.vol_start = [
                -500.0, -500.0, -500.0
            ]
        self.scaffold = Scaffold(self.config)
        self.scaffold.morphology_repository = MorphologyRepository(morpho_file)
        self.scaffold.compile_network()

    def test_fiber_connections(self):
        pre_type = "granule_cell"
        pre_neu = self.scaffold.get_placement_set(pre_type)
        conn_type = "parallel_fiber_to_golgi"
        cs = self.scaffold.get_connectivity_set(conn_type)
        num_conn = len(cs.connections)
        # Check that no more connections are formed than the number of
        # presynaptic neurons: with a single Golgi cell placed, each granule
        # cell is expected to contribute at most one connection
        self.assertTrue(num_conn <= len(pre_neu.identifiers))

        # Check that increasing resolution in FiberIntersection does not change
        # connection number if there are no transformations (and thus the fibers
        # are parallel to main axes)
        conn_type_HR = "parallel_fiber_to_golgi_HR"
        cs_HR = self.scaffold.get_connectivity_set(conn_type_HR)
        self.assertEqual(num_conn, len(cs_HR.connections))

        # Check that roughly half the connections (within ±5) are obtained with half the affinity
        conn_type_affinity = "parallel_fiber_to_golgi_affinity"
        cs_affinity = self.scaffold.get_connectivity_set(conn_type_affinity)
        self.assertAlmostEqual(num_conn / 2,
                               len(cs_affinity.connections),
                               delta=5)

        # Check that the same number of connections is obtained when a fake
        # quiver transformation is applied
        conn_type_transform = "parallel_fiber_to_golgi_bended"
        cs_fake_transform = self.scaffold.get_connectivity_set(
            conn_type_transform)
        self.assertEqual(len(cs_fake_transform.connections), num_conn)

        # Check that fewer connections are obtained when the quiver field
        # orients the fibers at 45° in the yz plane, given how the Golgi cell
        # is placed relative to the rotated parallel fibers
        basic_quiver = np.array([0, 0.7, 0.7])
        self.quivers_field[0, :] = basic_quiver[0]
        self.quivers_field[1, :] = basic_quiver[1]
        self.quivers_field[2, :] = basic_quiver[2]
        self.config.connection_types[
            "parallel_fiber_to_golgi_bended"].transformation.quivers = self.quivers_field
        self.scaffold = Scaffold(self.config)
        self.scaffold.compile_network()
        cs_transform = self.scaffold.get_connectivity_set(conn_type_transform)
        self.assertTrue(len(cs_transform.connections) <= num_conn)
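A note on the 45° quiver used above: [0, 0.7, 0.7] is only an approximation and is not unit length. If an exact unit vector is preferred, it can be computed directly; a small sketch:

import numpy as np

# Exact unit vector for a 45-degree rotation in the yz plane (~[0, 0.707, 0.707])
basic_quiver = np.array([0.0, 1.0, 1.0]) / np.sqrt(2)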