def setUp(self):
    os.chdir(os.path.dirname(__file__))

    self.network_path = os.path.join("networks", "network_testing_input")
    self.config_file = os.path.join(self.network_path, "network-config.json")
    self.position_file = os.path.join(self.network_path, "network-neuron-positions.hdf5")
    self.save_file = os.path.join(self.network_path, "voxels", "network-putative-synapses.hdf5")

    # Set up the network so we can test input generation
    from snudda.init.init import SnuddaInit
    cell_spec = os.path.join(os.path.dirname(__file__), "validation")
    cnc = SnuddaInit(struct_def={}, config_file=self.config_file, random_seed=1234)
    cnc.define_striatum(num_dSPN=5, num_iSPN=0, num_FS=5, num_LTS=0, num_ChIN=0,
                        volume_type="cube", neurons_dir=cell_spec)
    cnc.write_json(self.config_file)

    # Place neurons
    from snudda.place.place import SnuddaPlace
    npn = SnuddaPlace(config_file=self.config_file,
                      log_file=None,
                      verbose=True,
                      d_view=None,  # TODO: If d_view is None the code runs in serial; add a parallel test
                      h5libver="latest")
    npn.parse_config()
    npn.write_data(self.position_file)

    # Detect
    self.sd = SnuddaDetect(config_file=self.config_file,
                           position_file=self.position_file,
                           save_file=self.save_file,
                           rc=None,
                           hyper_voxel_size=120,
                           verbose=True)
    self.sd.detect(restart_detection_flag=True)

    # Prune
    self.network_file = os.path.join(self.network_path, "network-synapses.hdf5")
    sp = SnuddaPrune(network_path=self.network_path, config_file=None)  # Use default config file
    sp.prune(pre_merge_only=False)
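# A sketch of the module-level imports these snippets appear to rely on. SnuddaDetect, SnuddaPrune
# and SnuddaLoad are used below without local imports; the paths shown match the local imports that
# do appear in this section. NeuronMorphology, PlotNetwork, NumpyEncoder, create_cube_mesh and
# RepositionNeurons are also needed, but their module paths are not shown here, so they are omitted.
import os
import json
import time
import unittest

import numpy as np

from snudda.detect.detect import SnuddaDetect
from snudda.detect.prune import SnuddaPrune
from snudda.utils.load import SnuddaLoad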
def setup_network(self, neurons_path, num_replicas=10, neuron_types=None):
    # TODO: num_replicas should be set by a parameter, it affects how many duplicates of each neuron
    #  there are, and thus how many steps we have between the n_min and n_max number of inputs specified.
    config_def = self.create_network_config(neurons_path=neurons_path,
                                            num_replicas=num_replicas,
                                            neuron_types=neuron_types)

    print(f"Writing network config file to {self.network_config_file_name}")
    with open(self.network_config_file_name, "w") as f:
        json.dump(config_def, f, indent=2, cls=NumpyEncoder)

    create_cube_mesh(os.path.join("data", "mesh", "InputTestMesh.obj"), [0, 0, 0], 1e-3,
                     description="Mesh file used for input scaling")

    # Write the neurons path to file
    self.write_tuning_info()

    from snudda.place.place import SnuddaPlace
    from snudda.detect.detect import SnuddaDetect
    from snudda.detect.prune import SnuddaPrune

    sp = SnuddaPlace(network_path=self.network_path)
    sp.parse_config()
    sp.write_data()

    sd = SnuddaDetect(network_path=self.network_path)
    sd.detect()

    sp = SnuddaPrune(network_path=self.network_path)
    sp.prune()
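# Illustration only (not Snudda's actual implementation) of the reasoning in the TODO above:
# with num_replicas copies of each neuron, the input counts assigned per copy can step through
# the configured range in num_replicas steps. n_min and n_max here are hypothetical stand-ins
# for the values given in the input configuration.
n_min, n_max, num_replicas = 10, 100, 10
input_steps = np.linspace(n_min, n_max, num_replicas).astype(int)
print(input_steps)  # [ 10  20  30  40  50  60  70  80  90 100]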
def prune_synapses(self, args):
    # self.networkPath = args.path
    print("Prune synapses")
    print(f"Network path: {self.network_path}")

    from snudda.detect.prune import SnuddaPrune

    log_filename = os.path.join(self.network_path, "log", "logFile-synapse-pruning.txt")
    random_seed = args.randomseed

    self.setup_log_file(log_filename)  # sets self.logfile

    if args.parallel:
        self.setup_parallel()  # sets self.d_view and self.lb_view

    # Optionally set this
    scratch_path = None

    pre_merge_only = args.merge_only
    print(f"pre_merge_only : {pre_merge_only}")

    if args.h5legacy:
        h5libver = "earliest"
    else:
        h5libver = "latest"  # default

    sp = SnuddaPrune(network_path=self.network_path,
                     logfile=self.logfile,
                     logfile_name=log_filename,
                     config_file=args.config_file,
                     d_view=self.d_view,
                     lb_view=self.lb_view,
                     scratch_path=scratch_path,
                     h5libver=h5libver,
                     random_seed=random_seed,
                     verbose=args.verbose)
    sp.prune(pre_merge_only=pre_merge_only)

    self.stop_parallel()
    self.close_log_file()
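# A minimal argparse sketch of the CLI surface prune_synapses() expects. The flag spellings are
# assumptions inferred from the attribute names used above (args.randomseed, args.parallel,
# args.merge_only, args.h5legacy, args.config_file, args.verbose), not the project's actual parser.
import argparse

parser = argparse.ArgumentParser(description="Prune synapses (sketch)")
parser.add_argument("path", help="Network path")
parser.add_argument("--randomseed", type=int, default=None)
parser.add_argument("--parallel", action="store_true")
parser.add_argument("--merge-only", dest="merge_only", action="store_true")
parser.add_argument("--h5legacy", action="store_true")
parser.add_argument("--config-file", dest="config_file", default=None)
parser.add_argument("--verbose", action="store_true")

args = parser.parse_args(["my_network", "--parallel"])  # example invocation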
def prune_network(self, pruning_config=None, fig_name=None, title=None, verbose=False,
                  plot_network=True, random_seed=None, n_repeats=None):

    if n_repeats is None:
        n_repeats = self.n_repeats

    work_log = os.path.join(self.network_path, "log", "network-detect-worklog.hdf5")
    pruned_output = os.path.join(self.network_path, "network-synapses.hdf5")

    if pruning_config is not None and not os.path.exists(pruning_config):
        pruning_config = os.path.join(self.network_path, pruning_config)

    # We keep the temp files
    sp = SnuddaPrune(network_path=self.network_path,
                     config_file=pruning_config,
                     verbose=verbose,
                     keep_files=True,
                     random_seed=random_seed)  # Uses default config file if pruning_config is None
    sp.prune()

    n_synapses = sp.out_file["network/synapses"].shape[0]
    n_gap_junctions = sp.out_file["network/gapJunctions"].shape[0]
    sp = []

    plot_axon = True
    plot_dendrite = True
    # plot_axon = np.ones((20,), dtype=bool)
    # plot_dendrite = np.ones((20,), dtype=bool)
    # plot_axon[:10] = False
    # plot_dendrite[10:] = False

    if plot_network:
        pn = PlotNetwork(pruned_output)
        plt, ax = pn.plot(fig_name=fig_name, show_axis=False,
                          plot_axon=plot_axon, plot_dendrite=plot_dendrite,
                          title=title, title_pad=-14, elev_azim=(90, 0))

        if n_repeats > 1:
            n_syn_mean, n_syn_std, _, _ = self.gather_pruning_statistics(pruning_config=pruning_config,
                                                                         n_repeats=n_repeats)
            # Raw f-string so \pm reaches matplotlib's mathtext intact
            plt.figtext(0.5, 0.15, rf"(${n_syn_mean:.1f} \pm {n_syn_std:.1f}$)", ha="center", fontsize=16)
            plt.savefig(fig_name, dpi=300, bbox_inches="tight")

    # Load the pruned data and check it
    # sl = SnuddaLoad(pruned_output)

    return n_synapses, n_gap_junctions
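# A hypothetical sketch of what gather_pruning_statistics() could look like, assuming it reruns
# pruning n_repeats times with fresh random seeds and summarises the resulting counts; the repo's
# real helper may differ. The return order matches the unpacking above (syn mean/std first).
def gather_pruning_statistics_sketch(self, pruning_config, n_repeats):
    n_syn = np.zeros((n_repeats,))
    n_gj = np.zeros((n_repeats,))
    for i in range(n_repeats):
        n_syn[i], n_gj[i] = self.prune_network(pruning_config=pruning_config,
                                               plot_network=False,
                                               random_seed=None)  # None -> new seed each repeat
    return np.mean(n_syn), np.std(n_syn), np.mean(n_gj), np.std(n_gj)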
def prune_network(self, pruning_config=None, fig_name=None, title=None):
    work_log = os.path.join(self.network_path, "log", "network-detect-worklog.hdf5")
    pruned_output = os.path.join(self.network_path, "network-synapses.hdf5")

    if pruning_config is not None and not os.path.exists(pruning_config):
        pruning_config = os.path.join(self.network_path, pruning_config)

    sp = SnuddaPrune(network_path=self.network_path,
                     config_file=pruning_config)  # Uses default config file if pruning_config is None
    sp.prune(pre_merge_only=False)
    sp = []

    plot_axon = True
    plot_dendrite = True
    # plot_axon = np.ones((20,), dtype=bool)
    # plot_dendrite = np.ones((20,), dtype=bool)
    # plot_axon[:10] = False
    # plot_dendrite[10:] = False

    pn = PlotNetwork(pruned_output)
    plt, ax = pn.plot(fig_name=fig_name, show_axis=False,
                      plot_axon=plot_axon, plot_dendrite=plot_dendrite,
                      title=title, title_pad=-14, elev_azim=(90, 0))
def __init__(self):
    if os.path.dirname(__file__):
        os.chdir(os.path.dirname(__file__))

    self.network_path = "touch_detection_illustration_network"
    self.config_file = os.path.join(self.network_path, "network-config.json")
    self.position_file = os.path.join(self.network_path, "network-neuron-positions.hdf5")
    self.save_file = os.path.join(self.network_path, "voxels", "network-putative-synapses.hdf5")

    create_cube_mesh(file_name=os.path.join(self.network_path, "mesh", "simple_mesh.obj"),
                     centre_point=(0, 0, 0),
                     side_len=500e-6)

    sp = SnuddaPlace(config_file=self.config_file, d_view=None)
    sp.parse_config()
    sp.write_data(self.position_file)

    self.sd = SnuddaDetect(config_file=self.config_file,
                           position_file=self.position_file,
                           save_file=self.save_file,
                           rc=None,
                           hyper_voxel_size=150)

    # Reposition the neurons so we know how many synapses there will be,
    # and where they will be located, before pruning
    neuron_positions = np.array([[0, 59, 0],  # Postsynaptic neurons
                                 [0, 89, 0],
                                 [0, 119, 0],
                                 [0, 149, 0],
                                 [0, 179, 0],
                                 [0, 209, 0],
                                 [0, 239, 0],
                                 [0, 269, 0],
                                 [0, 299, 0],
                                 [0, 329, 0],
                                 [59, 0, 0],  # Presynaptic neurons
                                 [89, 0, 0],
                                 [119, 0, 0],
                                 [149, 0, 0],
                                 [179, 0, 0],
                                 [209, 0, 0],
                                 [239, 0, 0],
                                 [269, 0, 0],
                                 [299, 0, 0],
                                 [329, 0, 0]]) * 1e-6

    for idx, pos in enumerate(neuron_positions):
        self.sd.neurons[idx]["position"] = pos

    # Rotate the postsynaptic neurons -90 degrees around the x-axis ...
    ang = -np.pi / 2
    R_x = np.array([[1, 0, 0],
                    [0, np.cos(ang), -np.sin(ang)],
                    [0, np.sin(ang), np.cos(ang)]])

    # ... and the presynaptic neurons +90 degrees around the y-axis
    ang = np.pi / 2
    R_y = np.array([[np.cos(ang), 0, np.sin(ang)],
                    [0, 1, 0],
                    [-np.sin(ang), 0, np.cos(ang)]])

    for idx in range(0, 10):  # Postsynaptic neurons
        self.sd.neurons[idx]["rotation"] = R_x

    for idx in range(10, 20):  # Presynaptic neurons
        self.sd.neurons[idx]["rotation"] = R_y

    self.sd.detect(restart_detection_flag=True)

    # Also update the place file so the new positions are saved there
    rn = RepositionNeurons(self.position_file)
    for neuron_info in self.sd.neurons:
        rn.place(neuron_info["neuronID"],
                 position=neuron_info["position"],
                 rotation=neuron_info["rotation"],
                 verbose=False)
    rn.close()

    sp = SnuddaPrune(network_path=self.network_path)  # Use default config file
    sp.prune()
    sp = []
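# Quick illustrative sanity check that R_x and R_y above are proper rotation matrices:
# orthonormal (R @ R.T == I) with determinant +1, so the morphologies are rotated rigidly,
# with no reflection or scaling.
ang = np.pi / 2
R_y = np.array([[np.cos(ang), 0, np.sin(ang)],
                [0, 1, 0],
                [-np.sin(ang), 0, np.cos(ang)]])

assert np.allclose(R_y @ R_y.T, np.eye(3))
assert np.isclose(np.linalg.det(R_y), 1.0)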
def setUp(self):
    from snudda.place.create_cube_mesh import create_cube_mesh

    # Create cube meshes
    self.network_path = os.path.join("networks", "network_testing_project")
    mesh_file_a = os.path.join(self.network_path, "mesh", "volume_A.obj")
    mesh_file_b = os.path.join(self.network_path, "mesh", "volume_B.obj")

    create_cube_mesh(mesh_file_a, [5e-3, 0, 0], 300e-6, "Volume A - connect structures example")
    create_cube_mesh(mesh_file_b, [-5e-3, 0, 0], 300e-6, "Volume B - connect structures example")

    # Define network
    from snudda.init.init import SnuddaInit
    cnc = SnuddaInit(network_path=self.network_path, random_seed=123)

    cnc.define_structure(struct_name="VolumeA", struct_mesh=mesh_file_a, d_min=15e-6, mesh_bin_width=50e-6)
    cnc.define_structure(struct_name="VolumeB", struct_mesh=mesh_file_b, d_min=15e-6, mesh_bin_width=50e-6)

    cnc.add_neurons(name="dSPN", num_neurons=20, volume_id="VolumeA",
                    neuron_dir=os.path.join("$DATA", "neurons", "striatum", "dspn"))
    cnc.add_neurons(name="iSPN", num_neurons=20, volume_id="VolumeB",
                    neuron_dir=os.path.join("$DATA", "neurons", "striatum", "ispn"))

    # Add the projection we want to test, dSPN -> iSPN
    proj_file = os.path.join("data", "ExampleProjection.json")

    cnc.neuron_projection(neuron_name="dSPN",
                          target_name="iSPN",
                          projection_name="ExampleProjection",
                          projection_file=proj_file,
                          source_volume="VolumeA",
                          dest_volume="VolumeB",
                          projection_radius=100e-6,
                          number_of_targets=[10, 5],
                          number_of_synapses=[10, 5],
                          dendrite_synapse_density="1",
                          connection_type="GABA",
                          dist_pruning=None,
                          f1=0.9,
                          soft_max=None,
                          mu2=None,
                          a3=None)

    # Also add dSPN-dSPN and iSPN-iSPN synapses.
    # Note that we do NOT add dSPN-iSPN again this way, as that would overwrite the connections above
    # (the neuron_projection call above also does normal touch detection).
    SPN2SPNdistDepPruning = "1-exp(-(0.4*d/60e-6)**2)"
    MSD1gGABA = [0.24e-9, 0.1e-9]
    MSD2gGABA = [0.24e-9, 0.1e-9]
    MSD1GABAfailRate = 0.7  # Taverna 2008, figure 2
    MSD2GABAfailRate = 0.4  # Taverna 2008, 2 mM

    pfdSPNdSPN = os.path.join("$DATA", "synapses", "striatum", "PlanertFitting-DD-tmgaba-fit.json")
    pfdSPNiSPN = os.path.join("$DATA", "synapses", "striatum", "PlanertFitting-DI-tmgaba-fit.json")
    pfiSPNdSPN = os.path.join("$DATA", "synapses", "striatum", "PlanertFitting-ID-tmgaba-fit.json")
    pfiSPNiSPN = os.path.join("$DATA", "synapses", "striatum", "PlanertFitting-II-tmgaba-fit.json")

    cnc.add_neuron_target(neuron_name="dSPN",
                          target_name="dSPN",
                          connection_type="GABA",
                          dist_pruning=SPN2SPNdistDepPruning,
                          f1=0.38, soft_max=3, mu2=2.4, a3=1.0,
                          conductance=MSD1gGABA,
                          parameter_file=pfdSPNdSPN,
                          mod_file="tmGabaA",
                          channel_param_dictionary={"tau1": (1.3e-3, 1e3),
                                                    "tau2": (12.4e-3, 1e3),
                                                    "failRate": MSD1GABAfailRate})

    cnc.add_neuron_target(neuron_name="iSPN",
                          target_name="iSPN",
                          connection_type="GABA",
                          dist_pruning=SPN2SPNdistDepPruning,
                          f1=0.55, soft_max=4, mu2=2.4, a3=1.0,
                          conductance=MSD2gGABA,
                          parameter_file=pfiSPNiSPN,
                          mod_file="tmGabaA",
                          channel_param_dictionary={"tau1": (1.3e-3, 1e3),
                                                    "tau2": (12.4e-3, 1e3),
                                                    "failRate": MSD2GABAfailRate})

    cnc.write_json()

    # Place neurons, then detect, project and prune
    from snudda.place.place import SnuddaPlace
    sp = SnuddaPlace(network_path=self.network_path, verbose=True)
    sp.parse_config()
    sp.write_data()

    from snudda.detect.detect import SnuddaDetect
    sd = SnuddaDetect(network_path=self.network_path, hyper_voxel_size=100, verbose=True)
    sd.detect()

    from snudda.detect.project import SnuddaProject
    sp = SnuddaProject(network_path=self.network_path)
    sp.project()
    sp.write()

    from snudda.detect.prune import SnuddaPrune
    sp = SnuddaPrune(network_path=self.network_path, verbose=True)
    sp.prune()
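# Illustrative evaluation of the distance-dependent pruning expression used above. Snudda
# evaluates such strings with d bound to the dendritic distance to the soma (in metres), and the
# value acts as a keep-probability: with "1-exp(-(0.4*d/60e-6)**2)", proximal synapses are pruned
# heavily while distal ones mostly survive.
d = np.array([10e-6, 60e-6, 150e-6, 300e-6])  # distances to soma in metres
keep_p = 1 - np.exp(-(0.4 * d / 60e-6) ** 2)
print(keep_p)  # approximately [0.004, 0.15, 0.63, 0.98]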
def test_project(self):
    # Are there connections dSPN -> iSPN?
    from snudda.utils.load import SnuddaLoad
    network_file = os.path.join(self.network_path, "network-synapses.hdf5")
    sl = SnuddaLoad(network_file)

    dspn_id_list = sl.get_cell_id_of_type("dSPN")
    ispn_id_list = sl.get_cell_id_of_type("iSPN")

    tot_proj_ctr = 0
    for dspn_id in dspn_id_list:
        for ispn_id in ispn_id_list:
            synapses, synapse_coords = sl.find_synapses(pre_id=dspn_id, post_id=ispn_id)
            if synapses is not None:
                tot_proj_ctr += synapses.shape[0]

    with self.subTest(stage="projection_exists"):
        # There should be projection synapses between dSPN and iSPN in this toy example
        self.assertTrue(tot_proj_ctr > 0)

    tot_dd_syn_ctr = 0
    for dspn_id in dspn_id_list:
        for dspn_id2 in dspn_id_list:
            synapses, synapse_coords = sl.find_synapses(pre_id=dspn_id, post_id=dspn_id2)
            if synapses is not None:
                tot_dd_syn_ctr += synapses.shape[0]

    tot_ii_syn_ctr = 0
    for ispn_id in ispn_id_list:
        for ispn_id2 in ispn_id_list:
            synapses, synapse_coords = sl.find_synapses(pre_id=ispn_id, post_id=ispn_id2)
            if synapses is not None:
                tot_ii_syn_ctr += synapses.shape[0]

    with self.subTest(stage="normal_synapses_exist"):
        # In this toy example the neurons are quite sparsely placed,
        # but we should have at least some synapses
        self.assertTrue(tot_dd_syn_ctr > 0)
        self.assertTrue(tot_ii_syn_ctr > 0)

    # We also need to run in parallel to verify that we get the same result (same random seed)
    serial_synapses = sl.data["synapses"].copy()
    del sl  # Close the old file so we can overwrite it

    os.environ["IPYTHONDIR"] = os.path.join(os.path.abspath(os.getcwd()), ".ipython")
    os.environ["IPYTHON_PROFILE"] = "default"
    os.system("ipcluster start -n 4 --profile=$IPYTHON_PROFILE --ip=127.0.0.1&")
    time.sleep(10)

    # Run place, detect and prune in parallel by passing rc
    from ipyparallel import Client
    u_file = os.path.join(".ipython", "profile_default", "security", "ipcontroller-client.json")
    rc = Client(url_file=u_file, timeout=120, debug=False)
    d_view = rc.direct_view(targets='all')  # rc[:] -- direct view into the clients

    from snudda.detect.detect import SnuddaDetect
    sd = SnuddaDetect(network_path=self.network_path, hyper_voxel_size=100, rc=rc, verbose=True)
    sd.detect()

    from snudda.detect.project import SnuddaProject
    # TODO: Currently SnuddaProject only runs in serial
    sp = SnuddaProject(network_path=self.network_path)
    sp.project()
    sp.write()

    from snudda.detect.prune import SnuddaPrune
    # Prune has different code paths for serial and parallel execution, so it is important to test both!
    sp = SnuddaPrune(network_path=self.network_path, rc=rc, verbose=True)
    sp.prune()

    with self.subTest(stage="check-parallel-identical"):
        sl2 = SnuddaLoad(network_file)
        parallel_synapses = sl2.data["synapses"].copy()

        # ParameterID, sec_X etc. are randomised within each hyper voxel, so you need to use the
        # same hyper voxel size for reproducibility between serial and parallel execution.

        # All synapses should be identical regardless of serial or parallel execution path
        self.assertTrue((serial_synapses == parallel_synapses).all())

    os.system("ipcluster stop")
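# Note on the cluster handling above: os.system("ipcluster stop") is never reached if an
# assertion fails first, leaving a stray cluster running. A slightly more robust pattern
# (a sketch using the same commands) wraps the parallel stage in try/finally:
import os
import time

try:
    os.system("ipcluster start -n 4 --profile=$IPYTHON_PROFILE --ip=127.0.0.1&")
    time.sleep(10)
    # ... run detect/project/prune and the assertions here ...
finally:
    os.system("ipcluster stop")  # always tear the cluster down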
def test_prune(self):
    pruned_output = os.path.join(self.network_path, "network-synapses.hdf5")

    with self.subTest(stage="No-pruning"):
        sp = SnuddaPrune(network_path=self.network_path, config_file=None,
                         verbose=True, keep_files=True)  # Use default config file
        sp.prune()
        sp = []

        # Load the pruned data and check it
        sl = SnuddaLoad(pruned_output)

        # TODO: Call a plot function to plot the entire network with synapses and all
        self.assertEqual(sl.data["nSynapses"], (20 * 8 + 10 * 2) * 2)  # Update: now AMPA+GABA, hence *2 at the end

        # This checks that all synapses are in order.
        # The synapse sort order is destID, sourceID, synapseType (channel model ID).
        syn = sl.data["synapses"][:sl.data["nSynapses"], :]
        syn_order = (syn[:, 1] * len(self.sd.neurons) + syn[:, 0]) * 12 + syn[:, 6]  # The 12 is maxChannelModelID
        self.assertTrue((np.diff(syn_order) >= 0).all())

        # Note that the channel model ID is dynamically allocated, starting from 10 (GJs have ID 3)
        # Check that we have the correct number of each type
        self.assertEqual(np.sum(sl.data["synapses"][:, 6] == 10), 20 * 8 + 10 * 2)
        self.assertEqual(np.sum(sl.data["synapses"][:, 6] == 11), 20 * 8 + 10 * 2)

        self.assertEqual(sl.data["nGapJunctions"], 4 * 4 * 4)
        gj = sl.data["gapJunctions"][:sl.data["nGapJunctions"], :2]
        gj_order = gj[:, 1] * len(self.sd.neurons) + gj[:, 0]
        self.assertTrue((np.diff(gj_order) >= 0).all())

    with self.subTest(stage="load-testing"):
        sl = SnuddaLoad(pruned_output, verbose=True)

        # Try to load a neuron
        n = sl.load_neuron(0)
        self.assertTrue(type(n) == NeuronMorphology)

        syn_ctr = 0
        for s in sl.synapse_iterator(chunk_size=50):
            syn_ctr += s.shape[0]
        self.assertEqual(syn_ctr, sl.data["nSynapses"])

        gj_ctr = 0
        for gj in sl.gap_junction_iterator(chunk_size=50):
            gj_ctr += gj.shape[0]
        self.assertEqual(gj_ctr, sl.data["nGapJunctions"])

        syn, syn_coords = sl.find_synapses(pre_id=14)
        self.assertTrue((syn[:, 0] == 14).all())
        self.assertEqual(syn.shape[0], 40)

        syn, syn_coords = sl.find_synapses(post_id=3)
        self.assertTrue((syn[:, 1] == 3).all())
        self.assertEqual(syn.shape[0], 36)

        cell_id_perm = sl.get_cell_id_of_type("ballanddoublestick", random_permute=True, num_neurons=28)
        cell_id = sl.get_cell_id_of_type("ballanddoublestick", random_permute=False)

        self.assertEqual(len(cell_id_perm), 28)
        self.assertEqual(len(cell_id), 28)

        for cid in cell_id_perm:
            self.assertTrue(cid in cell_id)

    # It is important that the merge file has synapses sorted with (dest_id, source_id) as the
    # sort order, since during pruning we rely on this to quickly find all synapses on the
    # postsynaptic cell.
    # TODO: Also include the channelModelID in the sorting check
    with self.subTest("Checking-merge-file-sorted"):
        for mf in ["temp/synapses-for-neurons-0-to-28-MERGE-ME.hdf5",
                   "temp/gapJunctions-for-neurons-0-to-28-MERGE-ME.hdf5",
                   "network-synapses.hdf5"]:
            merge_file = os.path.join(self.network_path, mf)

            sl = SnuddaLoad(merge_file, verbose=True)

            if "synapses" in sl.data:
                syn = sl.data["synapses"][:sl.data["nSynapses"], :2]
                syn_order = syn[:, 1] * len(self.sd.neurons) + syn[:, 0]
                self.assertTrue((np.diff(syn_order) >= 0).all())

            if "gapJunctions" in sl.data:
                gj = sl.data["gapJunctions"][:sl.data["nGapJunctions"], :2]
                gj_order = gj[:, 1] * len(self.sd.neurons) + gj[:, 0]
                self.assertTrue((np.diff(gj_order) >= 0).all())

    with self.subTest("synapse-f1"):
        # Test of f1
        testing_config_file = os.path.join(self.network_path, "network-config-test-1.json")
        sp = SnuddaPrune(network_path=self.network_path, config_file=testing_config_file,
                         verbose=True, keep_files=True)
        sp.prune()

        # Load the pruned data and check it
        sl = SnuddaLoad(pruned_output, verbose=True)

        # Setting f1=0.5 in the config should remove 50% of the GABA synapses, but does so
        # randomly; for AMPA we used f1=0.9
        gaba_id = sl.data["connectivityDistributions"]["ballanddoublestick",
                                                       "ballanddoublestick"]["GABA"]["channelModelID"]
        ampa_id = sl.data["connectivityDistributions"]["ballanddoublestick",
                                                       "ballanddoublestick"]["AMPA"]["channelModelID"]

        n_gaba = np.sum(sl.data["synapses"][:, 6] == gaba_id)
        n_ampa = np.sum(sl.data["synapses"][:, 6] == ampa_id)

        self.assertTrue((20 * 8 + 10 * 2) * 0.5 - 10 < n_gaba < (20 * 8 + 10 * 2) * 0.5 + 10)
        self.assertTrue((20 * 8 + 10 * 2) * 0.9 - 10 < n_ampa < (20 * 8 + 10 * 2) * 0.9 + 10)

    with self.subTest("synapse-softmax"):
        # Test of soft_max
        testing_config_file = os.path.join(self.network_path,
                                           "network-config-test-2.json")  # Only GABA synapses in this config
        sp = SnuddaPrune(network_path=self.network_path, config_file=testing_config_file,
                         verbose=True, keep_files=True)
        sp.prune()

        # Load the pruned data and check it
        sl = SnuddaLoad(pruned_output)

        # soft_max reduces the number of synapses
        self.assertTrue(sl.data["nSynapses"] < 20 * 8 + 10 * 2)

    with self.subTest("synapse-mu2"):
        # Test of mu2
        testing_config_file = os.path.join(self.network_path, "network-config-test-3.json")
        sp = SnuddaPrune(network_path=self.network_path, config_file=testing_config_file,
                         verbose=True, keep_files=True)
        sp.prune()

        # Load the pruned data and check it
        sl = SnuddaLoad(pruned_output)

        # With mu2, a pair with 2 synapses has a 50% chance of being kept,
        # while a pair with only 1 synapse is likely to have it removed
        self.assertTrue(20 * 8 * 0.5 - 10 < sl.data["nSynapses"] < 20 * 8 * 0.5 + 10)

    with self.subTest("synapse-a3"):
        # Test of a3
        testing_config_file = os.path.join(self.network_path, "network-config-test-4.json")
        sp = SnuddaPrune(network_path=self.network_path, config_file=testing_config_file,
                         verbose=True, keep_files=True)
        sp.prune()

        # Load the pruned data and check it
        sl = SnuddaLoad(pruned_output)

        # a3=0.6 means a 40% chance to remove all synapses between a pair
        self.assertTrue((20 * 8 + 10 * 2) * 0.6 - 14 < sl.data["nSynapses"] < (20 * 8 + 10 * 2) * 0.6 + 14)

    with self.subTest("synapse-distance-dependent-pruning"):
        # Testing distance-dependent pruning
        testing_config_file = os.path.join(self.network_path, "network-config-test-5.json")
        sp = SnuddaPrune(network_path=self.network_path, config_file=testing_config_file,
                         verbose=True, keep_files=True)
        sp.prune()

        # Load the pruned data and check it
        sl = SnuddaLoad(pruned_output)

        # "1*(d >= 100e-6)" means we remove all synapses closer to the soma than 100 micrometers
        self.assertEqual(sl.data["nSynapses"], 20 * 6)
        self.assertTrue((sl.data["synapses"][:, 8] >= 100).all())  # Column 8 -- distance to soma in micrometers

    # TODO: The same tests are needed for gap junctions as well -- the results should match,
    #       since it is the same codebase

    with self.subTest("gap-junction-f1"):
        # Test of f1
        testing_config_file = os.path.join(self.network_path, "network-config-test-6.json")
        sp = SnuddaPrune(network_path=self.network_path, config_file=testing_config_file,
                         verbose=True, keep_files=True)
        sp.prune()

        # Load the pruned data and check it
        sl = SnuddaLoad(pruned_output)

        # Setting f1=0.7 in the config should remove 30% of the gap junctions, but does so randomly
        self.assertTrue(64 * 0.7 - 10 < sl.data["nGapJunctions"] < 64 * 0.7 + 10)

    with self.subTest("gap-junction-softmax"):
        # Test of soft_max
        testing_config_file = os.path.join(self.network_path, "network-config-test-7.json")
        sp = SnuddaPrune(network_path=self.network_path, config_file=testing_config_file,
                         verbose=True, keep_files=True)
        sp.prune()

        # Load the pruned data and check it
        sl = SnuddaLoad(pruned_output)

        # soft_max reduces the number of gap junctions
        self.assertTrue(sl.data["nGapJunctions"] < 16 * 2 + 10)

    with self.subTest("gap-junction-mu2"):
        # Test of mu2
        testing_config_file = os.path.join(self.network_path, "network-config-test-8.json")
        sp = SnuddaPrune(network_path=self.network_path, config_file=testing_config_file,
                         verbose=True, keep_files=True)
        sp.prune()

        # Load the pruned data and check it
        sl = SnuddaLoad(pruned_output)

        # With mu2, a pair with 4 gap junctions has a 50% chance of being kept,
        # while a pair with only 1 is likely to have it removed
        self.assertTrue(64 * 0.5 - 10 < sl.data["nGapJunctions"] < 64 * 0.5 + 10)

    with self.subTest("gap-junction-a3"):
        # Test of a3
        testing_config_file = os.path.join(self.network_path, "network-config-test-9.json")
        sp = SnuddaPrune(network_path=self.network_path, config_file=testing_config_file,
                         verbose=True, keep_files=True)
        sp.prune()

        # Load the pruned data and check it
        sl = SnuddaLoad(pruned_output, verbose=True)

        # a3=0.7 means a 30% chance to remove all gap junctions between a pair
        self.assertTrue(64 * 0.7 - 10 < sl.data["nGapJunctions"] < 64 * 0.7 + 10)

    if False:  # Distance-dependent pruning is currently not implemented for gap junctions
        with self.subTest("gap-junction-distance-dependent-pruning"):
            # Testing distance-dependent pruning
            testing_config_file = os.path.join(self.network_path, "network-config-test-10.json")
            sp = SnuddaPrune(network_path=self.network_path, config_file=testing_config_file,
                             verbose=True, keep_files=True)
            sp.prune()

            # Load the pruned data and check it
            sl = SnuddaLoad(pruned_output, verbose=True)

            # "1*(d <= 120e-6)" means we remove all gap junctions further from the soma than 120 micrometers
            self.assertEqual(sl.data["nGapJunctions"], 2 * 4 * 4)
            self.assertTrue((sl.data["gapJunctions"][:, 8] <= 120).all())  # Column 8 -- distance to soma in micrometers
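# An illustrative numpy sketch of the pruning rules these subtests exercise, following the
# semantics described in the comments above (f1: keep each synapse independently with probability
# f1; a3: keep all synapses of a pair with probability a3; mu2: pair-level keep probability that
# is 0.5 when the pair has mu2 synapses). The logistic used for mu2 is a stand-in for
# illustration; Snudda's exact curve may differ.
rng = np.random.default_rng(1234)

def prune_pair(n_syn, f1=1.0, mu2=None, a3=None):
    """Number of synapses kept for one pre/post pair that starts with n_syn putative synapses."""
    if mu2 is not None and rng.random() >= 1.0 / (1.0 + np.exp(-(n_syn - mu2))):
        return 0  # whole pair removed; keep probability is 0.5 when n_syn == mu2
    if a3 is not None and rng.random() >= a3:
        return 0  # a3: remove all synapses between the pair
    return rng.binomial(n_syn, f1)  # f1: each synapse kept independently

kept = sum(prune_pair(8, f1=0.5) for _ in range(20))
print(kept)  # close to 20 * 8 * 0.5 = 80, matching the bounds used in the f1 subtest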