def test_attach_plugins(tempdir):
    """Plugins coming from the config file AND from the `plugins` argument must both attach."""

    class MyController(object):
        pass

    # First plugin: discovered on disk via the plugin directories.
    plugin_source = dedent('''
        from phy import IPlugin
        class MyPlugin1(IPlugin):
            def attach_to_controller(self, controller):
                controller.plugin1 = True
    ''')
    write_text(tempdir / 'plugin1.py', plugin_source)

    # Second plugin: defined in the current process.
    class MyPlugin2(IPlugin):
        def attach_to_controller(self, controller):
            controller.plugin2 = True

    # The config file registers the plugin directory and requests MyPlugin1 by name.
    config_source = dedent('''
        c = get_config()
        c.Plugins.dirs = ['%s']
        c.MyController.plugins = ['MyPlugin1']
    ''' % tempdir)
    write_text(tempdir / 'phy_config.py', config_source)

    controller = MyController()
    attach_plugins(controller, plugins=['MyPlugin2'], config_dir=tempdir)

    # Chained comparison: both attach hooks ran and set their flag to True.
    assert controller.plugin1 == controller.plugin2 is True
def _make_dataset(tempdir, param='dense', has_spike_attributes=True):
    """Build a test template dataset inside `tempdir` and return the path to the
    main template file.

    Parameters: `param` selects the fixture flavor ('dense', 'sparse', or 'misc');
    `has_spike_attributes` controls whether extra per-spike attribute arrays are written.
    """
    # Fixed seed so the generated arrays are reproducible across runs.
    np.random.seed(0)
    # Download the dataset.
    paths = list(map(download_test_file, _FILES))
    # Copy the dataset to a temporary directory.
    for path in paths:
        to_path = tempdir / path.name
        # Skip sparse arrays if is_sparse is False.
        if param == 'sparse' and ('_ind.' in str(to_path) or 'spike_ids.' in str(to_path)):
            continue
        logger.debug("Copying file to %s.", to_path)
        shutil.copy(path, to_path)
    # Some changes to files if 'misc' fixture parameter.
    if param == 'misc':
        # Remove spike_clusters and recreate it from spike_templates.
        # NOTE(review): only the removal is visible here; the recreation presumably
        # happens downstream in the loader — confirm.
        _remove(tempdir / 'spike_clusters.npy')
        # Replace spike_times.npy, in samples, by spikes.times.npy, in seconds.
        if (tempdir / 'spike_times.npy').exists():
            st = np.load(tempdir / 'spike_times.npy').squeeze()
            # Randomly perturb the times to produce a deliberately unsorted copy.
            st_r = st + np.random.randint(low=-20000, high=+20000, size=st.size)
            assert st_r.shape == st.shape
            # Reordered spikes.
            np.save(tempdir / 'spike_times_reordered.npy', st_r)
            np.save(tempdir / 'spikes.times.npy', st / 25000.)  # sample rate
            _remove(tempdir / 'spike_times.npy')
        # Buggy TSV file should not cause a crash.
        write_text(tempdir / 'error.tsv', '')
        # Remove some non-necessary files.
        _remove(tempdir / 'template_features.npy')
        _remove(tempdir / 'pc_features.npy')
        _remove(tempdir / 'channel_probes.npy')
        _remove(tempdir / 'channel_shanks.npy')
        _remove(tempdir / 'amplitudes.npy')
        _remove(tempdir / 'whitening_mat.npy')
        _remove(tempdir / 'whitening_mat_inv.npy')
        _remove(tempdir / 'sim_binary.dat')
    # Spike attributes.
    if has_spike_attributes:
        write_array(tempdir / 'spike_fail.npy', np.full(10, np.nan))  # wrong number of spikes
        write_array(tempdir / 'spike_works.npy', np.random.rand(314))
        write_array(tempdir / 'spike_randn.npy', np.random.randn(314, 2))
    # TSV file with cluster data.
    write_tsv(
        tempdir / 'cluster_Amplitude.tsv', [{'cluster_id': 1, 'Amplitude': 123.4}],
        first_field='cluster_id')
    # A metrics TSV with heterogeneous columns (some clusters missing some fields).
    write_tsv(
        tempdir / 'cluster_metrics.tsv', [
            {'cluster_id': 2, 'met1': 123.4, 'met2': 'hello world 1'},
            {'cluster_id': 3, 'met1': 5.678},
            {'cluster_id': 5, 'met2': 'hello world 2'},
        ])
    # The first downloaded file is the main template file.
    template_path = tempdir / paths[0].name
    return template_path
def test_discover_plugins(tempdir, no_native_plugins):
    """A plugin class written to a .py file must be found by discover_plugins()."""
    plugin_path = tempdir / 'my_plugin.py'
    write_text(plugin_path, '''from phy import IPlugin\nclass MyPlugin(IPlugin): pass''')
    found = discover_plugins([tempdir])
    assert found
    assert found[0].__name__ == 'MyPlugin'
def py_config(tempdir):
    """Write a Python config file setting `MyConfigurable.my_var` and return its path."""
    path = tempdir / 'config.py'
    contents = """
    c = get_config()
    c.MyConfigurable.my_var = 1.0
    """
    write_text(path, contents)
    return path
def json_config(tempdir):
    """Write a JSON config file setting `MyConfigurable.my_var` and return its path."""
    path = tempdir / 'config.json'
    contents = """
    {
      "MyConfigurable": {
        "my_var": 1.0
      }
    }
    """
    write_text(path, contents)
    return path
def _make_dataset(tempdir, param='dense', has_spike_attributes=True):
    """Build a test template dataset inside `tempdir` and return the path to the
    main template file.

    Parameters: `param` selects the fixture flavor ('dense', 'sparse', or 'misc');
    `has_spike_attributes` controls whether extra per-spike attribute arrays are written.
    """
    # Download the dataset.
    paths = list(map(download_test_file, _FILES))
    # Copy the dataset to a temporary directory.
    for path in paths:
        to_path = tempdir / path.name
        # Skip sparse arrays if is_sparse is False.
        if param == 'sparse' and ('_ind.' in str(to_path) or 'spike_ids.' in str(to_path)):
            continue
        logger.debug("Copying file to %s.", to_path)
        shutil.copy(path, to_path)
    # Some changes to files if 'misc' fixture parameter.
    if param == 'misc':
        # Remove spike_clusters and recreate it from spike_templates.
        # NOTE(review): only the removal is visible here; the recreation presumably
        # happens downstream in the loader — confirm.
        _remove(tempdir / 'spike_clusters.npy')
        # Replace spike_times.npy, in samples, by spikes.times.npy, in seconds.
        if (tempdir / 'spike_times.npy').exists():
            st = np.load(tempdir / 'spike_times.npy')
            np.save(tempdir / 'spikes.times.npy', st / 25000.)  # sample rate
            _remove(tempdir / 'spike_times.npy')
        # Buggy TSV file should not cause a crash.
        write_text(tempdir / 'error.tsv', '')
        # Remove some non-necessary files.
        _remove(tempdir / 'template_features.npy')
        _remove(tempdir / 'pc_features.npy')
        _remove(tempdir / 'channel_shanks.npy')
        _remove(tempdir / 'amplitudes.npy')
        _remove(tempdir / 'whitening_mat.npy')
        _remove(tempdir / 'whitening_mat_inv.npy')
        _remove(tempdir / 'sim_binary.dat')
    # Spike attributes.
    if has_spike_attributes:
        write_array(tempdir / 'spike_fail.npy', np.full(10, np.nan))  # wrong number of spikes
        write_array(tempdir / 'spike_works.npy', np.random.rand(314))
        write_array(tempdir / 'spike_randn.npy', np.random.randn(314, 2))
    # The first downloaded file is the main template file.
    template_path = tempdir / paths[0].name
    return template_path