def add_cluster(self, id=None, color=None, **kwargs):
    """Create a new cluster in this clustering and refresh the view.

    The cluster id is always stored as a string.  The channel group id and
    the clustering name are derived from the wrapped HDF5 node's position
    (presumably .../channel_groups/<group>/clusters/<clustering> — the
    group is two parents up, the clustering is the node itself).
    """
    group_name = self._node._v_parent._v_parent._v_name
    clustering_name = self._node._v_name
    add_cluster(self._files,
                channel_group_id=group_name,
                color=color,
                id=str(id),
                clustering=clustering_name,
                **kwargs)
    # Refresh cached state after mutating the underlying files.
    self._update()
def _setup(_name, has_masks=True):
    """Create a small test dataset on disk, populate it, and close it.

    One shank (group 0) with three channels and one feature per channel.
    """
    # File parameters.
    params = {'nfeatures': 3,
              'waveforms_nsamples': 10,
              'nchannels': 3,
              'sample_rate': 20000.,
              'nfeatures_per_channel': 1,
              'has_masks': has_masks
              }
    # Probe layout: channels 4, 6, 8 wired into a single group.
    probe = {0:
             {
                 'channels': [4, 6, 8],
                 'graph': [[4, 6], [8, 4]],
                 'geometry': {4: [0.4, 0.6], 6: [0.6, 0.8], 8: [0.8, 0.0]},
             }
             }
    create_files(_name, dir=DIRPATH, prm=params, prb=probe)

    # Open the files in append mode and fill them with test data.
    files = open_files(_name, dir=DIRPATH, mode='a')
    # NOTE(review): band_high (100.) < band_low (500.) — these look swapped,
    # but they are kept as-is to preserve the original fixture values.
    add_recording(files,
                  sample_rate=20000.,
                  start_time=10.,
                  start_sample=200000.,
                  bit_depth=16,
                  band_high=100.,
                  band_low=500.,
                  nchannels=3,)
    add_event_type(files, 'myevents')
    add_cluster_group(files, channel_group_id='0', id='0', name='Noise')
    add_cluster(files, channel_group_id='0', cluster_group=0)

    # Close the files.
    close_files(files)
def _setup(_name, has_masks=True):
    """Create, populate, and close a test dataset with 1000 random spikes.

    One shank (group 0) with three channels and three features per channel.
    """
    # File parameters.
    params = {
        'nfeatures': 3 * 3,
        'waveforms_nsamples': 10,
        'nchannels': 3,
        'sample_rate': 20000.,
        'nfeatures_per_channel': 3,
        'has_masks': has_masks
    }
    # Probe layout: channels 0-2 in a single group.
    probe = {
        0: {
            'channels': [0, 1, 2],
            'graph': [[0, 1], [0, 2]],
            'geometry': {
                0: [0.4, 0.6], 1: [0.6, 0.8], 2: [0.8, 0.0]
            },
        }
    }
    create_files(_name, dir=DIRPATH, prm=params, prb=probe)

    # Open the files in append mode and add test data.
    files = open_files(_name, dir=DIRPATH, mode='a')
    # NOTE(review): band_high (100.) < band_low (500.) — possibly swapped;
    # left unchanged to preserve the fixture.
    add_recording(
        files,
        sample_rate=20000.,
        bit_depth=16,
        band_high=100.,
        band_low=500.,
        nchannels=3,
    )
    add_event_type(files, 'myevents')
    # The four standard cluster groups.
    add_cluster_group(files, channel_group_id='0', id='0', name='Noise')
    add_cluster_group(files, channel_group_id='0', id='1', name='MUA')
    add_cluster_group(files, channel_group_id='0', id='2', name='Good')
    add_cluster_group(files, channel_group_id='0', id='3', name='Unsorted')
    add_cluster(files, channel_group_id='0', cluster_group=0)
    # 1000 spikes: random clusters in 5..9, monotonically increasing
    # sample times, random int16 features and masks.
    add_spikes(
        files,
        channel_group_id='0',
        cluster=np.random.randint(5, 10, 1000),
        time_samples=np.cumsum(np.random.randint(0, 1000, 1000)).astype(np.int64),
        features=np.random.randint(-30000, 30000, (1000, 9)).astype(np.int16),
        masks=np.random.randint(0, 2, (1000, 9)).astype(np.int16),
    )

    # Close the files.
    close_files(files)
def _consistency_check(self):
    """Ensure every cluster referenced by a spike exists in the kwik file.

    Clusters present in the spike/cluster assignment of this shank but
    missing from the channel group's cluster list are added with a default
    color and the "Unsorted" cluster group (group #3), with a warning.
    """
    exp = self.experiment
    chgrp = self.shank
    cg = exp.channel_groups[chgrp]
    clusters = cg.clusters.main.keys()
    clusters_unique = np.unique(cg.spikes.clusters.main[:])
    # Find missing clusters in the kwik file.
    missing = sorted(set(clusters_unique) - set(clusters))
    for idx in missing:
        warn("Consistency check: adding cluster %d in the kwik file" % idx)
        # Fix: pass the id as a string, as add_missing_clusters and the
        # add_cluster wrapper do (previously a raw int was passed).
        add_cluster(exp._files, channel_group_id='%d' % chgrp, id=str(idx),
                    clustering='main', cluster_group=3)
def add_missing_clusters(exp):
    """Add every cluster referenced by spikes but absent from the kwik file.

    Walks all shanks in order; each missing cluster is created with a
    default color and the "Unsorted" cluster group (group #3).
    """
    for shank in sorted(exp.channel_groups.keys()):
        cg = exp.channel_groups[shank]
        known = cg.clusters.main.keys()
        referenced = np.unique(cg.spikes.clusters.main[:])
        # Clusters that appear in the spike assignment but not in the file.
        for cluster_id in sorted(set(referenced) - set(known)):
            info("Adding missing cluster %d in shank %d." % (cluster_id, shank))
            add_cluster(exp._files,
                        channel_group_id='%d' % shank,
                        id=str(cluster_id),
                        clustering='main',
                        cluster_group=3)
def _consistency_check(self):
    """Add to the kwik file any cluster referenced by spikes on this shank.

    Any cluster id found in the spike/cluster assignment but absent from
    the channel group's clusters is created with a default color and the
    "Unsorted" cluster group (group #3); each addition is warned about.
    """
    exp = self.experiment
    chgrp = self.shank
    cg = exp.channel_groups[chgrp]
    clusters = cg.clusters.main.keys()
    clusters_unique = np.unique(cg.spikes.clusters.main[:])
    # Find missing clusters in the kwik file.
    missing = sorted(set(clusters_unique) - set(clusters))
    for idx in missing:
        warn("Consistency check: adding cluster %d in the kwik file" % idx)
        # Fix: stringify the id, consistently with add_missing_clusters
        # (previously the raw integer was passed through).
        add_cluster(exp._files, channel_group_id='%d' % chgrp, id=str(idx),
                    clustering='main', cluster_group=3)
def add_missing_clusters(exp):
    """Create any spike-referenced cluster missing from the kwik file.

    For each shank (in sorted order), compares the clusters declared under
    the channel group against those actually used by its spikes, and adds
    the missing ones with a default color and the "Unsorted" cluster group
    (group #3).
    """
    shanks = sorted(exp.channel_groups.keys())
    for shank in shanks:
        group = exp.channel_groups[shank]
        declared = set(group.clusters.main.keys())
        used = set(np.unique(group.spikes.clusters.main[:]))
        missing = sorted(used - declared)
        for idx in missing:
            info("Adding missing cluster %d in shank %d." % (idx, shank))
            add_cluster(exp._files,
                        channel_group_id='%d' % shank,
                        id=str(idx),
                        clustering='main',
                        cluster_group=3)
def _setup(_name, has_masks=True):
    """Build a throwaway test dataset: files, one recording, 1000 spikes."""
    # File parameters: three features per channel over 3 channels.
    params = {
        "nfeatures": 3 * 3,
        "waveforms_nsamples": 10,
        "nchannels": 3,
        "sample_rate": 20000.0,
        "nfeatures_per_channel": 3,
        "has_masks": has_masks,
    }
    # One shank (group 0) with channels 0-2.
    probe = {
        0: {"channels": [0, 1, 2], "graph": [[0, 1], [0, 2]],
            "geometry": {0: [0.4, 0.6], 1: [0.6, 0.8], 2: [0.8, 0.0]}}
    }
    create_files(_name, dir=DIRPATH, prm=params, prb=probe)

    # Open in append mode and populate.
    files = open_files(_name, dir=DIRPATH, mode="a")
    # NOTE(review): band_high (100.0) < band_low (500.0) — the values look
    # swapped, but are kept unchanged to preserve the fixture exactly.
    add_recording(files, sample_rate=20000.0, bit_depth=16, band_high=100.0,
                  band_low=500.0, nchannels=3)
    add_event_type(files, "myevents")
    # The four standard cluster groups, in order.
    for gid, gname in (("0", "Noise"), ("1", "MUA"),
                       ("2", "Good"), ("3", "Unsorted")):
        add_cluster_group(files, channel_group_id="0", id=gid, name=gname)
    add_cluster(files, channel_group_id="0", cluster_group=0)
    # 1000 random spikes: clusters in 5..9, cumulative sample times,
    # int16 features and masks.
    add_spikes(
        files,
        channel_group_id="0",
        cluster=np.random.randint(5, 10, 1000),
        time_samples=np.cumsum(np.random.randint(0, 1000, 1000)).astype(np.int64),
        features=np.random.randint(-30000, 30000, (1000, 9)).astype(np.int16),
        masks=np.random.randint(0, 2, (1000, 9)).astype(np.int16),
    )
    # Close the files.
    close_files(files)