def create_multisession_fet_files(self): if not os.path.exists(self.clustersDir): print 'Creating clusters directory: %s'%(self.clustersDir) os.makedirs(self.clustersDir) if self.samples is None: self.load_all_waveforms() self.featureValues = spikesorting.calculate_features(self.samples,self.featureNames) spikesorting.write_fet_file(self.fetFilename, self.featureValues)
def create_multisession_fet_files(self):
    '''
    Calculate spike features for the loaded waveforms and write them to
    the .fet file used by KlustaKwik, creating the clusters directory
    if needed.

    NOTE(review): this is nearly identical to another method of the same
    name elsewhere in this file, except this one calls
    self.load_waveforms() -- presumably the two belong to different
    classes; confirm the loader name matches this class's API.
    '''
    if not os.path.exists(self.clustersDir):
        print 'Creating clusters directory: %s'%(self.clustersDir)
        os.makedirs(self.clustersDir)
    # Lazy-load waveforms only when they have not been loaded yet.
    if self.samples is None:
        self.load_waveforms()
    self.featureValues = spikesorting.calculate_features(self.samples,self.featureNames)
    spikesorting.write_fet_file(self.fetFilename, self.featureValues)
from pylab import *

# Tetrode recording geometry.
N_CHANNELS = 4           # channels per tetrode
SAMPLES_PER_SPIKE = 40   # waveform samples stored per spike

# Locate the .spikes file for this animal/session/tetrode.
dataDir = os.path.join(settings.EPHYS_PATH,'%s/%s/'%(animalName,ephysSession))
tetrodeFile = os.path.join(dataDir,'Tetrode%d.spikes'%tetrode)
dataTT = loadopenephys.DataSpikes(tetrodeFile)
# Dividing by 0.03 converts timestamps to microseconds
# (assumes a 30 kHz sampling rate -- TODO confirm).
dataTT.timestamps = dataTT.timestamps/0.03 # in microsec
# Shift samples by 2**15 -- presumably recentering unsigned 16-bit
# ADC values around zero; verify against the acquisition format.
dataTT.samples = dataTT.samples.astype(float)-2**15
# Attach externally computed cluster labels and build a report.
dataTT.set_clusters('/tmp/TT2.clu.1')
crep = spikesorting.ClusterReportFromData(dataTT)
'''
dataTT.samples = dataTT.samples.reshape((-1,N_CHANNELS,SAMPLES_PER_SPIKE),order='C')
fetArray = spikesorting.calculate_features(dataTT.samples,['peak','valley'])
spikesorting.write_fet_file('/tmp/TT2.fet.1',fetArray)
'''
'''
plot(dataTT.samples[:10,:].T,'.-')
draw()
show()
'''
'''
~/tmp/klustakwik/KK2/KlustaKwik TT6 1 -Subset 1e5 -MinClusters 6 -MaxClusters 12 -MaxPossibleClusters 12 -UseFeatures 11111111
# Pick up any edits to the loader module during interactive work.
reload(loadopenephys)
from pylab import *

# Tetrode recording geometry.
N_CHANNELS = 4           # channels per tetrode
SAMPLES_PER_SPIKE = 40   # waveform samples stored per spike

# Locate the .spikes file for this animal/session/tetrode.
dataDir = os.path.join(settings.EPHYS_PATH, '%s/%s/' % (animalName, ephysSession))
tetrodeFile = os.path.join(dataDir, 'Tetrode%d.spikes' % tetrode)
dataTT = loadopenephys.DataSpikes(tetrodeFile)
# Dividing by 0.03 converts timestamps to microseconds
# (assumes a 30 kHz sampling rate -- TODO confirm).
dataTT.timestamps = dataTT.timestamps / 0.03 # in microsec
# Shift samples by 2**15 -- presumably recentering unsigned 16-bit
# ADC values around zero; verify against the acquisition format.
dataTT.samples = dataTT.samples.astype(float) - 2**15
# Attach externally computed cluster labels and build a report.
dataTT.set_clusters('/tmp/TT2.clu.1')
crep = spikesorting.ClusterReportFromData(dataTT)
'''
dataTT.samples = dataTT.samples.reshape((-1,N_CHANNELS,SAMPLES_PER_SPIKE),order='C')
fetArray = spikesorting.calculate_features(dataTT.samples,['peak','valley'])
spikesorting.write_fet_file('/tmp/TT2.fet.1',fetArray)
'''
'''
plot(dataTT.samples[:10,:].T,'.-')
draw()
show()
'''
'''
~/tmp/klustakwik/KK2/KlustaKwik TT6 1 -Subset 1e5 -MinClusters 6 -MaxClusters 12 -MaxPossibleClusters 12 -UseFeatures 11111111
~/tmp/klustakwik/KK2/KlustaKwik TT6 1 -Subset 1e5 -MinClusters 10 -MaxClusters 24 -MaxPossibleClusters 12 -UseFeatures 11111111