예제 #1
0
    def cluster_session(self, session, tetrode):
        """Run the full spike-clustering pipeline for one session/tetrode.

        Args:
            session: session identifier understood by self.get_session_name().
            tetrode (int): tetrode number to cluster.
        """
        ephysSession = self.get_session_name(session)

        # TetrodeToCluster (from jaratoolbox.spikesorting) wraps the whole
        # waveform-loading / feature-file / clustering / report pipeline.
        oneTT = spikesorting.TetrodeToCluster(self.animalName, ephysSession, tetrode)

        oneTT.load_waveforms()
        oneTT.create_fet_files()
        # BUG FIX: this call was split mid-identifier across two lines
        # ("oneT" / "T.run_clustering()"), which is a NameError/SyntaxError.
        oneTT.run_clustering()
        oneTT.save_report()
예제 #2
0
def cluster_session(session, tetrode, features=None):
    """Cluster one session's spikes for a tetrode, reusing cached results.

    Args:
        session: inforec-style session object exposing .subject and
            .ephys_dir().
        tetrode (int): tetrode number to cluster.
        features (list of str, optional): feature names handed to the
            clusterer; defaults to ['peak', 'valleyFirstHalf'].

    Returns:
        The TetrodeToCluster instance, with cluster assignments attached.
    """
    # BUG FIX: the original default `features=['peak', 'valleyFirstHalf']`
    # is a mutable default argument shared across all calls; use a None
    # sentinel and build a fresh list per call (behavior unchanged).
    if features is None:
        features = ['peak', 'valleyFirstHalf']
    #TODO: the oneTT obj here has dataTT attribute, the cms one does not
    oneTT = spikesorting.TetrodeToCluster(session.subject, session.ephys_dir(),
                                          tetrode, features)
    oneTT.load_waveforms()
    clusterFile = os.path.join(oneTT.clustersDir,
                               'Tetrode%d.clu.1' % oneTT.tetrode)
    if os.path.isfile(clusterFile):
        # A previous run already produced cluster assignments: reuse them.
        oneTT.dataTT.set_clusters(clusterFile)
    else:
        oneTT.create_fet_files()
        oneTT.run_clustering()
        oneTT.save_report()
    return oneTT
예제 #3
0
# Walk every experiment / site / session in the inforec and cluster each
# tetrode, skipping the expensive clustering step whenever a cluster file
# from a previous run is already on disk.
for experiment in inforec.experiments:
    for site in experiment.sites:
        for session in site.sessions:
            for tetrode in tetrodes:
                spikesFn = os.path.join(session.full_ephys_path(),
                                        'Tetrode{}.spikes'.format(tetrode))
                behavFn = session.full_behav_filename()
                dataSpikes = loadopenephys.DataSpikes(spikesFn)

                ephysSession = '{}_{}'.format(session.date, session.timestamp)
                tt = spikesorting.TetrodeToCluster(
                    session.subject, ephysSession, tetrode,
                    ['peak', 'valleyFirstHalf'])
                tt.load_waveforms()

                clusterFile = os.path.join(tt.clustersDir,
                                           'Tetrode%d.clu.1' % tt.tetrode)
                if not os.path.isfile(clusterFile):
                    # No cached result: run the full clustering pipeline.
                    tt.create_fet_files()
                    tt.run_clustering()
                    tt.save_report()
                    #NOTE:Could create a mini dataframe with the clustering results at this point
                else:
                    # .clu files start with the cluster count; drop it ([1:]).
                    tt.dataTT.clusters = np.fromfile(clusterFile,
                                                     dtype='int32',
                                                     sep=' ')[1:]
예제 #4
0
'''
Fixing issues with the cluster report.
'''

from jaratoolbox import spikesorting
reload(spikesorting)  # Python-2 builtin reload: pick up edits to the module without restarting.

# Select which hard-coded dataset to process.
CASE = 1

if CASE == 1:
    animalName = 'test030'
    ephysSession = '2014-06-25_18-33-30_TT6goodGND'
    tetrode = 6

oneTT = spikesorting.TetrodeToCluster(animalName, ephysSession, tetrode)
# The steps below are disabled (triple-quoted out), presumably because
# clustering was already run and only the report is being regenerated.
'''
oneTT.create_fet_files()
oneTT.run_clustering()
'''
# Reference copy of the relevant TetrodeToCluster internals being debugged.
'''
self.dataTT = loadopenephys.DataSpikes(self.tetrodeFile) #,readWaves=True)
self.nSpikes = self.dataTT.nRecords# FIXME: this is specific to the OpenEphys format
self.dataTT.samples = self.dataTT.samples.astype(float)-2**15# FIXME: this is specific to OpenEphys
self.dataTT.timestamps = self.dataTT.timestamps/self.dataTT.samplingRate
'''

oneTT.save_report()
# Snippet kept for reference while debugging (source unclear from this file).
'''
    if timeZero is None:
        timeZero = timeStamps[0]
'''
예제 #5
0
            print 'here'
            oneTT.create_fet_files()
            oneTT.run_clustering()
            oneTT.save_report()
            ISIviolationsDict[ephysSession][tetrode-1]= oneTT.get_ISI_values() #you can only get the ISI values after running save_report()
    except:
        print "error with session "+ephysSession
        if (ephysSession not in badSessionList):
            badSessionList.append(ephysSession)
'''
# Generate a tuning-curve cluster report for every session/tetrode pair;
# sessions that fail anywhere in the loop are collected in badSessions.
for indEphys, ephysSession in enumerate(ephysSessionArray):
    try:
        for tetrode in range(1, (numTetrodes + 1)):
            oneTT = spikesorting.TetrodeToCluster(
                animalName,
                ephysSession,
                tetrode,
                features=['peak', 'valleyFirstHalf'])
            #oneTT.create_fet_files()
            #oneTT.run_clustering()
            oneTT.save_report('cluster_report_tuning')
    except:
        # NOTE(review): bare except silently swallows ALL errors (including
        # KeyboardInterrupt); narrowing to Exception and logging the cause
        # would make failures diagnosable.
        badSessions.append(ephysSession)

# Report (Python-2 print statement) which sessions failed.
for session in badSessions:
    print session
    #oneTT.save_report()
    #ISIviolationsDict[ephysSession][tetrode-1]= oneTT.get_ISI_values() #you can only get the ISI values after running save_report()
'''
# Destination directory for this animal's processed results.
finalOutputDir = outputDir+'/'+animalName+'_processed'
# ISI results accumulator — presumably filled in later (not visible in this chunk).
ISIDict = {}
예제 #6
0
 def test_clustering_single_session(self):
     """Smoke-test the full clustering pipeline on the bundled test data."""
     tt = spikesorting.TetrodeToCluster('test', 'testdata', 2)
     # Run the pipeline stages in their required order.
     for stage in (tt.load_waveforms, tt.create_fet_files,
                   tt.run_clustering, tt.save_report):
         stage()
예제 #7
0
# Alternative datasets, kept for quick switching:
#subject = 'gosi004'
#ephysSession = '2017-02-11_15-46-30'
#ephysSession = '2017-02-11_15-54-13'

subject = 'd1pi015'
ephysSession = '2016-08-07_16-48-07'

# Sampling rate in Hz — presumably the OpenEphys acquisition rate; confirm.
srate = 30000.0
# Disabled: loading session info from an inforec file instead of hard-coding.
'''
inforecFilename = '/data/inforec/gosi004_inforec.py'
inforec = imp.load_source('module.name', inforecFilename)
inforec.experiments[5].sites[0].sessions[0].ephys_dir
'''

# NOTE(review): `tetrode` is not defined anywhere in this chunk — it must be
# set earlier in the file or this line raises NameError.
ttdata = spikesorting.TetrodeToCluster(subject, ephysSession, tetrode)
ttdata.load_waveforms()
ttdata.set_clusters_from_file()
# Disabled: regenerating the full cluster report from the loaded data.
'''
report = spikesorting.ClusterReportFromData(ttdata.timestamps,
                                            ttdata.samples,
                                            ttdata.clusters,
                                            outputDir=None,
                                            figtitle='title',
                                            showfig=False)
'''

plt.clf()
# Inspect clusters 1..6 one at a time (loop body continues past this chunk).
for cluster in range(1, 7):
    #cluster = 2
    indsThisCluster = (ttdata.clusters == cluster)