Example #1
def classify_from_raw_data(JobType,
                           DatFileName,
                           ProbeFileName,
                           max_spikes=None,
                           output_dir=None,
                           clu_dir=None):
    """Top level function that starts a data processing job. JobType is "batch" or "generalize". """
    print_params()

    if not os.path.exists(DatFileName):
        raise Exception("Dat file %s does not exist" % DatFileName)
    DatFileName = os.path.abspath(DatFileName)

    n_ch_dat, sample_rate = get_dat_pars(DatFileName)
    set_globals_samples(sample_rate)

    DatDir = os.path.abspath(os.path.dirname(DatFileName))
    probe_stuff.load_probe(ProbeFileName)
    global N_CH
    N_CH = probe_stuff.N_SITES

    basename = processed_basename(DatFileName, ProbeFileName, JobType)
    OutDir = join(output_dir, basename) if output_dir else join(
        DatDir, basename)
    if clu_dir is not None: clu_dir = os.path.abspath(clu_dir)
    with indir(OutDir):
        Channels_dat = [site.dat for site in probe_stuff.PROBE_SITES]
        if JobType == "batch":
            cluster_from_raw_data(basename, DatFileName, n_ch_dat,
                                  Channels_dat, probe_stuff.PROBE_GRAPH,
                                  max_spikes)
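A minimal usage sketch for this entry point, assuming the file that defines classify_from_raw_data is importable as spikesort (a hypothetical module name) and that the .dat recording and probe-file paths below are stand-ins for real files:

import spikesort  # hypothetical module name for the file defining classify_from_raw_data

# Run a full "batch" clustering job on a raw recording; results are written
# under <output_dir>/<processed basename>.
spikesort.classify_from_raw_data("batch",
                                 "/data/rat01/recording.dat",   # raw extracellular .dat file (placeholder)
                                 "/data/probes/tetrode.probe",  # probe geometry file (placeholder)
                                 max_spikes=20000,              # optional cap on detected spikes
                                 output_dir="/data/rat01/sorted")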
Example #2
def classify_from_raw_data(clusterdir,JobType,DatFileName,ProbeFileName,max_spikes=None,output_dir=None,clu_dir=None):
    """Top level function that starts a data processing job. JobType is "batch" or "generalize". """
    print_params()

    if not os.path.exists(DatFileName):
        raise Exception("Dat file %s does not exist"%DatFileName)
    DatFileName = os.path.abspath(DatFileName)
    
    n_ch_dat,sample_rate = get_dat_pars(DatFileName)
    set_globals_samples(sample_rate)
    
    DatDir = os.path.abspath(os.path.dirname(DatFileName))
    probe_stuff.load_probe(ProbeFileName)
    global N_CH; N_CH = probe_stuff.N_SITES
    
    basename = processed_basename(DatFileName,ProbeFileName,JobType)
    OutDir = join(output_dir,basename) if output_dir else join(DatDir,basename)
    if clu_dir is not None: clu_dir = os.path.abspath(clu_dir)
    with indir(OutDir):    
        Channels_dat = [site.dat for site in probe_stuff.PROBE_SITES]
        if JobType == "batch":
            cluster_from_raw_data(clusterdir,basename,DatFileName,n_ch_dat,Channels_dat,probe_stuff.PROBE_GRAPH,max_spikes)
        realOutDir = os.getcwd()
        #elif JobType == "generalize":
            #generalize_group_from_raw_data_splitprobe(basename,DatFileName,n_ch_dat,Channels_dat,probe_stuff.PROBE_GRAPH,max_spikes,clu_dir)                        
    return realOutDir
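This variant additionally takes a clusterdir argument and returns the directory it actually wrote into. A hedged sketch of calling it, with the same hypothetical spikesort module and placeholder paths:

import spikesort  # hypothetical module name, as above

out_dir = spikesort.classify_from_raw_data("/data/rat01/clusters",       # clusterdir (placeholder)
                                           "batch",
                                           "/data/rat01/recording.dat",
                                           "/data/probes/tetrode.probe",
                                           max_spikes=None,               # no cap: use every detected spike
                                           output_dir="/data/rat01/sorted")
print("results written to", out_dir)  # the realOutDir captured inside the indir() block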
Example #3
def extract_intra_spikes(DatFileName,IntraChannel,output_dir=None,ExtraChannels=None):
    """extracts spikes times from intracellular data"""
    
    THRESH_FRAC = .5
    
    DatFileName = os.path.abspath(DatFileName)
    DatDir = os.path.dirname(DatFileName)
    basename = intra_basename(DatFileName)
    
    OutDir = join(output_dir,basename) if output_dir else join(DatDir,basename)
    
    with indir(OutDir):
        SpkFileName = basename+'.spk.1'
        
        n_ch_dat,sample_rate = get_dat_pars(DatFileName)
        global N_CH
        N_CH = 1 if ExtraChannels is None else 1 + len(ExtraChannels)
        set_globals_samples(sample_rate)
    
        print("extracting intracellular spikes from %s"%DatFileName)
    
        n_samples = num_samples(DatFileName,n_ch_dat,n_bytes=np.nbytes[DTYPE])
        AllDataArr = np.memmap(DatFileName,dtype=np.int16,shape=(n_samples,n_ch_dat),mode='r')
    
        b,a = signal.butter(3,100./(SAMPLE_RATE/2),'high') #filter at 100 Hz
        IntraArr = AllDataArr[:,IntraChannel].copy()
        IntraArr = signal.filtfilt(b,a,IntraArr)
        Thresh = IntraArr.max()*THRESH_FRAC    
    
        Segs = contig_segs(np.flatnonzero(IntraArr > Thresh),padding=2)
        # Take the peak sample within each suprathreshold segment; build a list
        # (not a lazy map object) so len() and iteration below work on Python 3.
        TmList = [Seg[IntraArr[Seg].argmax()] for Seg in Segs]
        CluList = np.ones(len(TmList),dtype=int)
        FetList = np.zeros((len(TmList),1),dtype=int)
        SpkList = [get_padded(IntraArr,PeakSample-S_BEFORE,PeakSample+S_AFTER) for PeakSample in TmList]
        SpkArr = np.array(SpkList)[:,:,np.newaxis]
        
        
        if ExtraChannels is not None:
            ExtraArr = AllDataArr[:,ExtraChannels].copy()
            #b,a = signal.butter(BUTTER_ORDER,(F_LOW/(SAMPLE_RATE/2),.95),'pass')                
            ExtraArr = filtfilt2d(b,a,ExtraArr)
            ExtraSpkList = [get_padded(ExtraArr,PeakSample-S_BEFORE,PeakSample+S_AFTER) for PeakSample in TmList]        
            ExtraSpkArr = np.array(ExtraSpkList)
            SpkArr *= ExtraSpkArr[0].max()/SpkArr[0].max()                       
            SpkArr = np.concatenate((np.array(ExtraSpkList),SpkArr),axis=2)
    
        output.write_spk(np.array(SpkArr),SpkFileName)
        write_files(basename,CluList,TmList,FetList,[])
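A minimal usage sketch, again assuming a hypothetical spikesort module and placeholder paths and channel numbers; here the intracellular trace sits in column 0 of the .dat file and two extracellular channels are cut out around each detected peak:

import spikesort  # hypothetical module name

spikesort.extract_intra_spikes("/data/rat01/paired.dat",       # .dat file with intra + extra channels (placeholder)
                               IntraChannel=0,                 # column holding the intracellular trace
                               output_dir="/data/rat01/intra",
                               ExtraChannels=[1, 2])            # optional extracellular channels to include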
Example #4
def extract_intra_spikes(DatFileName,
                         IntraChannel,
                         output_dir=None,
                         ExtraChannels=None):
    """extracts spikes times from intracellular data"""

    THRESH_FRAC = .5

    DatFileName = os.path.abspath(DatFileName)
    DatDir = os.path.dirname(DatFileName)
    basename = intra_basename(DatFileName)

    OutDir = join(output_dir, basename) if output_dir else join(
        DatDir, basename)

    with indir(OutDir):
        SpkFileName = basename + '.spk.1'

        n_ch_dat, sample_rate = get_dat_pars(DatFileName)
        global N_CH
        N_CH = 1 if ExtraChannels is None else 1 + len(ExtraChannels)
        set_globals_samples(sample_rate)

        print("extracting intracellular spikes from %s" % DatFileName)

        n_samples = num_samples(DatFileName,
                                n_ch_dat,
                                n_bytes=np.nbytes[DTYPE])
        AllDataArr = np.memmap(DatFileName,
                               dtype=np.int16,
                               shape=(n_samples, n_ch_dat),
                               mode='r')

        b, a = signal.butter(3, 100. / (SAMPLE_RATE / 2),
                             'high')  #filter at 100 Hz
        IntraArr = AllDataArr[:, IntraChannel].copy()
        IntraArr = signal.filtfilt(b, a, IntraArr)
        Thresh = IntraArr.max() * THRESH_FRAC

        Segs = contig_segs(np.flatnonzero(IntraArr > Thresh), padding=2)
        # Peak sample within each suprathreshold segment; a list (not a lazy
        # map object) so that len() and indexing below work on Python 3.
        TmList = [Seg[IntraArr[Seg].argmax()] for Seg in Segs]
        CluList = np.ones(len(TmList), dtype=int)
        FetList = np.zeros((len(TmList), 1), dtype=int)
        SpkList = [
            get_padded(IntraArr, PeakSample - S_BEFORE, PeakSample + S_AFTER)
            for PeakSample in TmList
        ]
        SpkArr = np.array(SpkList)[:, :, np.newaxis]

        if ExtraChannels is not None:
            ExtraArr = AllDataArr[:, ExtraChannels].copy()
            #b,a = signal.butter(BUTTER_ORDER,(F_LOW/(SAMPLE_RATE/2),.95),'pass')
            ExtraArr = filtfilt2d(b, a, ExtraArr)
            ExtraSpkList = [
                get_padded(ExtraArr, PeakSample - S_BEFORE,
                           PeakSample + S_AFTER) for PeakSample in TmList
            ]
            ExtraSpkArr = np.array(ExtraSpkList)
            SpkArr *= ExtraSpkArr[0].max() / SpkArr[0].max()
            SpkArr = np.concatenate((np.array(ExtraSpkList), SpkArr), axis=2)

        output.write_spk(np.array(SpkArr), SpkFileName)
        write_files(basename, CluList, TmList, FetList, [])