Example #1
    def __init__(self, parent=None):
        super(ThreadedTasks, self).__init__(parent)
        self.selection_task = inthread(SelectionTask)(impatient=True)
        self.correlograms_task = inprocess(CorrelogramsTask)(
            impatient=True, use_master_thread=False)
        # HACK: the similarity matrix view does not appear to update on
        # some versions of Mac+Qt, but it seems to work with inthread.
        if sys.platform == 'darwin':
            self.similarity_matrix_task = inthread(SimilarityMatrixTask)(
                impatient=True)
        else:
            self.similarity_matrix_task = inprocess(SimilarityMatrixTask)(
                impatient=True, use_master_thread=False)
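Both wrappers follow the same pattern: inthread(Cls) or inprocess(Cls) returns a factory, and calling that factory with the constructor arguments of Cls yields a proxy whose method calls are queued to a background thread or process. A minimal sketch of that usage (the Task class here is hypothetical; the proxy calls mirror Examples #3 and #4 below):

import qtools

class Task(object):
    # Hypothetical task class, for illustration only.
    def __init__(self, impatient=False):
        self.impatient = impatient

    def compute(self, x):
        # Executed in the background process, not in the caller.
        return x * x

task = qtools.inprocess(Task)(impatient=True)
task.compute(3)  # queued asynchronously; the result is not returned here
task.join()      # wait for all pending calls to finish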
Example #2
File: gallery.py Project: suujon/galry
    def __init__(self, folder=None):
        # Get the image folder from the command line if it was not given
        # explicitly.
        if folder is None:
            if len(sys.argv) > 1:
                folder = sys.argv[1]
            else:
                folder = '.'
        self.folder = folder
        # Sorted list of all JPG images in the folder.
        self.files = sorted(f for f in os.listdir(folder)
                            if f.lower().endswith('.jpg'))
        self.cache = {}
        self.n = len(self.files)
        # Number of images to keep forward/backward in the cache.
        self.steps = 2
        # The image loader runs in a separate process.
        self.loader = inprocess(Loader)()
        self.nav = Navigator(self.n, self.steps)
        self.set_index(0)
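Here inprocess(Loader)() constructs the image loader in a separate process, presumably so that decoding JPEG files from disk does not block the gallery's UI; requests to it go through the same proxy pattern sketched after Example #1.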
Example #3
def test_tasksinprocess_state():
    tasks = qtools.inprocess(TestTasks)(7)
    tasks.operation(3, 4, coeff=2)
    x = tasks.kwarg0
    assert x == 7
    tasks.join()
Example #4
def test_tasksinprocess_constructor():
    tasks = qtools.inprocess(TestTasks)(7)
    tasks.operation(3, 4, coeff=2)
    tasks.join()
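Both tests assume a task class whose constructor argument is exposed as an attribute on the proxy (hence tasks.kwarg0 == 7 in Example #3). A hypothetical sketch of such a class (the actual TestTasks in the qtools test suite may differ):

class TestTasks(object):
    # Hypothetical sketch, for illustration only.
    def __init__(self, kwarg0=None):
        # The inprocess proxy exposes constructor arguments as attributes,
        # which is why tasks.kwarg0 returns 7 in the state test.
        self.kwarg0 = kwarg0

    def operation(self, x, y, coeff=1):
        # Executed asynchronously in the background process.
        return coeff * (x + y)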
Example #5
File: dataio.py Project: rossant/spiky
    def load(self, filename, fileindex=1, probefile=None):

        # Load the XML parameter file.
        self.holder = DataHolder()

        try:
            path = get_actual_filename(filename, 'xml', None)
            params = parse_xml(path, fileindex=fileindex)
        except Exception:
            raise Exception("The XML file was not found and the data "
                            "cannot be loaded.")
        
        
        # Parameters read from the Klusters XML file.
        nchannels = params['nchannels']
        nsamples = params['nsamples']
        fetdim = params['fetdim']
        freq = params['rate']
        
        self.filename = filename
        self.fileindex = fileindex

        # FEATURES
        # -------------------------------------------------
        path = get_actual_filename(filename, 'fet', fileindex)
        features, headers = load_text_pandas(path, np.int32, skiprows=1,
                                             returnheaders=True)
        features = np.array(features, dtype=np.float32)
        
        # Total number of feature columns, from the FET file header.
        nfet = int(headers[0])
        # HACK: sometimes the penultimate column is corrupted by a double
        # whitespace in the file; in that case, drop it.
        if features.shape[1] != nfet:
            features = np.hstack((features[:, :-2],
                                  features[:, -1].reshape((-1, 1))))
        nextrafet = nfet - fetdim * nchannels

        features = features.reshape((-1, nfet))
        nspikes = features.shape[0]


        # CLUSTERS
        # -------------------------------------------------
        try:
            path = get_actual_filename(filename, 'clu', fileindex)
            clusters = load_text_pandas(path, np.int32)
        except Exception:
            log_warn("CLU file '%s' not found" % filename)
            # Default: one cluster, with all spikes in cluster 0.
            clusters = np.zeros(nspikes + 1, dtype=np.int32)
            clusters[0] = 1
        # The first value is the number of clusters; the rest are the
        # cluster indices, one per spike.
        clusters = clusters[1:]
        ssignals.emit(self, 'FileLoading', .2)
        
        # The last feature column contains the spike times.
        spiketimes = features[:, -1].copy()
        nextrafet = features.shape[1] - nchannels * fetdim

        # Normalize the regular features to [-1, 1], symmetrically around 0.
        m = features[:, :-nextrafet].min()
        M = features[:, :-nextrafet].max()
        # Force symmetry.
        vx = max(np.abs(m), np.abs(M))
        m, M = -vx, vx
        features[:, :-nextrafet] = -1 + 2 * (features[:, :-nextrafet] - m) / (M - m)

        # Normalize the extra features to [-1, 1] (no symmetry constraint).
        m = features[:, -nextrafet:].min()
        M = features[:, -nextrafet:].max()
        features[:, -nextrafet:] = -1 + 2 * (features[:, -nextrafet:] - m) / (M - m)

        ssignals.emit(self, 'FileLoading', .4)
            
            
        
        # MASKS
        # -------------------------------------------------
        # First try the FMASK file, then fall back to the MASK file.
        try:
            path = get_actual_filename(filename, 'fmask', fileindex)
            masks = load_text_pandas(path, np.float32, skiprows=1)
            self.holder.masks_complete = masks
            # Keep one mask value per channel.
            masks = masks[:, :-1:fetdim]
        except Exception:
            try:
                path = get_actual_filename(filename, 'mask', fileindex)
                masks = load_text_pandas(path, np.float32, skiprows=1)
                self.holder.masks_complete = masks
                masks = masks[:, :-1:fetdim]
            except:
                # Finally, warn and use default masks (everything set to 1).
                log_warn("MASK file '%s' not found" % filename)
                masks = np.ones((nspikes, nchannels))
                self.holder.masks_complete = np.ones(features.shape)

        ssignals.emit(self, 'FileLoading', .6)
        
        
        
        # WAVEFORMS
        # -------------------------------------------------
        try:
            path = get_actual_filename(filename, 'spk', fileindex)
            waveforms = load_binary(path)
            waveforms = waveforms.reshape((nspikes, nsamples, nchannels))
        except IOError:
            log_warn("SPK file '%s' not found" % filename)
            waveforms = np.zeros((nspikes, nsamples, nchannels))

        ssignals.emit(self, 'FileLoading', .8)
        
        self.holder.freq = freq
        self.holder.nspikes = nspikes
        self.holder.nchannels = nchannels
        self.holder.nextrafet = nextrafet
        self.holder.spiketimes = spiketimes
        self.holder.duration = spiketimes[-1] / float(self.holder.freq)

        # Normalize all waveforms at once.
        waveforms = waveforms - waveforms.mean()
        waveforms = waveforms / np.abs(waveforms).max()

        self.holder.waveforms = waveforms
        self.holder.waveforms_info = dict(nsamples=nsamples)
        self.holder.fetdim = fetdim
        self.holder.features = features
        self.holder.masks = masks
        self.holder.clusters = clusters
        
        # GROUPS
        # --------------------------------------
        # Load the cluster/group info from the pickle file, or fall back
        # to default groups.
        try:
            path = get_actual_filename(filename, 'groups', fileindex)
            info = load_pickle(path)
            clusters_info = info['clusters_info']
            groups_info = info['groups_info']
        except:
            groups_info = {
                0: dict(groupidx=0, name='Noise', color=0, spkcount=0),
                1: dict(groupidx=1, name='Multi-unit', color=1, spkcount=0),
                2: dict(groupidx=2, name='Good', color=2, spkcount=nspikes),
            }
            clusters_info = get_clusters_info(clusters, groupidx=2)

        nclusters = len(clusters_info)
        self.holder.nclusters = nclusters
        self.holder.clusters_info = dict(
            clusters_info=clusters_info,
            groups_info=groups_info,
        )
            
        # PROBE
        # --------------------------------------
        probe = None
        try:
            if probefile:
                probe = np.loadtxt(probefile)
        except Exception as e:
            print(str(e))
        self.holder.probe = dict(positions=probe)

        # Cross-correlograms.
        nsamples_correlograms = 20
        self.holder.correlograms_info = dict(nsamples=nsamples_correlograms)

        # Launch the correlation matrix computation asynchronously in a
        # background process.
        tasks.TASKS.correlation_matrix_queue = inprocess(
            tasks.CorrelationMatrixQueue)(self.holder)
        tasks.TASKS.correlation_matrix_queue.process()

        return self.holder
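Note that load() returns the populated DataHolder as soon as the files have been read: the correlation matrix itself is computed asynchronously by the CorrelationMatrixQueue proxy in a separate process, while file-loading progress is reported through the 'FileLoading' signal in increments of 0.2.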