Example #1
from datetime import datetime

import h5py

# subscribe() is a project helper (a ZMQ SUB socket); see the sketch after this example.


def save_locations(file_path, meta, port, event, topic='locations'):
    socket = subscribe(port, topic)
    with h5py.File(file_path, 'a') as f:
        now = str(datetime.now())
        g = f.create_group(now)
        g.create_dataset('metadata', data=meta.encode('ascii', 'ignore'))
        f.flush()
        i = 0
        last_x = 0
        while not event.is_set():
            socket.recv_string()  # discard the topic frame
            data = socket.recv_pyobj().values
            # The first value is the number of rows; h5py uses the opposite notation
            x, y = data.shape
            if i == 0:
                dset = g.create_dataset('locations', (x, y + 1),
                                        maxshape=(None, y + 1))
            else:
                dset.resize((x + last_x, y + 1))
            dset[last_x:last_x + x, 0] = i
            dset[last_x:last_x + x, 1:] = data
            last_x += x
            i += 1
            f.flush()
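
Every example in this listing calls a subscribe(port, topic) helper that is not shown. A minimal sketch of what it could look like, assuming a ZeroMQ (pyzmq) PUB/SUB setup on localhost; the real helper may configure the context, host, or socket options differently:

import zmq


def subscribe(port, topic):
    """Sketch of the assumed helper: return a ZMQ SUB socket that
    receives only messages whose first frame matches `topic`."""
    context = zmq.Context.instance()
    socket = context.socket(zmq.SUB)
    socket.connect(f"tcp://localhost:{port}")
    socket.setsockopt_string(zmq.SUBSCRIBE, topic)
    return socket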
Example #2
import trackpy as tp  # tp.locate performs the actual particle localization


def calculate_locations(port, topic, event, publisher_queue, **kwargs):
    socket = subscribe(port, topic)
    if 'diameter' not in kwargs:
        # DiameterNotDefined is a project-defined exception
        raise DiameterNotDefined('A diameter is mandatory for locating particles')

    while not event.is_set():
        socket.recv_string()  # discard the topic frame
        data = socket.recv_pyobj()
        image = data[1]  # data[0] is the timestamp of the frame
        locations = tp.locate(image, **kwargs)
        publisher_queue.put({'topic': 'locations', 'data': locations})
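
These workers are designed to run in background processes and leave their loop once their event is set. A hypothetical launch of calculate_locations; the wiring below is an assumption, not part of the original code:

from multiprocessing import Event, Process, Queue

stop_event = Event()
publisher_queue = Queue()
# port 5555 and topic 'image' are placeholder values
worker = Process(target=calculate_locations,
                 args=(5555, 'image', stop_event, publisher_queue),
                 kwargs={'diameter': 11})
worker.start()
# ... later, ask the worker to leave its loop and wait for it
stop_event.set()
worker.join()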
Example #3
import numpy as np
from trackpy.linking import Linker  # assumed import; may be a project-local Linker


def link_locations(port, topic, event, publisher_queue, **kwargs):
    if 'search_range' not in kwargs:
        # LinkException is a project-defined exception
        raise LinkException('Search Range must be specified')

    socket = subscribe(port, topic)
    t = 0  # First frame
    linker = Linker(**kwargs)
    while not event.is_set():
        socket.recv_string()  # discard the topic frame
        locations = socket.recv_pyobj()
        coords = np.vstack((locations['x'], locations['y'])).T
        if t == 0:
            linker.init_level(coords, t)
        else:
            linker.next_level(coords, t)
        t += 1
        locations['particle'] = linker.particle_ids
        locations['frame'] = t
        publisher_queue.put({'topic': 'particle_links', 'data': [locations, linker.particle_ids]})
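
The {'topic': ..., 'data': ...} dictionaries pushed onto publisher_queue are presumably re-broadcast over ZMQ by a separate publisher loop. A minimal sketch of such a loop, mirroring the recv_string/recv_pyobj pattern on the subscriber side; the function below is an assumption:

import zmq


def publish_from_queue(port, queue, event):
    """Hypothetical counterpart to subscribe(): drain the queue and
    broadcast each item as a (topic, payload) message pair."""
    context = zmq.Context.instance()
    socket = context.socket(zmq.PUB)
    socket.bind(f"tcp://*:{port}")
    while not event.is_set():
        message = queue.get()  # e.g. {'topic': 'locations', 'data': df}
        socket.send_string(message['topic'], zmq.SNDMORE)
        socket.send_pyobj(message['data'])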
Example #4
    def accumulate_links(self):
        """ Asynchronous method to store the links in this class. It looked like a good idea to keep this information in
        a single location, regardless of whether another process is listening on the topic. This in principle can be
        used to analyse data retrospectively.

        .. todo:: Still needs to clear the memory after several calls. Need to fit better in the architecture of the
            program
        """
        self._accumulate_links_event.clear()
        socket = subscribe(self.publisher.port, 'particle_links')
        while not self._accumulate_links_event.is_set():
            if general_stop_event.is_set():
                break

            socket.recv_string()  # discard the topic frame
            data = socket.recv_pyobj()
            if self.locations.shape[0] == 0:
                self.locations = data[0]
            else:
                # note: DataFrame.append was removed in pandas 2.0;
                # pd.concat([self.locations, data[0]]) is the modern equivalent
                self.locations = self.locations.append(data[0])

    def run(self):
        socket = subscribe(self.port, self.topic)
        while self.keep_receiving:
            socket.recv_string()  # discard the topic frame
            data = socket.recv_pyobj()
            self.data_received.emit(data)
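
The run method above reads like the body of a Qt worker thread: data_received.emit is the Qt signal idiom for handing data to the GUI. A hypothetical minimal wrapper, assuming PyQt5 and the subscribe() sketch from Example #1; the class name and attributes are illustrative:

from PyQt5.QtCore import QThread, pyqtSignal


class Subscriber(QThread):
    """Hypothetical thread that forwards every object received on
    `topic` to the GUI through a Qt signal."""
    data_received = pyqtSignal(object)

    def __init__(self, port, topic):
        super().__init__()
        self.port = port
        self.topic = topic
        self.keep_receiving = True

    def run(self):
        socket = subscribe(self.port, self.topic)
        while self.keep_receiving:
            socket.recv_string()  # discard the topic frame
            data = socket.recv_pyobj()
            self.data_received.emit(data)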