Ejemplo n.º 1
0
 def _run(self):
     """Run one AnalyzerCore per plate and link any resulting HDF5 files."""
     for plate in self._imagecontainer.plates:
         # every plate gets its own deep copy of the image container so the
         # per-plate analyzers cannot interfere with each other's state
         core = AnalyzerCore(plate, self._settings,
                             copy.deepcopy(self._imagecontainer))
         links = core.processPositions(self)
         if links:
             link_hdf5_files(links)
Ejemplo n.º 2
0
    def finish(self):
        """Stop the log receiver and merge per-position HDF5 links, if any."""
        self.log_receiver.shutdown()
        self.log_receiver.server_close()
        self.log_receiver_thread.join()

        # flatten the collected per-plate link lists and hand them over once
        if self.post_hdf5_link_list:
            merged = reduce(lambda a, b: a + b, self.post_hdf5_link_list)
            link_hdf5_files(sorted(merged))
Ejemplo n.º 3
0
    def finish(self):
        """Tear down the logging server, then combine the queued HDF5 links."""
        receiver = self.log_receiver
        receiver.shutdown()
        receiver.server_close()
        self.log_receiver_thread.join()

        pending = self.post_hdf5_link_list
        if len(pending) > 0:
            # concatenate the per-plate link lists into one flat list
            combined = reduce(lambda acc, part: acc + part, pending)
            link_hdf5_files(sorted(combined))
Ejemplo n.º 4
0
        # A one-element compound is a bare position name; that is only valid
        # when the input folder itself is the single plate.
        elif len(compound) == 1:
            if not multiple_plates:
                # use the input folder's name as the implicit plate id
                plate_id = os.path.split(path_input)[1]
                pos = compound[0]
            else:
                parser.error("Position must be of the form 'plateid___position'. Found '%s' instead." % item)
        else:
            parser.error("Position must be of the form 'position' or 'plateid___position'. Found '%s' instead." % item)

        # group the requested positions by their plate id
        if not plate_id in plates:
            plates[plate_id] = []
        plates[plate_id].append(pos)

    # start one analyzer per plate with the corresponding positions
    post_hdf5_link_list = []
    for plate_id in plates:
        # redefine the positions
        settings.set(SECTION_NAME_GENERAL, 'constrain_positions', True)
        settings.set(SECTION_NAME_GENERAL, 'positions', ','.join(plates[plate_id]))
        logger.info("Launching analyzer for plate '%s' with positions %s" % (plate_id, plates[plate_id]))
        # initialize and run the analyzer
        analyzer = AnalyzerCore(plate_id, settings, imagecontainer)
        hdf_links = analyzer.processPositions()
        # collect the per-plate HDF5 link lists for the optional merge below
        post_hdf5_link_list.append(hdf_links)

    # merge per-position HDF5 files only when both output options are enabled
    if settings.get('Output', 'hdf5_create_file') and settings.get('Output', 'hdf5_merge_positions'):
        if len(post_hdf5_link_list) > 0:
            # flatten the list of lists into a single list of link entries
            post_hdf5_link_list = reduce(lambda x,y: x + y, post_hdf5_link_list)
            link_hdf5_files(sorted(post_hdf5_link_list))
    print 'BATCHPROCESSING DONE!'
Ejemplo n.º 5
0
                        try:
                            # each pool result is (plate, position, hdf files)
                            plate, pos, hdf_files = r.get()
                            if len(hdf_files) > 0:
                                hdf5_link_list.append(hdf_files)
                        except Exception, e:
                            # collect worker exceptions; re-raised together below
                            exceptions.append(e)

            if len(exceptions) > 0:
                # join all worker tracebacks into one message, separated by rules
                sep = 79*'-'+'\n'
                msg = sep.join([traceback.format_exc(e) for e in exceptions])
                raise MultiProcessingError(msg)
        finally:
            self.close_logreceiver()
            # NOTE(review): only the flatten is guarded -- link_hdf5_files is
            # still called when hdf5_link_list is empty, unlike the guarded
            # variant in finish(); verify whether that is intentional.
            if len(hdf5_link_list) > 0:
                hdf5_link_list = reduce(lambda x, y: x + y, hdf5_link_list)
            link_hdf5_files(sorted(hdf5_link_list))

    def abort(self, wait=False):
        """Request cancellation of the running analysis.

        Sets the abort flag under the mutex, terminates the worker pool and,
        when *wait* is True, blocks until the thread has finished. Emits the
        ``aborted`` signal in every case.
        """
        self._mutex.lock()
        try:
            self._abort = True
        finally:
            self._mutex.unlock()
        # timing is essential, flag must be set before terminate is called
        self.pool.terminate()
        if wait:
            self.wait()
        self.aborted.emit()

    def _run(self):
        # clear any abort request left over from a previous run
        self._abort = False
Ejemplo n.º 6
0
        # A one-element compound is a bare position name; that is only valid
        # when the input folder itself is the single plate.
        elif len(compound) == 1:
            if not multiple_plates:
                # use the input folder's name as the implicit plate id
                plate_id = os.path.split(path_input)[1]
                pos = compound[0]
            else:
                parser.error("Position must be of the form 'plateid___position'. Found '%s' instead." % item)
        else:
            parser.error("Position must be of the form 'position' or 'plateid___position'. Found '%s' instead." % item)

        # group the requested positions by their plate id
        if not plate_id in plates:
            plates[plate_id] = []
        plates[plate_id].append(pos)

    # start one analyzer per plate with the corresponding positions
    post_hdf5_link_list = []
    for plate_id in plates:
        # redefine the positions
        settings.set(SECTION_NAME_GENERAL, 'constrain_positions', True)
        settings.set(SECTION_NAME_GENERAL, 'positions', ','.join(plates[plate_id]))
        logger.info("Launching analyzer for plate '%s' with positions %s" % (plate_id, plates[plate_id]))
        # initialize and run the analyzer
        analyzer = AnalyzerCore(plate_id, settings, imagecontainer)
        hdf_links = analyzer.processPositions()
        # collect the per-plate HDF5 link lists for the optional merge below
        post_hdf5_link_list.append(hdf_links)

    # merge per-position HDF5 files only when both output options are enabled
    if settings.get('Output', 'hdf5_create_file') and settings.get('Output', 'hdf5_merge_positions'):
        if len(post_hdf5_link_list) > 0:
            # flatten the list of lists into a single list of link entries
            post_hdf5_link_list = reduce(lambda x,y: x + y, post_hdf5_link_list)
            link_hdf5_files(sorted(post_hdf5_link_list))
    print 'BATCHPROCESSING DONE!'
Ejemplo n.º 7
0
                        try:
                            # each pool result is (plate, position, hdf files)
                            plate, pos, hdf_files = r.get()
                            if len(hdf_files) > 0:
                                hdf5_link_list.append(hdf_files)
                        except Exception, e:
                            # collect worker exceptions; re-raised together below
                            exceptions.append(e)

            if len(exceptions) > 0:
                # join all worker tracebacks into one message, separated by rules
                sep = 79 * '-' + '\n'
                msg = sep.join([traceback.format_exc(e) for e in exceptions])
                raise MultiProcessingError(msg)
        finally:
            self.close_logreceiver()
            # NOTE(review): only the flatten is guarded -- link_hdf5_files is
            # still called when hdf5_link_list is empty, unlike the guarded
            # variant in finish(); verify whether that is intentional.
            if len(hdf5_link_list) > 0:
                hdf5_link_list = reduce(lambda x, y: x + y, hdf5_link_list)
            link_hdf5_files(sorted(hdf5_link_list))

    def abort(self, wait=False):
        """Request cancellation of the running analysis.

        Sets the abort flag under the mutex, terminates the worker pool and,
        when *wait* is True, blocks until the thread has finished. Emits the
        ``aborted`` signal in every case.
        """
        self._mutex.lock()
        try:
            self._abort = True
        finally:
            self._mutex.unlock()
        # timing is essential, flag must be set before terminate is called
        self.pool.terminate()
        if wait:
            self.wait()
        self.aborted.emit()

    def _run(self):
        # clear any abort request left over from a previous run
        self._abort = False