Ejemplo n.º 1
0
 def enqueue_blobs_thread(self, gpu_id, blob_names):
     """Transfer mini-batches from a mini-batch queue to a BlobsQueue.

     Alternates between the labeled queue (self._minibatch_queue) and the
     unlabeled queue (self._unlabel_minibatch_queue): once self._roidb_tmp
     is set, every odd-numbered iteration pulls from the unlabeled queue.

     Args:
         gpu_id: id of the GPU whose BlobsQueue is fed.
         blob_names: names of the blobs to enqueue.
     """
     # Iteration counter used only to alternate between the two queues
     # (the original name `batch_size` was misleading).
     batch_count = 0
     with self.coordinator.stop_on_exception():
         while not self.coordinator.should_stop():
             # BUG FIX: `qsize` is a method; the original compared the
             # bound method object to 0 (always False), so the warning
             # never fired. It must be called: qsize().
             if self._minibatch_queue.qsize() == 0:
                 logger.warning('Mini-batch queue is empty')
             # Take the unlabeled queue on odd iterations once the
             # temporary roidb is available; the labeled queue otherwise.
             use_labeled = self._roidb_tmp is None or batch_count % 2 == 0
             if use_labeled:
                 blobs = coordinated_get(self.coordinator, self._minibatch_queue)
             else:
                 blobs = coordinated_get(
                     self.coordinator, self._unlabel_minibatch_queue
                 )
             self.enqueue_blobs(gpu_id, blob_names, blobs.values())
             logger.debug(
                 'batch queue size {}'.format(self._minibatch_queue.qsize())
             )
             batch_count += 1
         logger.info('Stopping enqueue thread')
Ejemplo n.º 2
0
 def enqueue_blobs_thread(self, gpu_id, blob_names):
     """Transfer mini-batches from a mini-batch queue to a BlobsQueue.

     Runs until the coordinator signals shutdown, moving each mini-batch
     from the Python-side queue to the GPU's BlobsQueue.

     Args:
         gpu_id: id of the GPU whose BlobsQueue is fed.
         blob_names: names of the blobs to enqueue.
     """
     with self.coordinator.stop_on_exception():
         while not self.coordinator.should_stop():
             # BUG FIX: `qsize` is a method; comparing the bound method to
             # 0 is always False, so the warning never fired. Call it.
             if self._minibatch_queue.qsize() == 0:
                 logger.warning('Mini-batch queue is empty')
             blobs = coordinated_get(self.coordinator, self._minibatch_queue)
             self.enqueue_blobs(gpu_id, blob_names, blobs.values())
             # Per-batch message: log at debug (not info) to avoid log
             # spam in the hot loop, consistent with the sibling variants.
             logger.debug(
                 'batch queue size {}'.format(self._minibatch_queue.qsize())
             )
         logger.info('Stopping enqueue thread')
Ejemplo n.º 3
0
 def enqueue_blobs_thread(self, gpu_id, blob_names):
     """Transfer mini-batches from a mini-batch queue to a BlobsQueue.

     Runs until the coordinator signals shutdown, moving each mini-batch
     from the Python-side queue to the GPU's BlobsQueue.

     Args:
         gpu_id: id of the GPU whose BlobsQueue is fed.
         blob_names: names of the blobs to enqueue.
     """
     with self.coordinator.stop_on_exception():
         while not self.coordinator.should_stop():
             # BUG FIX: `qsize` is a method; comparing the bound method to
             # 0 is always False, so the warning never fired. Call it.
             if self._minibatch_queue.qsize() == 0:
                 logger.warning('Mini-batch queue is empty')
             blobs = coordinated_get(self.coordinator, self._minibatch_queue)
             self.enqueue_blobs(gpu_id, blob_names, blobs.values())
             logger.debug(
                 'batch queue size {}'.format(self._minibatch_queue.qsize())
             )
         logger.info('Stopping enqueue thread')
Ejemplo n.º 4
0
    def enqueue_blobs_thread_mp(self, gpu_id, blob_names):
        """Transfer mini-batches from a mini-batch queue to a BlobsQueue.

        Loops until the coordinator requests a stop, draining the
        multiprocessing mini-batch queue into the GPU's BlobsQueue.
        """
        with self.coordinator.stop_on_exception():
            while not self.coordinator.should_stop():
                src_queue = self._minibatch_queue_mp
                if src_queue.qsize() == 0:
                    logger.warning('minibatch_queue_mp -batch queue is empty')
                blobs = coordinated_get(self.coordinator, src_queue)

                # Occasionally (~1% of batches) dump images for visual
                # debugging when the config flag is enabled.
                should_log_images = cfg.LOG_IMAGES and np.random.random() <= 0.01
                if should_log_images:
                    self.save_im_masks(blobs)

                self.enqueue_blobs(gpu_id, blob_names, blobs.values())
                logger.debug('batch queue size {}'.format(src_queue.qsize()))
            logger.info('Stopping enqueue thread')