Code Example #1
def _configure(env_work_dir=None):
    """Load configuration settings.

    :return: config dict if configuration was loaded without issues,
             None otherwise. Raises AssertionError if the working
             directory is missing or invalid.
    """
    assert env_work_dir, 'Working directory required.'
    assert os.path.exists(env_work_dir), \
        'working directory invalid: {}'.format(env_work_dir)
    print("Configuring server...")
    secrets_file = os.path.join(env_work_dir, SECRETS_FILE)
    config_file = os.path.join(env_work_dir, CONFIG_FILE)
    try:
        if os.path.isfile(secrets_file):
            with open(secrets_file) as sf:
                secrets_config = sf.read()
        else:
            secrets_config = ""
            log.warning('Secrets file not found. '
                        'Proceeding without it: %s',
                        secrets_file)
        with open(config_file) as cf:
            base_config = cf.read()
            all_config = secrets_config + "\n" + base_config
        config = yaml.safe_load(all_config)
        # configure logging
        logging_config = None
        if config:
            logging_config = config.get('logging', None)
        _configure_logging(logging_config)
        return config
    except Exception as e:
        log.error("Failed to load configuration: %s", str(e))
        stacktrace()
        return None
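
Every example on this page reports errors through a stacktrace() helper whose implementation is not shown. A minimal sketch of what such a helper might look like, assuming it formats the active exception with traceback.format_exc(), logs it at the requested level, and returns the text (so callers can also write log.warning(stacktrace())); the actual ambianic-edge utility may differ:

import logging
import traceback

log = logging.getLogger(__name__)

def stacktrace(level=logging.DEBUG):
    """Log the active exception's stack trace and return it as text."""
    # format_exc() captures the exception currently being handled
    trace = traceback.format_exc()
    log.log(level, trace)
    return trace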
Code Example #2
def _run_gst_service(self):
    log.debug("Starting Gst service process...")
    self._gst_out_queue = self._get_sample_queue()
    self._gst_process_stop_signal = multiprocessing.Event()
    self._gst_process_eos_reached = multiprocessing.Event()
    gst_service = self._get_gst_service_starter()
    self._gst_process = multiprocessing.Process(
        target=gst_service,
        name='Gstreamer Service Process',
        daemon=True,
        kwargs={
            'source_conf': self._source_conf,
            'out_queue': self._gst_out_queue,
            'stop_signal': self._gst_process_stop_signal,
            'eos_reached': self._gst_process_eos_reached,
        })
    self._gst_process.start()
    gst_proc = self._gst_process
    while not self._stop_requested and gst_proc.is_alive():
        # do not use process.join() to avoid deadlock due to shared queue
        try:
            next_sample = self._gst_out_queue.get(timeout=1)
            self._on_new_sample(sample=next_sample)
        except queue.Empty:
            log.debug('no new sample available yet in gst out queue')
        except Exception as e:
            log.warning('AVElement loop caught an error: %s', str(e))
            stacktrace(logging.WARNING)
    log.debug('exiting _run_gst_service')
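
The loop above polls the shared queue with a timeout instead of calling process.join() first, because a child process that has put data on a multiprocessing.Queue blocks on exit until that data is flushed; joining before draining the queue can therefore deadlock. The same pattern in isolation, as a minimal sketch (drain_until_exit and handle_item are illustrative names, not from the original source):

import queue

def drain_until_exit(proc, out_queue, handle_item):
    """Drain a child process's output queue until the child exits."""
    while proc.is_alive():
        try:
            # keep consuming so the child never blocks flushing the queue
            handle_item(out_queue.get(timeout=1))
        except queue.Empty:
            pass  # nothing new yet; re-check process liveness
    proc.join()  # safe now: the child has already exited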
Code Example #3
File: interpreter.py  Project: githwd/ambianic-edge
def run(self):
    log.debug("invoking healing target method %r", self._target)
    try:
        self._target()
    except Exception as e:
        log.warning("Error %r while running healing method %r.", e, self._target)
        log.warning(stacktrace())
    log.debug("invoking healing on_finished method %r", self._on_finished)
    try:
        self._on_finished()
    except Exception as e:
        log.warning(
            "Error %r while calling on_finished method %r.", e, self._on_finished
        )
        log.warning(stacktrace())
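
This run() override appears to belong to a thread that "heals" a failing component: it invokes a recovery callable, then a completion callback, isolating errors in each step. A hypothetical construction and usage, assuming a threading.Thread subclass; HealingThread and restart_pipeline are illustrative names, not from the original source:

import logging
import threading

log = logging.getLogger(__name__)

class HealingThread(threading.Thread):
    """Hypothetical wrapper matching the run() method shown above."""

    def __init__(self, target=None, on_finished=None):
        super().__init__(daemon=True)
        self._target = target
        self._on_finished = on_finished

    # run(self) would be the method from Code Example #3

def restart_pipeline():
    log.info('restarting failed pipeline element...')  # illustrative stub

heal = HealingThread(target=restart_pipeline,
                     on_finished=lambda: log.info('healing finished'))
heal.start()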
Code Example #4
def _get_edgetpu_interpreter(model=None):  # pragma: no cover
    # Note: looking for ideas on how to test Coral EdgeTPU-dependent code
    # in a cloud CI environment such as Travis CI or GitHub
    tf_interpreter = None
    if model:
        try:
            edgetpu_delegate = load_delegate('libedgetpu.so.1.0')
            assert edgetpu_delegate
            tf_interpreter = Interpreter(
                model_path=model, experimental_delegates=[edgetpu_delegate])
            log.debug('EdgeTPU available. Will use EdgeTPU model.')
        except Exception as e:
            log.debug('EdgeTPU error: %r', e)
            stacktrace(logging.DEBUG)
    return tf_interpreter
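
When the delegate cannot be loaded, the function returns None and the caller is expected to fall back to a CPU TFLite interpreter. A hedged sketch of such a fallback, assuming the log and _get_edgetpu_interpreter from the example above are in scope and that Interpreter comes from tflite_runtime.interpreter; get_interpreter and its parameters are illustrative:

from tflite_runtime.interpreter import Interpreter

def get_interpreter(edgetpu_model=None, cpu_model=None):
    """Prefer an EdgeTPU-backed interpreter; fall back to CPU."""
    interpreter = _get_edgetpu_interpreter(model=edgetpu_model)
    if interpreter is None:
        # EdgeTPU unavailable on this device; use the CPU TFLite model
        interpreter = Interpreter(model_path=cpu_model)
    interpreter.allocate_tensors()
    return interpreter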
Code Example #5
    def process_sample(self, **sample):
        """Detect objects in sample image."""
        log.debug("%s received new sample", self.__class__.__name__)
        if not sample:
            # pass through empty samples to next element
            yield None
        else:
            try:
                image = sample["image"]
                thumbnail, tensor_image, inference_result = self.detect(
                    image=image)

                inference_result = self.convert_inference_result(
                    inference_result)
                log.debug("Object detection inference_result: %r",
                          inference_result)
                inf_meta = {
                    "display": "Object Detection",
                }
                # pass on the results to the next connected pipe element
                processed_sample = {
                    "image": image,
                    "thumbnail": thumbnail,
                    "inference_result": inference_result,
                    "inference_meta": inf_meta,
                }
                yield processed_sample
            except Exception as e:
                log.error(
                    'Error "%s" while processing sample. '
                    "Dropping sample: %s",
                    str(e),
                    str(sample),
                )
                log.warning(stacktrace())
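
process_sample() is a generator, so a pipeline can feed each yielded sample into the next connected element. A minimal, hypothetical driver loop illustrating that flow; run_pipeline and the element interface are assumptions, not part of the original source:

def run_pipeline(elements, initial_sample):
    """Push a sample through a chain of pipe elements."""
    samples = [initial_sample]
    for element in elements:
        next_samples = []
        for s in samples:
            # each element may yield zero or more processed samples;
            # None is an empty pass-through, matching the examples above
            for out in element.process_sample(**(s or {})):
                if out is not None:
                    next_samples.append(out)
        samples = next_samples
    return samples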
Code Example #6
def process_sample(self, **sample):
    log.debug("Pipe element %s received new sample with keys %s.",
              self.__class__.__name__, str([*sample]))
    if not sample:
        # pass through empty samples to next element
        yield None
    else:
        try:
            image = sample['image']
            prev_inference_result = sample.get('inference_result', None)
            log.debug("Received sample with inference_result: %s",
                      str(prev_inference_result))
            person_regions = []
            if not prev_inference_result:
                yield None
            else:
                # - crop out top-k person detections
                # - apply face detection to cropped person areas
                # - pass face detections on to next pipe element
                for category, confidence, box in prev_inference_result:
                    if category == 'person' and \
                       confidence >= self._tfengine.confidence_threshold:
                        person_regions.append(box)
                # keep only the top-k person detections
                person_regions = person_regions[:self.topk]
                log.debug('Received %d person boxes for face detection',
                          len(person_regions))
                for box in person_regions:
                    person_image = self.crop_image(image, box)
                    inference_result = self.detect(image=person_image)
                    log.debug('Face detection inference_result: %r',
                              inference_result)
                    processed_sample = {
                        'image': person_image,
                        'inference_result': inference_result
                    }
                    yield processed_sample
        except Exception as e:
            log.warning(
                'Error %r while processing sample. '
                'Dropping sample: %r', e, sample)
            stacktrace(logging.WARNING)
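
The face-detection element crops each detected person region out of the frame before running its own model on it. crop_image() is not shown here; a hedged sketch, assuming PIL images and boxes of normalized (x0, y0, x1, y1) coordinates (the real ambianic-edge helper may use a different box format):

def crop_image(image, box):
    """Crop a PIL image to a box given in normalized [0, 1] coordinates."""
    width, height = image.size
    x0, y0, x1, y1 = box
    # scale normalized coordinates to pixel coordinates for PIL's crop()
    return image.crop((int(x0 * width), int(y0 * height),
                       int(x1 * width), int(y1 * height)))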
Code Example #7
def process_sample(self, **sample):
    log.debug("%s received new sample", self.__class__.__name__)
    if not sample:
        # pass through empty samples to next element
        yield None
    else:
        try:
            image = sample['image']
            inference_result = self.detect(image=image)
            # pass on the results to the next connected pipe element
            processed_sample = {
                'image': image,
                'inference_result': inference_result
            }
            yield processed_sample
        except Exception as e:
            log.error(
                'Error "%s" while processing sample. '
                'Dropping sample: %s', str(e), str(sample))
            stacktrace(logging.WARNING)
Code Example #8
def run(self):
    """Run the gstreamer pipeline service."""
    log.info("Starting %s", self.__class__.__name__)
    self._register_sys_signal_handler()
    self._register_stop_handler()
    try:
        self._gst_loop()
    except Exception as e:
        log.warning('GST loop exited with error: %s', str(e))
        log.warning(stacktrace())
    finally:
        log.debug('Gst service cleaning up before exit...')
        self._gst_cleanup()
        log.debug("Gst service cleaned up and ready to exit.")
    log.info("Stopped %s", self.__class__.__name__)