def test_ffmpeg(self):
    """Runs two FFmpeg subprocesses: the first encodes raw 24-bit frames
    into a raw MPEG-4 video stream, the second decodes that stream and
    feeds the reader with raw 24-bit frames again. The latter triggers
    detection of simple shapes on the image. The detected shapes are
    counted, and reaching the expected count signals the end of the test.
    """
    width = 480
    height = 360
    encoder_frame_buffer = FrameBuffer(10, width, height)
    decoder_frame_buffer = FrameBuffer(10, width, height)
    encoder_frame_queue = Queue(1)
    decoder_frame_queue = Queue(1)
    artist_subscribe_queue = Queue(1)
    decoder_subscribe_queue = Queue(1)
    log_queue = Queue()
    getLogger().addHandler(QueueHandler(log_queue))
    stop_process_event = Event()
    latch = CountDownLatch(100)
    encoder = FFmpegEncoder(
        "encoder", stop_process_event, log_queue, encoder_frame_queue,
        encoder_frame_buffer,
        ['ffmpeg', '-hide_banner', '-loglevel', 'panic',
         '-f', 'rawvideo', '-pix_fmt', 'rgb24',
         '-s', '{}x{}'.format(width, height), '-i', '-',
         '-an', '-f', 'm4v', '-'],
        None, PIPE)
    decoder = FFmpegDecoder(
        "decoder", stop_process_event, log_queue, decoder_frame_queue,
        decoder_frame_buffer,
        ['ffmpeg', '-hide_banner', '-loglevel', 'panic',
         '-f', 'm4v', '-i', '-',
         '-f', 'rawvideo', '-pix_fmt', 'rgb24', '-'],
        None, PIPE)
    artist = Artist("artist", stop_process_event, log_queue,
                    encoder_frame_queue, encoder_frame_buffer)
    conductor = WorkPublish(Thread, "conductor", stop_process_event,
                            log_queue, artist_subscribe_queue,
                            encoder_frame_buffer)
    processes = [
        LogHandler(Thread, "logger", stop_process_event, log_queue,
                   filename=None),
        artist,
        conductor,
        encoder,
        decoder,
        Copy(Thread, "copier", stop_process_event, log_queue,
             encoder.stdout, decoder.stdin),
        ShapeDetector(Process, "detector", stop_process_event, log_queue,
                      decoder_frame_queue, decoder_frame_buffer),
        ShapeCounter(Thread, "counter", stop_process_event, log_queue,
                     decoder_subscribe_queue, decoder_frame_buffer, latch)
    ]
    artist.subscribe(artist_subscribe_queue)
    decoder.subscribe(decoder_subscribe_queue)
    for process in processes:
        process.start()
    try:
        self.assertTrue(latch.wait(15))
    finally:
        stop_process_event.set()
        for process in processes:
            process.join(30)

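# A minimal sketch of the encoder pipe exercised above: one raw RGB24 frame is
# written to an ffmpeg subprocess over stdin and the encoded MPEG-4 bytes are
# read back from stdout. Illustrative only: it assumes ffmpeg is on PATH and
# bypasses the project's FrameBuffer/Queue plumbing entirely.
from subprocess import PIPE, Popen


def encode_single_frame(width=480, height=360):
    frame = bytes(width * height * 3)  # one black RGB24 frame
    encoder = Popen(
        ['ffmpeg', '-hide_banner', '-loglevel', 'panic',
         '-f', 'rawvideo', '-pix_fmt', 'rgb24',
         '-s', '{}x{}'.format(width, height), '-i', '-',
         '-an', '-f', 'm4v', '-'],
        stdin=PIPE, stdout=PIPE)
    encoded, _ = encoder.communicate(frame)  # close stdin, drain stdout
    return encoded
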
def test_watch_dog(self):
    """Runs the watchdog and a child thread that keeps stumbling, forcing
    the watchdog to restart it. Counts the number of restarts within a
    timeout.
    """
    log_queue = Queue()
    stop_process_event = Event()
    getLogger().addHandler(QueueHandler(log_queue))
    latch = CountDownLatch(3)
    log_handler = LogHandler(Thread, "logger", stop_process_event,
                             log_queue, filename=None)
    watch_dog = WatchDog("watchdog", stop_process_event, log_queue, 0.1,
                         kwargs={'log_level': ERROR})
    stumble = Stumble(Thread, "tumbler", stop_process_event, log_queue,
                      0.1, latch)
    log_handler.start()
    watch_dog.start()
    stumble.start()
    watch_dog.add_child(log_handler)
    watch_dog.add_child(stumble)
    self.assertTrue(latch.wait(5))
    stop_process_event.set()
    watch_dog.remove_child(log_handler)
    watch_dog.remove_child(stumble)
    stumble.join(30)
    watch_dog.join(30)
    log_handler.join(30)

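# A hypothetical stand-in for the restart loop the WatchDog performs above:
# a supervisor polls its children and replaces any that have died until the
# stop event is set. The real WatchDog class is more involved; the factory
# names below are illustrative assumptions.
def supervise(thread_factories, stop_event, interval=0.1):
    children = {name: factory() for name, factory in thread_factories.items()}
    for child in children.values():
        child.start()
    while not stop_event.wait(interval):
        for name, child in list(children.items()):
            if not child.is_alive():  # the child stumbled, start a fresh one
                children[name] = thread_factories[name]()
                children[name].start()
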
def test_shape_detection(self):
    """Tests TensorFlow object detection using a model trained on simple
    geometric shapes. The detector recognises shapes drawn on a frame
    image, the sieve filters out detections with confidence below 50%, and
    finally the shapes are counted, signalling the end of the test.
    """
    with EnvironmentVarGuard() as env:
        env.set("TF_CPP_MIN_LOG_LEVEL", "3")
        env.set("CORAL_VISIBLE_DEVICES", "")
        env.set("CUDA_VISIBLE_DEVICES", "")
        frame_buffer = FrameBuffer(10, 100, 100)
        frame_queue = Queue(1)
        subscriber_queue = Queue(1)
        detection_sieve_queue = Queue(1)
        log_queue = Queue()
        getLogger().addHandler(QueueHandler(log_queue))
        stop_process_event = Event()
        latch = CountDownLatch(100)
        artist = Artist("artist", stop_process_event, log_queue,
                        frame_queue, frame_buffer)
        detection_sieve = DetectionSieve("sieve", stop_process_event,
                                         log_queue, detection_sieve_queue,
                                         frame_buffer,
                                         self._create_filters(),
                                         RateLimiter())
        processes = [
            artist,
            detection_sieve,
            LogHandler(Thread, "logger", stop_process_event, log_queue,
                       filename=None),
            ShapeCounter(Thread, "counter", stop_process_event, log_queue,
                         subscriber_queue, frame_buffer, latch)
        ]
        processes += create_object_detectors(
            Process, stop_process_event, log_queue, frame_queue,
            {artist.name: frame_buffer}, self._get_model_path())
        artist.subscribe(detection_sieve_queue)
        detection_sieve.subscribe(subscriber_queue)
        for process in processes:
            process.start()
        try:
            self.assertTrue(latch.wait(15))
        finally:
            stop_process_event.set()
            for process in processes:
                process.join(30)

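# A hypothetical sketch of the confidence sieve described in the docstring:
# detections scoring 50% or less are dropped before they reach the counter.
# The Detection type and its fields are assumptions made for illustration,
# not the project's actual data model.
from typing import List, NamedTuple


class Detection(NamedTuple):
    label: int
    confidence: float


def sieve(detections: List[Detection],
          threshold: float = 0.5) -> List[Detection]:
    return [d for d in detections if d.confidence > threshold]
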
def create_config(self, delegate_class, distribution_queue, read_config,
                  work_config):
    """Prepares the configuration of a test.

    :param delegate_class: Process or Thread
    :param distribution_queue: queue to return the distribution after the
        test finishes
    :param read_config: FPS configuration of the readers
    :param work_config: FPS configuration of the workers
    :return: lists of log handlers, readers, workers and queues, plus the
        semaphores keyed by reader name
    """
    log_queue = mp.Queue()
    getLogger().addHandler(QueueHandler(log_queue))
    log_handler = LogHandler(delegate_class, "LogHandler",
                             self.stop_process_event, log_queue,
                             filename=None)
    frame_queue = mp.Queue()
    all_semaphores = {}
    readers = []
    for pos in range(len(read_config)):
        reader_name = "reader {}".format(pos)
        reader_queue_semaphore = mp.BoundedSemaphore(1)
        all_semaphores[reader_name] = reader_queue_semaphore
        decoder_queue = BalancedQueue(
            frame_queue, {reader_name: reader_queue_semaphore},
            reader_name)
        readers.append(
            DummyRead(delegate_class, reader_name, self.stop_process_event,
                      log_queue, decoder_queue, read_config[pos]))
    workers = []
    for pos in range(len(work_config)):
        workers.append(
            DummyWork(delegate_class, "worker {}".format(pos),
                      self.stop_process_event, log_queue,
                      BalancedQueue(frame_queue, all_semaphores),
                      distribution_queue, work_config[pos]))
    return [log_handler], readers, workers, \
        [log_queue, frame_queue], all_semaphores

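# A hypothetical sketch of the balancing idea behind BalancedQueue: each
# reader owns a BoundedSemaphore(1), so at most one of its frames is in
# flight on the shared queue, and a worker releases the owner's semaphore
# once the frame has been handled. Names and behaviour here are assumptions,
# not the project's actual implementation.
import multiprocessing as mp


def make_balanced(reader_names):
    shared_queue = mp.Queue()
    semaphores = {name: mp.BoundedSemaphore(1) for name in reader_names}
    return shared_queue, semaphores


def put_balanced(shared_queue, semaphores, owner, frame):
    semaphores[owner].acquire()  # blocks while the previous frame is pending
    shared_queue.put((owner, frame))


def get_balanced(shared_queue, semaphores, timeout=1):
    owner, frame = shared_queue.get(timeout=timeout)
    semaphores[owner].release()  # let the owner submit its next frame
    return owner, frame
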
def test_numpy_stream(self):
    """Tests shared memory usage across processes. One process fills a
    frame buffer with random data, while another performs simple math
    operations on the given random image and compares the outcome with the
    predicted result.
    """
    frame_buffer = FrameBuffer(5, 10, 10, 1, 'd')
    frame_queue = Queue()
    log_queue = Queue()
    getLogger().addHandler(QueueHandler(log_queue))
    stop_process_event = Event()
    log_handler = LogHandler(Process, "logger", stop_process_event,
                             log_queue, filename=None)
    reader = NumpyRead(Process, "reader", stop_process_event, log_queue,
                       frame_queue, frame_buffer)
    worker = NumpyWork(Process, "worker", stop_process_event, log_queue,
                       frame_queue, frame_buffer)
    log_handler.start()
    reader.start()
    worker.start()
    try:
        # Wait till the last frame in the buffer is used.
        self.assertTrue(frame_buffer.frames[-1].latch.wait(
            State.PUBLISH, 5))
    finally:
        stop_process_event.set()
        reader.join(30)
        worker.join(30)
        log_handler.join(30)
    self.assertEqual(len(frame_buffer.frames),
                     frame_buffer.status[State.PUBLISH])
    self.assertEqual(first=len(frame_buffer.frames),
                     second=worker.matches,
                     msg="Not all frames were processed")

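# A minimal sketch of the shared-memory idea this test relies on, using only
# the standard library and NumPy: a NumPy view is mapped onto a
# multiprocessing.shared_memory block, the parent fills it with random data
# and a child process doubles it in place, so the parent can compare against
# the predicted result. The project's FrameBuffer and latch machinery are
# bypassed on purpose.
import numpy as np
from multiprocessing import Process, shared_memory


def _double_in_place(name, shape):
    shm = shared_memory.SharedMemory(name=name)
    view = np.ndarray(shape, dtype='d', buffer=shm.buf)
    view *= 2.0
    del view  # release the exported buffer before closing the mapping
    shm.close()


def shared_memory_demo(shape=(10, 10)):
    shm = shared_memory.SharedMemory(create=True,
                                     size=int(np.prod(shape)) * 8)
    frame = np.ndarray(shape, dtype='d', buffer=shm.buf)
    frame[:] = np.random.random(shape)
    expected = frame * 2.0  # copy of the predicted result
    worker = Process(target=_double_in_place, args=(shm.name, shape))
    worker.start()
    worker.join()
    assert np.allclose(frame, expected)
    del frame
    shm.close()
    shm.unlink()
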
def test_snapshot(self):
    width = 100
    height = 100
    frame_buffer = FrameBuffer(10, width, height)
    frame_queue = Queue(1)
    log_queue = Queue()
    getLogger().addHandler(QueueHandler(log_queue))
    stop_process_event = Event()
    effect = MockEffect()
    effect.draw_rect = MagicMock()
    snapshot = Snapshot("snapshot", stop_process_event, log_queue,
                        frame_queue, frame_buffer,
                        self._create_detect_config(width, height),
                        [effect])
    processes = [
        snapshot,
        LogHandler(Thread, "logger", stop_process_event, log_queue,
                   filename=None)
    ]
    for process in processes:
        process.start()
    try:
        frame_index = 0
        frame = frame_buffer.frames[frame_index]
        frame.header.detections[0].label = COCO_CLASSES.index('book')
        frame.header.detections[0].bounding_box.x_min = 1
        frame.header.detections[0].bounding_box.y_min = 2
        frame.header.detections[0].bounding_box.x_max = 3
        frame.header.detections[0].bounding_box.y_max = 4
        frame.header.epoch = time()
        frame.latch.next()
        frame.latch.next()
        payload = Payload(None, frame_index)
        frame_queue.put(payload)
        self.assertTrue(
            frame.latch.wait_for(State.READY, stop_process_event.is_set,
                                 10))
        with self.assertRaises(AssertionError):
            snapshot.get('person')
        self.assertIsNotNone(snapshot.get('book'))
        effect.draw_rect.assert_called_with(1, 2, 3, 4)
    finally:
        stop_process_event.set()
        for process in processes:
            process.join(30)

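# The mock assertion pattern used above, reduced to its essentials: a
# MagicMock stands in for the effect's draw_rect method, and the test checks
# the exact bounding-box arguments it was called with. Purely illustrative.
from unittest.mock import MagicMock


def mock_assertion_example():
    effect = MagicMock()
    effect.draw_rect(1, 2, 3, 4)
    effect.draw_rect.assert_called_with(1, 2, 3, 4)
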
def prepare_shape_model(groups):
    frame_buffer = FrameBuffer(10, 300, 300)
    frame_queue = Queue(1)
    subscriber_queue = Queue(1)
    log_queue = CountableQueue()
    getLogger().addHandler(QueueHandler(log_queue))
    stop_logging_event = Event()
    log_handler = LogHandler(Thread, "logger", stop_logging_event,
                             log_queue, filename=None)
    log_handler.start()
    for group, count in groups.items():
        path = os.path.abspath(
            os.path.join(
                Path(__file__).parent.parent.parent.parent,
                'build/test/model'))
        os.makedirs(os.path.join(path, "images", group), exist_ok=True)
        os.makedirs(os.path.join(path, "annotations"), exist_ok=True)
        stop_process_event = Event()
        latch = CountDownLatch(count)
        artist = Artist("artist", stop_process_event, log_queue,
                        frame_queue, frame_buffer)
        processes = [
            artist,
            ShapeDetector(Thread, "detector", stop_process_event,
                          log_queue, frame_queue, frame_buffer),
            Classifier(Thread, "classifier", stop_process_event, log_queue,
                       subscriber_queue, frame_buffer, path, group, latch,
                       kwargs={'log_level': DEBUG})
        ]
        artist.subscribe(subscriber_queue)
        for process in processes:
            process.start()
        try:
            latch.wait()
        finally:
            stop_process_event.set()
            for process in processes:
                process.join(30)
    stop_logging_event.set()
    log_queue.join()

class _BasicApp:

    @property
    def app_name(self):
        return Path(__file__).parent.stem

    def _parse_commandline_arguments(self):
        parser = ArgumentParser(
            description='Object detection for video surveillance')
        parser.add_argument('-c', "--config", dest='config_file_name',
                            metavar='CONFIG_FILE_NAME', required=True,
                            help='configuration file')
        parser.add_argument("--model-path", dest='model_path',
                            metavar='MODEL_PATH',
                            default=path.join(getcwd(), 'model'),
                            help='path to the model directory')
        parser.add_argument("--log-path", dest='log_path',
                            metavar='LOG_PATH', default=getcwd(),
                            help='path to the log file directory')
        parser.add_argument('--log-level', dest='log_level',
                            metavar='LOG_LEVEL', type=str,
                            choices=['debug', 'info', 'warning', 'error',
                                     'fatal'],
                            default=environ.get('LOG_LEVEL', 'info'),
                            help='log level')
        self._args = parser.parse_args()
        self._args.log_level = self._args.log_level.upper()

    def _install_signal_handler(self):
        self._stop_main_event = threading.Event()
        for s in [SIGINT, SIGTERM]:
            signal(s, partial(lambda stop_event, *_args: stop_event.set(),
                              self._stop_main_event))

    def _init_logging(self):
        self._stop_logging_event = threading.Event()
        self._log_queue = CountableQueue()
        self._logger = getLogger()
        self._logger.addHandler(QueueHandler(self._log_queue))
        self._logger.setLevel(self._args.log_level)
        filename = path.join(self._args.log_path,
                             '{}.log'.format(self.app_name))
        self._log_handler = LogHandler(
            threading.Thread, "logger", self._stop_logging_event,
            self._log_queue, filename=filename,
            kwargs={'log_level': self._args.log_level})
        self._log_handler.start()

    def _stop_logging(self):
        self._log_queue.join()
        self._stop_logging_event.set()
        self._log_handler.join(30)

    def _read_config(self):
        self._config_path = path.dirname(self._args.config_file_name)
        self._config = normalize(
            validate(parse(self._args.config_file_name)),
            self._config_path)

    def _init_watch_dog(self):
        self._stop_watch_dog_event = threading.Event()
        self._watch_dog = WatchDog(
            "watchdog", self._stop_watch_dog_event, self._log_queue,
            kwargs={'log_level': self._args.log_level})
        self._watch_dog.add_child(self._log_handler)
        self._watch_dog.start()

    def _stop_watch_dog(self):
        self._stop_watch_dog_event.set()
        self._watch_dog.join(30)

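# A sketch of the same queue-based logging pattern built only from the
# standard library: the root logger publishes records to a queue through
# QueueHandler and a QueueListener drains that queue into a file handler.
# The project's CountableQueue/LogHandler pair adds record counting and a
# graceful shutdown on top of this idea; the function below is illustrative.
from logging import FileHandler, getLogger
from logging.handlers import QueueHandler, QueueListener
from queue import Queue


def init_queue_logging(filename, log_level='INFO'):
    log_queue = Queue()
    logger = getLogger()
    logger.addHandler(QueueHandler(log_queue))
    logger.setLevel(log_level)
    listener = QueueListener(log_queue, FileHandler(filename))
    listener.start()
    return listener  # call listener.stop() on shutdown
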
def test_subscribe(self):
    """Tests the coherence among the processes sharing the same frame
    buffer. The queues tying the processes together are of limited size
    (1), and the workers are a bit slower than the reader. The reader has
    to drop frames whenever any of the workers is still busy handling the
    previous frame. The frame state latch must return to the READY state
    if neither the main worker nor a subscriber can pick up the next
    frame. This ensures the buffer never overflows.
    """
    width = 500
    height = 500
    frame_buffer_in = FrameBuffer(10, width, height)
    frame_buffer_out = FrameBuffer(10, width, height)
    frame_queue = Queue(1)
    subscriber_queue = Queue(1)
    subscriber_queue1 = Queue(1)
    subscriber_queue2 = Queue(1)
    subscriber_queue3 = Queue(1)
    log_queue = Queue()
    getLogger().addHandler(QueueHandler(log_queue))
    stop_process_event = Event()
    latch = CountDownLatch(100)
    effects = [CopyHeaderEffect(), CopyImageEffect(), DrawEffect()]
    artist = Artist("artist", stop_process_event, log_queue, frame_queue,
                    frame_buffer_in)
    conductor = VisualEffects("conductor", stop_process_event, log_queue,
                              subscriber_queue, frame_buffer_in,
                              frame_buffer_out, effects)
    processes = [
        artist,
        conductor,
        LogHandler(Thread, "logger", stop_process_event, log_queue,
                   filename=None),
        ShapeDetector(Process, "detector", stop_process_event, log_queue,
                      frame_queue, frame_buffer_in),
        ShapeCounter(Thread, "counter1", stop_process_event, log_queue,
                     subscriber_queue1, frame_buffer_out, latch),
        ShapeCounter(Thread, "counter2", stop_process_event, log_queue,
                     subscriber_queue2, frame_buffer_out, latch),
        ShapeCounter(Thread, "counter3", stop_process_event, log_queue,
                     subscriber_queue3, frame_buffer_out, latch)
    ]
    artist.subscribe(subscriber_queue)
    conductor.subscribe(subscriber_queue1)
    conductor.subscribe(subscriber_queue2)
    conductor.subscribe(subscriber_queue3)
    for process in processes:
        process.start()
    try:
        self.assertTrue(latch.wait(15))
    finally:
        stop_process_event.set()
        for process in processes:
            process.join(30)
        conductor.unsubscribe(subscriber_queue1)
        conductor.unsubscribe(subscriber_queue2)
        conductor.unsubscribe(subscriber_queue3)
        artist.unsubscribe(subscriber_queue)

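# A hypothetical sketch of the frame-dropping behaviour the docstring
# describes: the reader offers a frame to every bounded subscriber queue
# without blocking and simply skips the subscribers that are still busy; if
# nobody accepts it, the frame goes straight back to READY. The helper name
# and return value are illustrative assumptions, not the project's API.
from queue import Full


def offer_frame(subscriber_queues, frame_index):
    delivered = 0
    for subscriber_queue in subscriber_queues:
        try:
            subscriber_queue.put_nowait(frame_index)
            delivered += 1
        except Full:
            pass  # subscriber still busy with the previous frame
    return delivered  # 0 means the frame is dropped and returns to READY
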