Example #1
    def render_svg(self, svg, buf):
        meta = GstVideo.buffer_get_video_meta(buf)
        if meta:
            assert meta.n_planes == 1
            assert meta.width == self.width
            assert meta.height == self.height
            assert meta.stride[0] >= self.min_stride
            stride = meta.stride[0]
        else:
            stride = self.min_stride

        with _gst_buffer_map(buf, Gst.MapFlags.WRITE) as mapped:
            assert len(mapped) >= stride * self.height

            # Fill with transparency.
            ctypes.memset(ctypes.addressof(mapped), 0, ctypes.sizeof(mapped))

            surface = libcairo.cairo_image_surface_create_for_data(
                ctypes.addressof(mapped), int(cairo.FORMAT_ARGB32), self.width,
                self.height, stride)

            # Render the SVG overlay.
            data = svg.encode('utf-8')
            context = libcairo.cairo_create(surface)
            handle = librsvg.rsvg_handle_new_from_data(data, len(data), 0)
            librsvg.rsvg_handle_render_cairo(handle, context)
            librsvg.rsvg_handle_close(handle, 0)
            libgobject.g_object_unref(handle)
            libcairo.cairo_surface_flush(surface)
            libcairo.cairo_surface_destroy(surface)
            libcairo.cairo_destroy(context)
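
Examples #1 and #5 map buffers through a `_gst_buffer_map` context manager that is not shown on this page. The sketch below is one plausible minimal implementation, not the original project's helper: it calls `gst_buffer_map` through ctypes (PyGObject's `Gst.Buffer.map` does not expose a writable pointer) and yields a ctypes byte array so that `len()`, `ctypes.addressof()` and `ctypes.sizeof()` work as used above. Passing `hash(obj)` as the raw pointer mirrors the `hash(memory)` trick in Example #5; the structure layout follows the public GstMapInfo definition.

import contextlib
import ctypes

import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst

# Assumed helper, sketched for illustration only.
libgst = ctypes.CDLL('libgstreamer-1.0.so.0')

class _GstMapInfo(ctypes.Structure):
    _fields_ = [('memory', ctypes.c_void_p),                  # GstMemory *memory
                ('flags', ctypes.c_int),                      # GstMapFlags flags
                ('data', ctypes.POINTER(ctypes.c_byte)),      # guint8 *data
                ('size', ctypes.c_size_t),                    # gsize size
                ('maxsize', ctypes.c_size_t),                 # gsize maxsize
                ('user_data', ctypes.c_void_p * 4),           # gpointer user_data[4]
                ('_gst_reserved', ctypes.c_void_p * 4)]

libgst.gst_buffer_map.argtypes = [ctypes.c_void_p,
                                  ctypes.POINTER(_GstMapInfo),
                                  ctypes.c_int]
libgst.gst_buffer_map.restype = ctypes.c_int
libgst.gst_buffer_unmap.argtypes = [ctypes.c_void_p,
                                    ctypes.POINTER(_GstMapInfo)]
libgst.gst_buffer_unmap.restype = None

@contextlib.contextmanager
def _gst_buffer_map(buffer, flags):
    # Assumption: hash() of the PyGObject wrapper is the underlying GstBuffer
    # pointer, the same trick Example #5 uses for the GstGLMemory pointer.
    ptr = hash(buffer)
    mapping = _GstMapInfo()
    if not libgst.gst_buffer_map(ptr, mapping, int(flags)):
        raise RuntimeError('gst_buffer_map failed')
    try:
        # Yield a ctypes byte array viewing the mapped data.
        yield ctypes.cast(mapping.data,
                          ctypes.POINTER(ctypes.c_byte * mapping.size)).contents
    finally:
        libgst.gst_buffer_unmap(ptr, mapping)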
Example #2
    def __init__(self, buffer, caps=None):
        self.buffer = buffer
        self.caps = caps
        self.caps_str = self.caps.get_structure(0)
        self.video_meta = GstVideo.buffer_get_video_meta(buffer)
        if not self.video_meta:
            # No GstVideoMeta attached to the buffer; fall back to the caps.
            # GstVideo.VideoInfo exposes the same width/height fields.
            self.video_meta = GstVideo.VideoInfo()
            self.video_meta.from_caps(self.caps)
        self.width = self.video_meta.width
        self.height = self.video_meta.height
        self.format_str = self.caps_str.get_string("format")
        self.channels = VideoFrame.FORMAT_CHANNELS[self.format_str]
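
`VideoFrame.FORMAT_CHANNELS` is not shown in this excerpt; it is presumably a class-level mapping from the caps "format" string to the number of interleaved channels per pixel. A hypothetical reconstruction:

    # Class attribute on VideoFrame (hypothetical reconstruction):
    # caps "format" string -> channels per pixel.
    FORMAT_CHANNELS = {
        'GRAY8': 1,
        'RGB': 3,
        'BGR': 3,
        'RGBA': 4,
        'BGRA': 4,
    }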
Example #3
File: gstreamer.py Project: Revanee/posnet
    def inference_loop(self):
        while True:
            with self.condition:
                while not self.gstbuffer and self.running:
                    self.condition.wait()
                if not self.running:
                    break
                gstbuffer = self.gstbuffer
                self.gstbuffer = None

            # Input tensor is expected to be tightly packed, that is,
            # width and stride in pixels are expected to be the same.
            # For the Coral devboard using GPU this will always be true,
            # but when using generic GStreamer CPU based elements the line
            # stride will always be a multiple of 4 bytes in RGB format.
            # In case of mismatch we have to copy the input line by line.
            # For best performance input tensor size should take this
            # into account when using CPU based elements.
            # TODO: Use padded posenet models to avoid this.
            meta = GstVideo.buffer_get_video_meta(gstbuffer)
            assert meta and meta.n_planes == 1
            bpp = 3  # bytes per pixel.
            buf_stride = meta.stride[0]  # 0 for first and only plane.
            inf_stride = meta.width * bpp

            if inf_stride == buf_stride:
                # Fast case, pass buffer as input tensor as is.
                input_tensor = gstbuffer
            else:
                # Slow case, need to pack lines tightly (copy).
                result, mapinfo = gstbuffer.map(Gst.MapFlags.READ)
                assert result
                data_view = memoryview(mapinfo.data)
                input_tensor = bytearray(inf_stride * meta.height)
                src_offset = dst_offset = 0
                for row in range(meta.height):
                    src_end = src_offset + inf_stride
                    dst_end = dst_offset + inf_stride
                    input_tensor[dst_offset:dst_end] = data_view[
                        src_offset:src_end]
                    src_offset += buf_stride
                    dst_offset += inf_stride
                input_tensor = bytes(input_tensor)
                gstbuffer.unmap(mapinfo)

            output = self.inf_callback(input_tensor)
            with self.condition:
                self.output = output
                self.condition.notify_all()
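
The stride handling above comes down to a small piece of arithmetic: a tightly packed RGB row is `width * 3` bytes, while, as the comment notes, generic CPU elements pad each row up to a 4-byte boundary. A standalone illustration with hypothetical numbers:

width, height, bpp = 641, 480, 3          # RGB frame with an odd width
tight_stride = width * bpp                # 1923 bytes per packed row
padded_stride = (tight_stride + 3) & ~3   # 1924, rounded up to 4-byte alignment
# padded_stride != tight_stride, so the loop above takes the slow path and
# copies height rows of tight_stride bytes each into a packed bytearray.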
Example #4
    def run_inference(self, inf_buf, inf_caps):
        start = time.monotonic()
        inference_time, data = self.engine.run_inference(inf_buf)

        # Underlying output tensor is owned by engine and if we want to
        # keep the data around while running another inference we have
        # to make our own copy.
        self.inf_q.put(data.copy())

        if self.save_every_n_frames > 0 and self.frames % self.save_every_n_frames == 0:
            meta = GstVideo.buffer_get_video_meta(inf_buf)
            result, mapinfo = inf_buf.map(Gst.MapFlags.READ)
            assert result
            image = Image.frombytes('RGB', (meta.width, meta.height), mapinfo.data)
            image.save('inf_{:05d}.png'.format(self.frames))
            inf_buf.unmap(mapinfo)
        elapsed = time.monotonic() - start
        self.inf_times.append(elapsed)
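
Note that `Image.frombytes('RGB', (meta.width, meta.height), mapinfo.data)` assumes the mapped rows are tightly packed; if the buffer carries a padded stride (see the discussion in Example #3) the saved image would come out skewed. A hedged variant, assuming `meta.stride[0]` is valid here, passes the stride to Pillow's raw decoder:

from PIL import Image

image = Image.frombytes('RGB', (meta.width, meta.height), mapinfo.data,
                        'raw', 'RGB', meta.stride[0])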
Example #5
    def __init__(self, pool, glupload):
        self.glcontext = glupload.context
        # Acquire a dmabuf-backed buffer from the pool.
        res, self.dmabuf = pool.acquire_buffer()
        assert res == Gst.FlowReturn.OK
        assert GstAllocators.is_dmabuf_memory(self.dmabuf.peek_memory(0))
        with _gst_buffer_map(self.dmabuf, Gst.MapFlags.WRITE) as mapped:
            self.ptr = ctypes.addressof(mapped)
            self.len = ctypes.sizeof(mapped)
            self.clear()
        # Wrap the mapped dmabuf memory in a cairo surface for 2D drawing.
        meta = GstVideo.buffer_get_video_meta(self.dmabuf)
        assert meta
        self.surface = libcairo.cairo_image_surface_create_for_data(
            self.ptr, int(cairo.FORMAT_ARGB32), meta.width, meta.height,
            meta.stride[0])
        self.cairo = libcairo.cairo_create(self.surface)
        # Upload the dmabuf to a GL texture.
        res, self.gl_buffer = glupload.perform_with_buffer(self.dmabuf)
        assert res == GstGL.GLUploadReturn.DONE
        memory = self.gl_buffer.peek_memory(0)
        assert GstGL.is_gl_memory(memory)
        # hash() is used to pass the raw GstGLMemory pointer to the ctypes call.
        self.texture_id = libgstgl.gst_gl_memory_get_texture_id(hash(memory))
        self.sync = GstGL.buffer_add_gl_sync_meta(self.glcontext,
                                                  self.gl_buffer)
Example #6
    def video_meta(self) -> GstVideo.VideoMeta:
        # Returns the GstVideoMeta attached to the buffer, or None if no meta
        # is attached.
        return GstVideo.buffer_get_video_meta(self.__buffer)
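
`buffer_get_video_meta()` returns `None` when the buffer carries no GstVideoMeta, so callers of such an accessor typically fall back to the negotiated caps, as Example #2 does. A hypothetical usage sketch (`frame` and `caps` are assumed names):

meta = frame.video_meta()
if meta is not None:
    width, height, stride = meta.width, meta.height, meta.stride[0]
else:
    # No meta attached: derive the geometry from the caps instead.
    info = GstVideo.VideoInfo()
    info.from_caps(caps)
    width, height, stride = info.width, info.height, info.stride[0]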