def stream(self) -> openni2.VideoStream:
    """Lazy load for stream."""
    if self._stream is None:
        self._stream = openni2.VideoStream(
            openni2.Device(self.name.encode()), self.player_type)
        self._stream.start()
    return self._stream

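# A minimal sketch of the constructor state the lazy stream() accessor above
# relies on (self.name, self.player_type, self._stream). The class name
# DeviceWrapper and the argument names are assumptions for illustration only.
from primesense import openni2


class DeviceWrapper:
    def __init__(self, name: str, player_type):
        openni2.initialize()
        self.name = name                # device URI or path to a recording
        self.player_type = player_type  # e.g. openni2.SENSOR_DEPTH
        self._stream = None             # created on the first call to stream()
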
def getVideo(file_name):
    OP.initialize()
    framesColor = []
    framesDepth = []
    file = OP.Device(file_name)
    # Synchronize depth and color frame timestamps while reading the recording
    file.set_depth_color_sync_enabled(True)
    cStream = OP.VideoStream(file, OP.SENSOR_COLOR)
    dStream = OP.VideoStream(file, OP.SENSOR_DEPTH)
    cStream.start()
    dStream.start()
    # Read every recorded frame from both streams
    for i in range(cStream.get_number_of_frames()):
        framesColor.append(cStream.read_frame())
        framesDepth.append(dStream.read_frame())
        print(framesDepth[-1]._frame)  # debug print of the raw frame struct
    cStream.stop()
    dStream.stop()
    return framesColor, framesDepth

def get_video(file_name, progress_bar):
    openni2.initialize()
    frames_color = []
    frames_depth = []
    file = openni2.Device(file_name)
    # Open the color and depth streams for reading data from the file
    c_stream = openni2.VideoStream(file, openni2.SENSOR_COLOR)
    d_stream = openni2.VideoStream(file, openni2.SENSOR_DEPTH)
    c_stream.start()
    d_stream.start()
    progress_bar.setValue(1)
    progress_bar.setVisible(True)
    # One progress-bar step per percent of frames; at least 1 to avoid division by zero
    per_cent = max(1, d_stream.get_number_of_frames() // 100)
    for i in range(d_stream.get_number_of_frames()):
        # Append the loaded depth frames to the shared list (frames_depth)
        depth_frame = d_stream.read_frame()
        # From https://stackoverflow.com/a/55539208/8245749
        depth_frame_data = depth_frame.get_buffer_as_uint16()
        depth_img = np.frombuffer(depth_frame_data, dtype=np.uint16)
        img8 = (depth_img / 256).astype(np.uint8)
        img8 = ((img8 - img8.min()) / (img8.ptp() / 255)).astype(np.uint8)
        frames_depth.append(img8.repeat(4))
        # Append the loaded color frames to the shared list (frames_color)
        color_frame = c_stream.read_frame()
        frames_color.append(color_frame)
        progress_bar.setValue(i // per_cent)
    c_stream.stop()
    d_stream.stop()
    return frames_color, frames_depth

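# Standalone sketch of the 16-bit-to-8-bit depth conversion used inside
# get_video() above: scale down to 8 bits, contrast-stretch to 0..255, then
# repeat each value four times (presumably one byte per channel of a
# 4-channel image). The depth_values array below is synthetic, for
# illustration only.
import numpy as np

depth_values = np.array([400, 800, 1200, 4000], dtype=np.uint16)  # fake depths in mm
img8 = (depth_values / 256).astype(np.uint8)                  # drop to the 8-bit range
img8 = ((img8 - img8.min()) / (np.ptp(img8) / 255)).astype(np.uint8)  # stretch contrast
four_channel = img8.repeat(4)                                 # duplicate per channel
print(four_channel.reshape(-1, 4))
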
def __init__(self, video_path, is_color, slider):
    super().__init__()
    self.is_color = is_color
    self.is_paused = False
    self.current_frame = 2  # cap2.oni only plays back starting from the 2nd frame
    self.slider: QSlider = slider
    self.dev = openni2.Device.open_file(video_path.encode('utf-8'))
    self.pbs = openni2.PlaybackSupport(self.dev)
    self.image_stream = openni2.VideoStream(
        self.dev, SENSOR_COLOR if is_color else SENSOR_DEPTH)
    self.image_stream.start()

def set_sensor(self, is_color):
    self.is_color = is_color
    self.image_stream.stop()
    self.image_stream = openni2.VideoStream(
        self.dev, SENSOR_COLOR if is_color else SENSOR_DEPTH)
    self.image_stream.start()

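# A hedged sketch of reading one frame from the stream that set_sensor() above
# (re)creates; read_one_frame is an assumed helper name, not part of the
# original class.
import numpy as np
from primesense import openni2


def read_one_frame(stream: openni2.VideoStream, is_color: bool) -> np.ndarray:
    frame = stream.read_frame()
    if is_color:
        # Color frames arrive as 8-bit RGB triplets
        buf = frame.get_buffer_as_triplet()
        return np.frombuffer(buf, dtype=np.uint8).reshape(frame.height, frame.width, 3)
    # Depth frames arrive as one 16-bit value per pixel
    buf = frame.get_buffer_as_uint16()
    return np.frombuffer(buf, dtype=np.uint16).reshape(frame.height, frame.width)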