async def test_get_image(hass, filename):
    """Test that the has_keyframe metadata matches the media."""
    await async_setup_component(hass, "stream", {"stream": {}})

    video_source = generate_h264_video()

    # Since libjpeg-turbo is not installed on the CI runner, we use a mock
    with patch(
        "homeassistant.components.camera.img_util.TurboJPEGSingleton"
    ) as turbo_jpeg_singleton:
        turbo_jpeg_singleton.instance.return_value = mock_turbo_jpeg()
        stream = create_stream(hass, video_source, {})

    # Recording requires the target path to be whitelisted.
    with patch.object(hass.config, "is_allowed_path", return_value=True):
        recording_task = hass.async_create_task(stream.async_record(filename))
        await recording_task

    # No image has been decoded yet; async_get_image triggers the conversion.
    assert stream._keyframe_converter._image is None
    assert await stream.async_get_image() == EMPTY_8_6_JPEG

    await stream.stop()
async def test_get_image(hass, record_worker_sync):
    """Test that the has_keyframe metadata matches the media."""
    # NOTE(review): a sibling test with the same name exists in this file;
    # if both are in one module the later definition shadows the earlier one
    # at import time — confirm only one revision of this test is kept.
    await async_setup_component(hass, "stream", {"stream": {}})

    h264_source = generate_h264_video()

    # Since libjpeg-turbo is not installed on the CI runner, we use a mock
    with patch(
        "homeassistant.components.camera.img_util.TurboJPEGSingleton"
    ) as turbo_jpeg_singleton:
        turbo_jpeg_singleton.instance.return_value = mock_turbo_jpeg()
        stream = create_stream(hass, h264_source, {})

    # use record_worker_sync to grab output segments
    with patch.object(hass.config, "is_allowed_path", return_value=True):
        await stream.async_record("/example/path")

    # Nothing decoded until async_get_image is called below.
    assert stream._keyframe_converter._image is None

    await record_worker_sync.join()

    assert await stream.async_get_image() == EMPTY_8_6_JPEG

    stream.stop()
async def test_camera_stream_source_configured(hass, run_driver, events):
    """Test a camera that can stream with a configured source.

    Exercises the HomeKit Camera accessory end to end: starting/stopping a
    stream with the configured source, verifying the exact ffmpeg command
    built for HomeKit SRTP output, and snapshot forwarding via the bridge.
    """
    await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}})
    await async_setup_component(
        hass, camera.DOMAIN, {camera.DOMAIN: {"platform": "demo"}}
    )
    await hass.async_block_till_done()

    entity_id = "camera.demo_camera"
    hass.states.async_set(entity_id, None)
    await hass.async_block_till_done()

    # Camera accessory (aid=2) with an explicitly configured stream source
    # and audio support enabled.
    acc = Camera(
        hass,
        run_driver,
        "Camera",
        entity_id,
        2,
        {CONF_STREAM_SOURCE: "/dev/null", CONF_SUPPORT_AUDIO: True},
    )
    # Non-camera accessory (aid=4) used below to verify the bridge rejects
    # snapshot requests for accessories that are not cameras.
    not_camera_acc = Switch(
        hass,
        run_driver,
        "Switch",
        entity_id,
        4,
        {},
    )
    bridge = HomeBridge("hass", run_driver, "Test Bridge")
    bridge.add_accessory(acc)
    bridge.add_accessory(not_camera_acc)

    await acc.run()

    assert acc.aid == 2
    assert acc.category == 17  # Camera

    await _async_setup_endpoints(hass, acc)
    working_ffmpeg = _get_working_mock_ffmpeg()
    session_info = acc.sessions[MOCK_START_STREAM_SESSION_UUID]

    # stream_source patched to None: the entity provides no source, so the
    # accessory falls back to the configured CONF_STREAM_SOURCE ("/dev/null"),
    # which is asserted via input_source below.
    with patch(
        "homeassistant.components.demo.camera.DemoCamera.stream_source",
        return_value=None,
    ), patch(
        "homeassistant.components.homekit.type_cameras.HAFFmpeg",
        return_value=working_ffmpeg,
    ):
        await _async_start_streaming(hass, acc)
        await _async_stop_all_streams(hass, acc)

    # Exact ffmpeg output spec for the HomeKit SRTP video+audio streams;
    # {v_ssrc}/{a_ssrc} are filled from the negotiated session_info.
    expected_output = (
        "-map 0:v:0 -an -c:v libx264 -profile:v high -tune zerolatency -pix_fmt "
        "yuv420p -r 30 -b:v 299k -bufsize 1196k -maxrate 299k -payload_type 99 -ssrc {v_ssrc} -f "
        "rtp -srtp_out_suite AES_CM_128_HMAC_SHA1_80 -srtp_out_params "
        "zdPmNLWeI86DtLJHvVLI6YPvqhVeeiLsNtrAgbgL "
        "srtp://192.168.208.5:51246?rtcpport=51246&localrtcpport=51246&pkt_size=1316 -map 0:a:0 "
        "-vn -c:a libopus -application lowdelay -ac 1 -ar 24k -b:a 24k -bufsize 96k -payload_type "
        "110 -ssrc {a_ssrc} -f rtp -srtp_out_suite AES_CM_128_HMAC_SHA1_80 -srtp_out_params "
        "shnETgfD+7xUQ8zRdsaytY11wu6CO73IJ+RZVJpU "
        "srtp://192.168.208.5:51108?rtcpport=51108&localrtcpport=51108&pkt_size=188"
    )

    working_ffmpeg.open.assert_called_with(
        cmd=[],
        input_source="-i /dev/null",
        output=expected_output.format(**session_info),
        stdout_pipe=False,
        extra_cmd="-hide_banner -nostats",
        stderr_pipe=True,
    )

    # Second streaming round: entity now reports its own source, so the
    # configured fallback is not used.
    await _async_setup_endpoints(hass, acc)
    working_ffmpeg = _get_working_mock_ffmpeg()
    session_info = acc.sessions[MOCK_START_STREAM_SESSION_UUID]

    with patch(
        "homeassistant.components.demo.camera.DemoCamera.stream_source",
        return_value="rtsp://example.local",
    ), patch(
        "homeassistant.components.homekit.type_cameras.HAFFmpeg",
        return_value=working_ffmpeg,
    ):
        await _async_start_streaming(hass, acc)
        await _async_stop_all_streams(hass, acc)
        # Calling a second time should not throw
        await _async_stop_all_streams(hass, acc)

    # Mock turbojpeg: first decode yields a 16x12 image, second a 300x200 one,
    # matching the requested snapshot dimensions below.
    turbo_jpeg = mock_turbo_jpeg(
        first_width=16, first_height=12, second_width=300, second_height=200
    )
    with patch("turbojpeg.TurboJPEG", return_value=turbo_jpeg):
        TurboJPEGSingleton()
        # Snapshot directly from the camera accessory.
        assert await acc.async_get_snapshot(
            {"aid": 2, "image-width": 300, "image-height": 200}
        )
        # Verify the bridge only forwards async_get_snapshot for
        # cameras and valid accessory ids
        assert await bridge.async_get_snapshot(
            {"aid": 2, "image-width": 300, "image-height": 200}
        )
        # aid=3 does not exist on the bridge.
        with pytest.raises(ValueError):
            assert await bridge.async_get_snapshot(
                {"aid": 3, "image-width": 300, "image-height": 200}
            )
        # aid=4 exists but is a Switch, not a camera.
        with pytest.raises(ValueError):
            assert await bridge.async_get_snapshot(
                {"aid": 4, "image-width": 300, "image-height": 200}
            )