def test_deploy_async(
    self,
    caplog: LogCaptureFixture,
    fx_deployments: YamlLoaderDeployment,
    mocker: MockerFixture,
    runway_context: MockRunwayContext,
) -> None:
    """Test deploy async."""
    caplog.set_level(logging.INFO, logger="runway")
    # Stub out concurrent.futures and the process pool so no real
    # subprocesses are spawned by the parallel deploy.
    futures_stub = mocker.patch(f"{MODULE}.concurrent.futures")
    pool = MagicMock()
    pool.__enter__.return_value = pool
    futures_stub.ProcessPoolExecutor.return_value = pool
    mocker.patch.object(Module, "use_async", True)
    get_context_stub = mocker.patch("multiprocessing.get_context")
    module = Module(
        context=runway_context,
        definition=fx_deployments.load("simple_parallel_module").modules[0],
    )
    assert not module.deploy()
    expected_msg = (
        "parallel_parent:processing modules in parallel... (output "
        "will be interwoven)"
    )
    assert expected_msg in caplog.messages
    # The pool must be built with a "fork" multiprocessing context.
    get_context_stub.assert_called_once_with("fork")
    futures_stub.ProcessPoolExecutor.assert_called_once_with(
        max_workers=runway_context.env.max_concurrent_modules,
        mp_context=get_context_stub.return_value,
    )
    # Each child module is submitted to the pool and its result awaited.
    pool.submit.assert_has_calls(
        [
            call(module.child_modules[0].run, "deploy"),
            call(module.child_modules[1].run, "deploy"),
        ]
    )
    assert pool.submit.return_value.result.call_count == 2
def test_deploy(self, fx_deployments, monkeypatch, runway_context):
    """Test deploy."""
    # Replace Module.run so deploy() delegates to a recordable stub.
    run_stub = MagicMock()
    monkeypatch.setattr(Module, "run", run_stub)
    module = Module(
        context=runway_context,
        definition=fx_deployments.load("min_required").modules[0],
    )
    assert module.deploy()
    run_stub.assert_called_once_with("deploy")
def test_deploy(
    self,
    fx_deployments: YamlLoaderDeployment,
    mocker: MockerFixture,
    runway_context: MockRunwayContext,
) -> None:
    """Test deploy."""
    # Patch Module.run so deploy() only needs to forward the action name.
    run_stub = mocker.patch.object(Module, "run")
    module = Module(
        context=runway_context,
        definition=fx_deployments.load("min_required").modules[0],
    )
    assert module.deploy()
    run_stub.assert_called_once_with("deploy")
def test_deploy_sync(self, caplog, fx_deployments, monkeypatch, runway_context):
    """Test deploy sync."""
    caplog.set_level(logging.INFO, logger="runway")
    run_stub = MagicMock()
    # Force the sequential code path and capture each child run() call.
    monkeypatch.setattr(Module, "use_async", False)
    monkeypatch.setattr(Module, "run", run_stub)
    module = Module(
        context=runway_context,
        definition=fx_deployments.load("simple_parallel_module").modules[0],
    )
    assert not module.deploy()
    assert "parallel_parent:processing modules sequentially..." in caplog.messages
    # Both child modules deploy, one after the other.
    run_stub.assert_has_calls([call("deploy"), call("deploy")])
def test_deploy_sync(
    self,
    caplog: LogCaptureFixture,
    fx_deployments: YamlLoaderDeployment,
    mocker: MockerFixture,
    runway_context: MockRunwayContext,
) -> None:
    """Test deploy sync."""
    caplog.set_level(logging.INFO, logger="runway")
    # Force the sequential code path and capture each child run() call.
    mocker.patch.object(Module, "use_async", False)
    run_stub = mocker.patch.object(Module, "run")
    module = Module(
        context=runway_context,
        definition=fx_deployments.load("simple_parallel_module").modules[0],
    )
    assert not module.deploy()
    assert "parallel_parent:processing modules sequentially..." in caplog.messages
    # Both child modules deploy, one after the other.
    run_stub.assert_has_calls([call("deploy"), call("deploy")])  # type: ignore
def test_deploy_async(self, mock_futures, caplog, fx_deployments, monkeypatch, runway_context):
    """Test deploy async."""
    caplog.set_level(logging.INFO, logger="runway")
    # Make the patched futures module hand back a recordable pool.
    pool = MagicMock()
    mock_futures.ProcessPoolExecutor.return_value = pool
    monkeypatch.setattr(Module, "use_async", True)
    module = Module(
        context=runway_context,
        definition=fx_deployments.load("simple_parallel_module").modules[0],
    )
    assert not module.deploy()
    expected_msg = (
        "parallel_parent:processing modules in parallel... (output "
        "will be interwoven)"
    )
    assert expected_msg in caplog.messages
    mock_futures.ProcessPoolExecutor.assert_called_once_with(
        max_workers=runway_context.env.max_concurrent_modules
    )
    # Each child module is submitted to the pool and its result awaited.
    pool.submit.assert_has_calls(
        [
            call(module.child_modules[0].run, "deploy"),
            call(module.child_modules[1].run, "deploy"),
        ]
    )
    mock_futures.wait.assert_called_once()
    assert pool.submit.return_value.result.call_count == 2