def test_run_multiple_computational_sidecar_dask(
    event_loop: asyncio.AbstractEventLoop,
    dask_client: Client,
    ubuntu_task: ServiceExampleParam,
    mocker: MockerFixture,
):
    NUMBER_OF_TASKS = 50

    mocker.patch(
        "simcore_service_dask_sidecar.computational_sidecar.core.get_integration_version",
        autospec=True,
        return_value=ubuntu_task.integration_version,
    )
    futures = [
        dask_client.submit(
            run_computational_sidecar,
            ubuntu_task.docker_basic_auth,
            ubuntu_task.service_key,
            ubuntu_task.service_version,
            ubuntu_task.input_data,
            ubuntu_task.output_data_keys,
            ubuntu_task.log_file_url,
            ubuntu_task.command,
            resources={},
        )
        for _ in range(NUMBER_OF_TASKS)
    ]

    results = dask_client.gather(futures)

    # check that each task produces the expected data, no less, no more
    for output_data in results:
        for k, v in ubuntu_task.expected_output_data.items():
            assert k in output_data
            assert output_data[k] == v
def test_add_artifact_file_of_type_str_should_read_file_from_disc_and_upload_it(
        active_run, client_mock, artifact_api_mock, mocker: MockerFixture):
    # arrange
    file_path = path.normpath('path/to/file.txt')
    file_bytes = bytes('foo', 'utf-8')
    file = io.BytesIO(file_bytes)
    artifact = Artifact(name='artifact name', version=23)

    path_mock = mocker.patch('mlaide.active_run.Path')
    path_mock.return_value.is_file.return_value = True
    path_mock.return_value.read_bytes.return_value = file_bytes

    bytes_io_mock = mocker.patch('mlaide.active_run.BytesIO')
    bytes_io_mock.return_value = file

    # act
    active_run.add_artifact_file(artifact, file_path)

    # assert
    artifact_api_mock.upload_file.assert_called_once_with(
        client=client_mock.return_value,
        project_key='project key',
        artifact_name='artifact name',
        artifact_version=23,
        filename=file_path,
        file=file)
    path_mock.assert_called_once_with(file_path)
    bytes_io_mock.assert_called_once_with(file_bytes)
def mocked_director_v2_scheduler(mocker: MockerFixture, exp_status_code: int) -> None:
    """because the monitor is disabled some functionality needs to be mocked"""

    # MOCKING get_stack_status
    def get_stack_status(node_uuid: NodeID) -> RunningDynamicServiceDetails:
        if exp_status_code == status.HTTP_307_TEMPORARY_REDIRECT:
            raise DynamicSidecarNotFoundError(node_uuid)
        return RunningDynamicServiceDetails.parse_obj(
            RunningDynamicServiceDetails.Config.schema_extra["examples"][0])

    mocker.patch(
        "simcore_service_director_v2.modules.dynamic_sidecar.scheduler.task.DynamicSidecarsScheduler.get_stack_status",
        side_effect=get_stack_status,
    )

    # MOCKING remove_service
    def remove_service(node_uuid: NodeID, can_save: Optional[bool]) -> None:
        if exp_status_code == status.HTTP_307_TEMPORARY_REDIRECT:
            raise DynamicSidecarNotFoundError(node_uuid)

    mocker.patch(
        "simcore_service_director_v2.modules.dynamic_sidecar.scheduler.task.DynamicSidecarsScheduler.mark_service_for_removal",
        side_effect=remove_service,
    )
def test_fails_when_user_input_not_yes(self, fake_snakemake: Path, mocker: MockerFixture):
    o = fake_snakemake / "old-style"
    mocker.patch("builtins.input", return_value="")
    out = output.retrofit_output(o, [o / "config/config.yaml"])
    assert out is False
    assert (o / "config").exists()
async def mock_download_file(
    mocker: MockerFixture,
    this_node_file: Path,
    project_id: str,
    node_uuid: str,
    download_file_folder: Path,
):
    async def mock_download_file_from_link(
        download_link: URL,
        local_folder: Path,
        file_name: Optional[str] = None,
        client_session: Optional[ClientSession] = None,
    ) -> Path:
        assert str(local_folder).startswith(str(download_file_folder))
        destination_path = local_folder / this_node_file.name
        destination_path.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy(this_node_file, destination_path)
        return destination_path

    mocker.patch(
        "simcore_sdk.node_ports_common.filemanager.get_download_link_from_s3",
        return_value="a fake link",
    )
    mocker.patch(
        "simcore_sdk.node_ports_common.filemanager.download_file_from_link",
        side_effect=mock_download_file_from_link,
    )
def test__get_config__error(mocker: MockerFixture) -> None:
    """
    Test "_get_config" method must fail on an error.

    :param mocker: mock
    :type mocker: MockerFixture
    """
    out = StringIO()
    mocker.patch(
        "sys.argv",
        [
            "notification_jabber.py",
            "-r",
            "*****@*****.**",
            "-m",
            "TEST",
            "-c",
            "notification_jabber.ini",
        ],
    )
    mocker.patch(
        "builtins.open",
        return_value=IOError(),
    )

    with pytest.raises(SystemExit):
        with contextlib2.redirect_stderr(out):
            NotificationJabber()

    assert (  # nosec: B101
        "ERROR: Config file read notification_jabber.ini error."
        in out.getvalue().strip())
def test__get_options(mocker: MockerFixture) -> None:
    """
    Test "_get_options" method must return argparse namespace.

    :param mocker: mock
    :type mocker: MockerFixture
    """
    mocker.patch(
        "sys.argv",
        [
            "notification_jabber.py",
            "-r",
            "*****@*****.**",
            "-m",
            "TEST",
            "-c",
            "notification_jabber.ini",
        ],
    )
    mocker.patch(
        "notification_jabber.NotificationJabber._get_config",
        return_value={
            "jid": "*****@*****.**",
            "password": "******",
        },
    )
    notifier = NotificationJabber()

    assert isinstance(notifier.options, Namespace)  # nosec: B101
def test__get_config__no_config_error(mocker: MockerFixture) -> None:
    """
    Test "_get_config" method must fail on no config file error.

    :param mocker: mock
    :type mocker: MockerFixture
    """
    out = StringIO()
    mocker.patch(
        "sys.argv",
        [
            "notification_jabber.py",
            "-r",
            "*****@*****.**",
            "-m",
            "TEST",
        ],
    )

    with pytest.raises(SystemExit):
        with contextlib2.redirect_stderr(out):
            NotificationJabber()

    assert (  # nosec: B101
        "ERROR: Config file /etc/nagios/notification_jabber.ini does not exist"
        in out.getvalue().strip())
def mock_capture_internal(mocker: MockerFixture):
    get_internal_metrics_team_id.cache_clear()
    mocker.patch.object(settings, "CAPTURE_INTERNAL_METRICS", True)
    mocker.patch("posthog.utils.get_machine_id", return_value="machine_id")

    yield mocker.patch("posthog.api.capture.capture_internal")

    mocker.patch.object(settings, "CAPTURE_INTERNAL_METRICS", False)
    get_internal_metrics_team_id.cache_clear()
async def test_monitor_signaled(output: CapLines, amonitor: Monitor,
                                mocker: MockerFixture, exc_class, text: str):
    def raiser(*args):
        raise exc_class('test')

    mocker.patch('foremon.queue.queueiter.__init__', raiser)

    await amonitor.start_interactive()
    assert output.stderr_expect(text)
def preprocess_pipeline(logger: Logger, mocker: MockerFixture) -> PreprocessingPipeline:
    """Initialize PreprocessingPipeline with mocked LID model and MP table."""
    mocker.patch("fi_parliament_tools.preprocessing.fasttext.load_model")
    mocker.patch("fi_parliament_tools.preprocessing.pd.read_csv")
    pipeline = PreprocessingPipeline(logger, [], "lid_dummy", "mptable_dummy", "recipe_dummy")
    return pipeline
def mock_git_info(mocker: MockerFixture) -> None:
    mocker.patch(
        "poetry.vcs.git.Git.info",
        return_value=namedtuple("GitRepoLocalInfo", "origin revision")(
            origin="https://github.com/sdispater/pendulum.git",
            revision="bb058f6b78b2d28ef5d9a5e759cfa179a1a713d6",
        ),
    )
def mock_nodeports(mocker: MockerFixture) -> None:
    mocker.patch(
        "simcore_service_dynamic_sidecar.modules.nodeports.upload_outputs",
        return_value=None,
    )
    mocker.patch(
        "simcore_service_dynamic_sidecar.modules.nodeports.download_target_ports",
        return_value=42,
    )
def test_successfully_retrofits(self, fake_snakemake: Path, mocker: MockerFixture):
    o = fake_snakemake / "old-style"
    mocker.patch("builtins.input", return_value="yes")
    out = output.retrofit_output(o, [o / "config/config.yaml"])
    assert out is True
    assert not (o / "config").exists()
    assert dirlen(o / "app1") == 1
    with (o / ".snakebids").open() as f:
        assert json.load(f)["mode"] == "bidsapp"
def noninteractive(mocker: MockerFixture) -> MagicMock:
    loop = asyncio.get_event_loop()
    # The Click.testing.CliRunner replaces `sys.stdin` with something that is
    # not compatible with `add_reader` so we just mock the call.
    mocker.patch.object(loop, 'add_reader', lambda *_: None)
    mocker.patch.object(loop, 'remove_reader', lambda *_: None)
    mock: MagicMock = mocker.MagicMock(name='Foremon._run')
    mocker.patch('foremon.app.Foremon._run', new=mock)
    spy = mocker.spy(Foremon, 'run_forever')
    return spy
def test_get_input_output_settings(self, mocker: MockerFixture):
    mocker.patch(
        "Arbie.Actions.action.yaml.safe_load",
        return_value={
            Action.input_key: MockAction.in_settings,
            Action.output_key: MockAction.out_settings,
        },
    )
    action = Action()
    assert MockAction.in_settings_parsed == action.get_input_settings()
    assert MockAction.out_settings_parsed == action.get_output_settings()
def test_import_ticket_names(qtbot: qtbot.QtBot, mocker: MockerFixture):
    """Tests the import ticket names feature"""
    gui_manager.clear_windows()
    gui_manager.initialize()
    for window in gui_manager.window_list:
        qtbot.addWidget(window)

    # Import ticket names and prizes
    mocker.patch('PyQt5.QtWidgets.QFileDialog.exec')
    mocker.patch('PyQt5.QtWidgets.QFileDialog.selectedFiles',
                 return_value=['examples/ticket_names.txt'])
    gui_manager.window_list[0].import_ticket_names_action.trigger()
    mocker.patch('PyQt5.QtWidgets.QFileDialog.selectedFiles',
                 return_value=['examples/prizes.txt'])
    gui_manager.window_list[0].import_prizes_action.trigger()

    # Restart before starting
    raffle.restart()

    # Mock prize alerts
    mocker.patch('Ui.prize_alert.PrizeAlert')

    # Draw all of the tickets in a random order
    ticket_numbers = list(range(1, 226))
    shuffle(ticket_numbers)
    for ticket_number in ticket_numbers:
        qtbot.mouseClick(
            gui_manager.window_list[0].ticket_labels[ticket_number - 1],
            Qt.LeftButton)  # Draw the next ticket
        QApplication.processEvents()
        sleep(0.01)

    # Export the results to a csv file
    mocker.patch('PyQt5.QtWidgets.QFileDialog.getSaveFileName',
                 return_value=['results.csv', 'CSV files (*.csv)'])
    gui_manager.window_list[0].export_results_action.trigger()
    QApplication.processEvents()

    with open("results.csv", 'r') as results:
        for i, ticket_number in enumerate(ticket_numbers):
            values = results.readline().strip().split(',')
            # Verify information is present in results
            assert values[0] == str(i + 1)
            assert values[1] == str(ticket_number)
            assert values[2] == raffle.tickets[ticket_number - 1].name
            if raffle.get_prize_from_number(i + 1):
                assert values[3] == raffle.get_prize_from_number(
                    i + 1).description.strip()
def mock_catalog_background_task(mocker: MockerFixture):
    """patch the setup of the background task so we can call it manually"""
    mocker.patch(
        "simcore_service_catalog.core.events.start_registry_sync_task",
        return_value=None,
        autospec=True,
    )
    mocker.patch(
        "simcore_service_catalog.core.events.stop_registry_sync_task",
        return_value=None,
        autospec=True,
    )
def test_focus_other(self, app, mocker: MockerFixture):
    mocker.patch("prompt_toolkit.layout.Layout.focus")
    app.pane_focus_other()
    assert app._previous_focus == Pane.left
    assert app._current_focus == Pane.right
    assert app._filepane_focus == Pane.right

    app._layout_mode = LayoutMode.single
    app.pane_focus_other()
    assert app._previous_focus == Pane.left
    assert app._current_focus == Pane.right
    assert app._filepane_focus == Pane.right
def mock_containers_get(mocker: MockerFixture) -> int:
    """raises a DockerError with a random HTTP status which is also returned"""
    mock_status_code = random.randint(1, 999)

    async def mock_get(*args: str, **kwargs: Any) -> None:
        raise aiodocker.exceptions.DockerError(
            status=mock_status_code, data=dict(message="aiodocker_mocked_error"))

    mocker.patch("aiodocker.containers.DockerContainers.get", side_effect=mock_get)
    return mock_status_code
def mock_data_manager(mocker: MockerFixture) -> None:
    mocker.patch(
        "simcore_service_dynamic_sidecar.modules.data_manager.upload_path_if_exists",
        return_value=None,
    )
    mocker.patch(
        "simcore_service_dynamic_sidecar.modules.data_manager.pull_path_if_exists",
        return_value=None,
    )
    importlib.reload(
        importlib.import_module("simcore_service_dynamic_sidecar.api.containers"))
def test_upgrade_kernel_rebuild_no_config(mocker: MockFixture,
                                          sp_mocker: SubprocessMocker) -> None:
    mocker.patch('upkeep.glob', return_value=['/etc/profile'])
    mocker.patch('upkeep.chdir')
    mocker.patch('upkeep.isfile', return_value=False)
    mocker.patch('upkeep.sp.run', side_effect=sp_mocker.get_output)
    mocker.patch('upkeep.sp.check_call', side_effect=sp_mocker.get_output)
    sp_mocker.add_output3(('eselect', '--colour=no', 'kernel', 'list'),
                          stdout_output=' [1] *\n [2] \n')
    sp_mocker.add_output3(
        ('eselect', '--colour=no', '--brief', 'kernel', 'list'),
        stdout_output=' *\n \n')
    assert upgrade_kernel() == 1
def test_run_computational_sidecar_dask(dask_client: Client,
                                        ubuntu_task: ServiceExampleParam,
                                        mocker: MockerFixture):
    mocker.patch(
        "simcore_service_dask_sidecar.computational_sidecar.core.get_integration_version",
        autospec=True,
        return_value=ubuntu_task.integration_version,
    )
    future = dask_client.submit(
        run_computational_sidecar,
        ubuntu_task.docker_basic_auth,
        ubuntu_task.service_key,
        ubuntu_task.service_version,
        ubuntu_task.input_data,
        ubuntu_task.output_data_keys,
        ubuntu_task.log_file_url,
        ubuntu_task.command,
        resources={},
    )

    worker_name = next(iter(dask_client.scheduler_info()["workers"]))
    output_data = future.result()

    # check that the task produces expected logs
    worker_logs = [
        log for _, log in dask_client.get_worker_logs()[worker_name]
    ]  # type: ignore
    for log in ubuntu_task.expected_logs:
        r = re.compile(
            rf"\[{ubuntu_task.service_key}:{ubuntu_task.service_version} - .+\/.+ - .+\]: ({log})"
        )
        search_results = list(filter(r.search, worker_logs))
        assert (
            len(search_results) > 0
        ), f"Could not find {log} in worker_logs:\n {pformat(worker_logs, width=240)}"

    # check that the task produces the expected data, no less, no more
    for k, v in ubuntu_task.expected_output_data.items():
        assert k in output_data
        assert output_data[k] == v
    for k, v in output_data.items():
        assert k in ubuntu_task.expected_output_data
        assert v == ubuntu_task.expected_output_data[k]

        # if there are file urls in the output, check they exist
        if isinstance(v, FileUrl):
            with fsspec.open(f"{v.url}") as fp:
                assert fp.details.get("size") > 0
def mock_dynamic_sidecar_api_calls(mocker: MockerFixture) -> None:
    class_path = (
        f"{DIRECTOR_V2_MODULES}.dynamic_sidecar.client_api.DynamicSidecarClient"
    )
    for function_name, return_value in [
        ("service_save_state", None),
        ("service_restore_state", None),
        ("service_pull_output_ports", 42),
        ("service_outputs_create_dirs", None),
    ]:
        mocker.patch(
            f"{class_path}.{function_name}",
            # pylint: disable=cell-var-from-loop
            side_effect=lambda *args, **kwargs: return_value,
        )
async def mocked_node_ports_filemanager_fcts(
    mocker: MockerFixture,
) -> Dict[str, mock.MagicMock]:
    return {
        "entry_exists": mocker.patch(
            "simcore_service_director_v2.utils.dask.port_utils.filemanager.entry_exists",
            autospec=True,
            return_value=False,
        ),
        "delete_file": mocker.patch(
            "simcore_service_director_v2.utils.dask.port_utils.filemanager.delete_file",
            autospec=True,
            return_value=None,
        ),
    }
def test_setup_and_run(self, config_file: str, store: Store, mocker: MockerFixture):
    setup_mocks(mocker, config_file)
    run_mock = mocker.patch("Arbie.settings_parser.ActionTree.run")
    run_main()
    assert run_mock.called
def mocked_dask_worker_job_id(mocker: MockerFixture) -> str:
    mock_get_worker = mocker.patch(
        "dask_task_models_library.container_tasks.events.get_worker", autospec=True
    )
    fake_job_id = "some_fake_job_id"
    mock_get_worker.return_value.get_current_task.return_value = fake_job_id
    return fake_job_id
async def test_parse_output_data(
    aiopg_engine: aiopg.sa.engine.Engine,  # type: ignore
    published_project: PublishedProject,
    user_id: UserID,
    fake_io_schema: Dict[str, Dict[str, str]],
    fake_task_output_data: TaskOutputData,
    mocker: MockerFixture,
):
    # need some fakes set in the DB
    sleeper_task: CompTaskAtDB = published_project.tasks[1]
    no_outputs = {}
    await set_comp_task_outputs(aiopg_engine, sleeper_task.node_id, fake_io_schema,
                                no_outputs)
    # mock the set_value function so we can test it is called correctly
    mocked_node_ports_set_value_fct = mocker.patch(
        "simcore_sdk.node_ports_v2.port.Port.set_value")

    # test
    dask_job_id = generate_dask_job_id(
        sleeper_task.image.name,
        sleeper_task.image.tag,
        user_id,
        published_project.project.uuid,
        sleeper_task.node_id,
    )
    await parse_output_data(aiopg_engine, dask_job_id, fake_task_output_data)

    # the FileUrl types are converted to a pure url
    expected_values = {
        k: v.url if isinstance(v, FileUrl) else v
        for k, v in fake_task_output_data.items()
    }
    mocked_node_ports_set_value_fct.assert_has_calls(
        [mock.call(value) for value in expected_values.values()])
def mock_get_full_table(
    mocker: MockerFixture,
    query_get_full_table: Tuple[List[List[Optional[str]]], List[str]]
) -> MagicMock:
    """Mock a small table instead of the true big table."""
    mock: MagicMock = mocker.patch("fi_parliament_tools.parsing.query.Query.get_full_table")
    mock.return_value = query_get_full_table
    return mock
def mock_session_start_time(request: SubRequest, mocker: MockerFixture) -> MagicMock:
    """Mock SessionQuery.get_session_start_time call."""
    mock: MagicMock = mocker.patch(
        "fi_parliament_tools.parsing.documents.SessionQuery")
    mock.return_value.get_session_start_time.return_value = request.param
    return mock