def test_log_names_respects_filter(self):
    """log_names(filter_fn=...) must return only the logs accepted by the filter.

    Two fake workspaces carry two time-series logs each; the filter keeps
    'ts_2' and 'ts_4', and the result must contain exactly those names.
    """
    time_series_logs = (('ts_1', (1., )), ('ts_2', (3., )), ('ts_3', [2.]),
                        ('ts_4', [3.]))
    fake1 = create_test_workspace(ws_name='fake1',
                                  time_series_logs=time_series_logs[:2])
    fake2 = create_test_workspace(ws_name='fake2',
                                  time_series_logs=time_series_logs[2:])
    table_workspace = WorkspaceFactory.createTable()
    fit1 = FitInformation(mock.MagicMock(), 'func1',
                          [StaticWorkspaceWrapper(fake1.name(), fake1)],
                          StaticWorkspaceWrapper(fake1.name(), table_workspace),
                          mock.MagicMock())
    fit2 = FitInformation(mock.MagicMock(), 'func1',
                          [StaticWorkspaceWrapper(fake2.name(), fake2)],
                          StaticWorkspaceWrapper(fake2.name(), table_workspace),
                          mock.MagicMock())
    # Expose both fits through the mocked fit history and the latest-fits
    # accessor, then register them with the context under test.
    self.mock_active_fit_history = mock.PropertyMock(
        return_value=[fit1, fit2])
    type(self.fitting_context
         ).active_fit_history = self.mock_active_fit_history
    self.fitting_context.all_latest_fits = mock.MagicMock(
        return_value=[fit1, fit2])
    self.fitting_context.add_fit(fit1)
    self.fitting_context.add_fit(fit2)

    required_logs = ('ts_2', 'ts_4')
    log_names = self.fitting_context.log_names(
        filter_fn=lambda log: log.name in required_logs)

    self.assertEqual(len(required_logs), len(log_names))
    for name in required_logs:
        self.assertTrue(name in log_names,
                        msg="{} not found in log list".format(name))
def test_ad_group_audience_view_service_transport_channel_mtls_with_adc(
        transport_class):
    """The deprecated ``api_mtls_endpoint`` path must build an mTLS gRPC
    channel using ADC-provided SSL credentials when ``client_cert_source``
    is None."""
    mock_ssl_cred = mock.Mock()
    with mock.patch.multiple(
            "google.auth.transport.grpc.SslCredentials",
            __init__=mock.Mock(return_value=None),
            ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
    ):
        with mock.patch.object(transport_class, "create_channel",
                               autospec=True) as grpc_create_channel:
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            mock_cred = mock.Mock()

            # api_mtls_endpoint/client_cert_source are deprecated arguments,
            # so constructing the transport must emit a DeprecationWarning.
            with pytest.warns(DeprecationWarning):
                transport = transport_class(
                    host="squid.clam.whelk",
                    credentials=mock_cred,
                    api_mtls_endpoint="mtls.squid.clam.whelk",
                    client_cert_source=None,
                )

            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=mock_cred,
                credentials_file=None,
                scopes=('https://www.googleapis.com/auth/adwords', ),
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
def test_coupled_enrichment_terms_compute_new_velocity(self):
    """Test of the method coupled_enrichment_terms_compute_new_velocity.

    With inverse coupling matrix [[1, 2], [2, 1]], unit forces and a unit
    time step, both the node velocities and the enriched velocities must
    gain the coupled contribution.
    """
    self.mock_discontinuity.enr_velocity_new = np.array([[0., ], [0., ]])
    self.mock_discontinuity.mass_matrix_enriched = mock.PropertyMock()
    self.mock_discontinuity.mass_matrix_enriched.inverse_enriched_mass_matrix_coupling_dof = np.array(
        [[1., 2.], [2., 1.]])
    self.my_nodes._force = np.array([[1., ], [1., ]])
    self.my_nodes._upundemi = np.array([[1., ], [1., ]])
    self.my_nodes.coupled_enrichment_terms_compute_new_velocity(
        self.mock_discontinuity, 1.)
    # Node velocities: 1 + coupling terms; enriched velocities: coupling only.
    np.testing.assert_array_equal(self.my_nodes._upundemi,
                                  np.array([[6., ], [5., ]]))
    np.testing.assert_array_equal(self.mock_discontinuity.enr_velocity_new,
                                  np.array([[3., ], [3., ]]))
def test_happy_path(self, mock_response, mock_session): mock_session.request.return_value = mock_response # 1. run_rule success, 2. get_operation success, 3. list_results success. type(mock_response).status_code = mock.PropertyMock( side_effect=[200, 200, 200]) mock_response.raise_for_status.side_effect = [None, None, None] expected_results = [{"match": {}}] mock_response.json.side_effect = [ # JSON result of run_rule(). { "name": "operations/rulejob_jo_12345678-1234-1234-1234-1234567890ab", }, # JSON result of get_operation(). { "name": "operations/rulejob_jo_12345678-1234-1234-1234-1234567890ab", "done": True, "metadata": { "ruleId": None, "@type": None, "runStartedTime": "2020-01-01T00:00:00Z", }, }, # JSON result of list_results(). { "results": expected_results, }, ] event_end_time = datetime.datetime.now() event_start_time = event_end_time - datetime.timedelta(hours=1) actual_results = run_rule_and_wait.run_rule_and_wait( mock_session, "ru_12345678-1234-1234-1234-1234567890ab", event_start_time, event_end_time) self.assertEqual(actual_results, expected_results)
def test_load_credentials_with_named_profile_succeeds(
        self, mock_client_with_credentials):
    """profile.list must validate the named profile's keys via STS and
    render the expected view results."""
    # GIVEN
    mock_sts_client = mock.MagicMock()
    mock_client_with_credentials.return_value = mock_sts_client

    mock_credentials = mock.MagicMock()
    mock_credentials.sections.return_value = [TEST_PROFILE_NAME]
    # Allow for two reads of each credential part
    mock_credentials.get.side_effect = [
        TEST_ACCESS_KEY, TEST_SECRET_KEY, TEST_ACCESS_KEY, TEST_SECRET_KEY
    ]

    mock_context = mock.MagicMock()
    mock_context.aws.load_credentials.return_value = mock_credentials
    mock_context.aws.get_credentials_file_path = mock.MagicMock(
        return_value='test-path')

    # Select the test profile via passed args
    mock_args = mock.MagicMock()
    type(mock_args).profile = mock.PropertyMock(
        return_value=TEST_PROFILE_NAME)

    mock_sts_results = self.mock_sts_caller_identity_results()
    mock_sts_client.get_caller_identity.return_value = mock_sts_results

    # WHEN
    profile.list(mock_context, mock_args)

    # THEN
    # Validate STS calls
    mock_client_with_credentials.assert_called_once_with(
        aws_access_key_id=TEST_ACCESS_KEY,
        aws_secret_access_key=TEST_SECRET_KEY)
    mock_sts_client.get_caller_identity.assert_called_once_with()

    # Validate results
    mock_context.view.profile_list.assert_called_once_with(
        self.expected_view_results(),
        mock_context.aws.get_credentials_file_path())
def test_happy_path(self, mock_response, mock_session):
    """A 200 response from the API is returned verbatim by run_retrohunt."""
    mock_session.request.return_value = mock_response
    type(mock_response).status_code = mock.PropertyMock(return_value=200)
    retrohunt_payload = {
        "retrohuntId": "oh_aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa",
        "ruleId": "ru_bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb",
        "versionId": "ru_bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb@v_123456789_12345789",
        "eventStartTime": "2021-01-01T00:00:00Z",
        "eventEndTime": "2021-01-02T00:00:00Z",
        "retrohuntStartTime": "2020-01-01T00:00:00Z",
        "retrohuntEndTime": "2020-01-02T00:00:00Z",
        "state": "RUNNING",
        "progressPercentage": "0.0",
    }
    mock_response.json.return_value = retrohunt_payload

    # One-hour event window ending now.
    window_end = datetime.datetime.now()
    window_start = window_end - datetime.timedelta(hours=1)

    result = run_retrohunt.run_retrohunt(
        mock_session,
        "ru_12345678-1234-1234-1234-1234567890ab",
        window_start,
        window_end,
    )
    self.assertEqual(result, retrohunt_payload)
def test_run_now(self, mock_requests):
    """run_now must POST the payload to the run-now endpoint and return the
    run_id from the JSON response.

    Verifies the 200-status handling and that exactly one request is issued
    with the expected JSON body, auth tuple, headers and timeout.
    """
    mock_requests.codes.ok = 200
    mock_requests.post.return_value.json.return_value = {'run_id': '1'}
    status_code_mock = mock.PropertyMock(return_value=200)
    type(mock_requests.post.return_value).status_code = status_code_mock
    json = {
        'notebook_params': NOTEBOOK_PARAMS,
        'jar_params': JAR_PARAMS,
        'job_id': JOB_ID
    }
    run_id = self.hook.run_now(json)
    # Fixed: assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(run_id, '1')
    mock_requests.post.assert_called_once_with(
        run_now_endpoint(HOST),
        json={
            'notebook_params': NOTEBOOK_PARAMS,
            'jar_params': JAR_PARAMS,
            'job_id': JOB_ID
        },
        auth=(LOGIN, PASSWORD),
        headers=USER_AGENT_HEADER,
        timeout=self.hook.timeout_seconds)
async def test_engaged_axes(async_client, mocked_hw):
    """GET /motors/engaged must report each axis' engaged flag read from the
    hardware's engaged_axes property, accessed exactly once."""
    engaged_prop = mock.PropertyMock()
    type(mocked_hw).engaged_axes = engaged_prop
    engaged_prop.return_value = {
        hwtypes.Axis.X: True,
        hwtypes.Axis.Y: False,
        hwtypes.Axis.Z: True,
        hwtypes.Axis.A: True,
        hwtypes.Axis.B: False,
        hwtypes.Axis.C: True,
    }
    # The endpoint lower-cases axis names and wraps flags in 'enabled'.
    expected_body = {
        axis: {'enabled': flag}
        for axis, flag in (('x', True), ('y', False), ('z', True),
                           ('a', True), ('b', False), ('c', True))
    }
    response = await async_client.get('/motors/engaged')
    body_text = await response.text()
    assert response.status == 200
    assert json.loads(body_text) == expected_body
    engaged_prop.assert_called_once()
def setUp(self):
    """Prepare a minimal snapcraft.yaml and patch LaunchpadClient plus the
    project-directory hash for remote-build CLI tests."""
    super().setUp()
    self.snapcraft_yaml = fixture_setup.SnapcraftYaml(
        self.path,
        parts={"part0": {
            "plugin": "nil"
        }},
    )
    self.useFixture(self.snapcraft_yaml)
    self.mock_lc_init = self.useFixture(
        fixtures.MockPatch("snapcraft_legacy.cli.remote.LaunchpadClient",
                           autospec=True)).mock
    self.mock_lc = self.mock_lc_init.return_value
    # Report a single build architecture and no builds in flight.
    self.mock_lc_architectures = mock.PropertyMock(return_value=["i386"])
    type(self.mock_lc).architectures = self.mock_lc_architectures
    self.mock_lc.has_outstanding_build.return_value = False
    # Stable fake hash so tests don't depend on the working directory.
    self.mock_project = self.useFixture(
        fixtures.MockPatchObject(
            snapcraft_legacy.project.Project,
            "_get_project_directory_hash",
            return_value="fakehash123",
        ))
def test_pull_image_exists_locally_with_no_newer_image_valid(
        self, mock_docker_client, mock_docker_api_client, mock_get_local_id):
    """pull() returns False when the local image id already matches the
    registry's, i.e. nothing newer was downloaded."""
    # arrange: local and remote inspections agree on the same image id
    local_image_id = '1234'
    mock_get_local_id.return_value = local_image_id
    mock_docker_api_client.inspect_image.return_value = {'Id': local_image_id}
    type(mock_docker_client).api = mock.PropertyMock(
        return_value=mock_docker_api_client)
    client = EdgeDockerClient.create_instance(mock_docker_client)
    image = 'test_image'
    username = "******"
    password = "******"
    auth_dict = {'username': username, 'password': password}

    # act
    result = client.pull(image, username, password)

    # assert
    mock_get_local_id.assert_called_with(image)
    mock_docker_api_client.inspect_image.assert_called_with(image)
    mock_docker_client.images.pull.assert_called_with(
        image, auth_config=auth_dict)
    self.assertFalse(result)
def test_happy_path(self, mock_response, mock_session):
    """get_detection returns the detection JSON unchanged on HTTP 200."""
    mock_session.request.return_value = mock_response
    type(mock_response).status_code = mock.PropertyMock(return_value=200)
    version_id = "ru_12345678-1234-1234-1234-1234567890ab@v_100000_000000"
    detection_id = "de_12345678-1234-1234-1234-1234567890ab"
    detection_payload = {
        "id": detection_id,
        "type": "RULE_DETECTION",
        "createdTime": "2020-11-05T12:00:00Z",
        "detectionTime": "2020-11-05T01:00:00Z",
        "timeWindow": {
            "startTime": "2020-11-05T00:00:00Z",
            "endTime": "2020-11-05T01:00:00Z",
        },
        "detection": [
            {
                "ruleId": "ru_12345678-1234-1234-1234-1234567890ab",
                "ruleName": "rule content",
                "ruleVersion": version_id,
                "urlBackToProduct": "https://chronicle.security",
                "alertState": "ALERTING",
                "ruleType": "MULTI_EVENT",
                "detectionFields": [
                    {
                        "key": "fieldName",
                        "value": "fieldValue",
                    }
                ],
            }
        ],
    }
    mock_response.json.return_value = detection_payload

    result = get_detection.get_detection(mock_session, version_id,
                                         detection_id)
    self.assertEqual(result, detection_payload)
def test_run_exception(builder):
    """builder.run must retry a failing build and finally raise RuntimeError
    carrying the package/work details in its message."""
    builder.build = mock.Mock(side_effect=ValueError('test'))
    builder.prepare = mock.MagicMock()
    mock_work = get_mock_work()
    type(mock_work).working_dir = mock.PropertyMock(return_value='work_dir')
    called = False
    error_message = None
    try:
        builder.run(mock_work, retry=3)
    except RuntimeError as e:
        called = True
        error_message = str(e)
    assert called
    # NOTE(review): 'pacakge_dict' mirrors the production error message
    # verbatim (typo and all) — do not "fix" it here without fixing the
    # production string first.
    expected_error_message = ("pacakge_dict: {'name': 'a'}, "
                              "num: 1, work_dir: work_dir")
    assert error_message == expected_error_message
    if sys.version_info >= (3, 6):
        builder.build.assert_called()
    # build runs 4 times in total: the initial attempt plus 3 retries.
    calls = [mock.call({'name': 'a'}, retry=3)] * 4
    builder.build.assert_has_calls(calls)
    assert builder.build.call_count == len(calls)
def test_happy_path_without_page_size(self, mock_response, mock_session):
    """list_rule_versions returns the rules plus the next page token."""
    mock_session.request.return_value = mock_response
    type(mock_response).status_code = mock.PropertyMock(return_value=200)
    rule_fixture = {
        "ruleId": "ru_12345678-1234-1234-1234-1234567890ab",
        "versionId": "ru_12345678-1234-1234-1234-1234567890ab@v_100000_000000",
        "rule": "rule content",
        "metadata": {
            "author": "first_author",
            "contributor": "first_contributor",
        },
    }
    token_fixture = "page token here"
    mock_response.json.return_value = {
        "rules": [rule_fixture],
        "nextPageToken": token_fixture,
    }

    versions, token = list_rule_versions.list_rule_versions(
        mock_session, "ru_12345678-1234-1234-1234-1234567890ab")

    self.assertEqual(len(versions), 1)
    self.assertEqual(versions[0], rule_fixture)
    self.assertEqual(token, token_fixture)
def test_do_login_require_captcha(self):
    """do_login must call require_auth_captcha when the post-login URL
    carries captcha query parameters, forwarding the parsed params."""
    # Prepare mocked parameters
    auth_api = AuthAPI()
    session = self.get_mocked_session()

    # Mock 'url' property on post to call require_captcha
    url = 'http://test/?sid=test123&test=1'
    post_response = mock.MagicMock()
    type(post_response).url = mock.PropertyMock(return_value=url)
    session.post = mock.Mock(return_value=post_response)

    # Do login, expect require captcha method being called
    with mock.patch('vk_requests.auth.AuthAPI.require_auth_captcha') as \
            require_captcha:
        auth_api.do_login(session=session)
        self.assertTrue(require_captcha.called)
        # call_args_list[0] is (args, kwargs); take the kwargs mapping.
        call_params = dict(tuple(require_captcha.call_args_list[0])[1])
        keys = ('query_params', 'form_text', 'login_form_data', 'session')
        for k in keys:
            self.assertIn(k, call_params)
        self.assertEqual(call_params['query_params'], {
            'sid': 'test123',
            'test': '1'
        })
def test_cache_not_cleared_for_nested_transaction(self, db_session,
                                                  mock_transaction):
    """
    The cache should not be cleared when a nested transaction ends.
    """

    @lru_cache_in_transaction(db_session)
    def random_float(*args, **kwargs):
        # random.random() makes cache hits observable: equal results imply
        # the cached value was reused rather than recomputed.
        return random.random()

    a = random_float("a")
    b = random_float("b")
    c = random_float("c", with_keywords=True)

    assert random_float("a") == a
    assert random_float("b") == b
    assert random_float("c", with_keywords=True) == c

    # Give the transaction a parent so it counts as *nested*.
    type(mock_transaction).parent = mock.PropertyMock(
        return_value=mock.Mock(spec=db_session.transaction))
    db_session.dispatch.after_transaction_end(db_session, mock_transaction)

    # Cached values must survive the nested transaction's end.
    assert random_float("a") == a
    assert random_float("b") == b
    assert random_float("c", with_keywords=True) == c
def test_create_subject(self, mock_response, mock_session):
    """create_subject returns the server's subject JSON on HTTP 200."""
    mock_session.request.return_value = mock_response
    type(mock_response).status_code = mock.PropertyMock(return_value=200)
    subject_id = "*****@*****.**"
    subject_type = "SUBJECT_TYPE_ANALYST"
    roles = ["Test"]
    subject_payload = {
        "subject": {
            "name": subject_id,
            "type": subject_type,
            "roles": [{
                "name": "Test",
                "title": "Test role",
                "description": "The Test role",
                "createTime": "2020-11-05T00:00:00Z",
                "isDefault": False,
                "permissions": [{
                    "name": "Test",
                    "title": "Test permission",
                    "description": "The Test permission",
                    "createTime": "2020-11-05T00:00:00Z",
                },],
            },],
        },
    }
    mock_response.json.return_value = subject_payload

    result = create_subject.create_subject(mock_session, subject_id,
                                           subject_type, roles)
    self.assertEqual(result, subject_payload)
def test_route(scenario_test_data):
    """Tests that ProxyAircraftControls implements route"""
    mock_aircraft_controls = mock.Mock()
    proxy_aircraft_controls = ProxyAircraftControls(mock_aircraft_controls)

    # Test error from properties
    all_properties_mock = mock.PropertyMock(return_value="Sim error")
    type(mock_aircraft_controls).all_properties = all_properties_mock
    err = proxy_aircraft_controls.route(None)
    assert err == "Sim error"

    # Test error when aircraft has no route
    test_scenario = copy.deepcopy(TEST_SCENARIO)
    test_scenario["aircraft"][0].pop("route")
    proxy_aircraft_controls.set_initial_properties(_TEST_SECTOR_ELEMENT,
                                                  test_scenario)
    full_data, sim_data = scenario_test_data
    test_callsign = list(full_data)[0]
    all_properties_mock.return_value = sim_data
    err = proxy_aircraft_controls.route(test_callsign)
    assert err == "Aircraft has no route"

    # Test valid response
    proxy_aircraft_controls.set_initial_properties(_TEST_SECTOR_ELEMENT,
                                                  TEST_SCENARIO)
    err = proxy_aircraft_controls.route(test_callsign)
    # Expected: (route name, current fix, full list of fix names).
    route = [x["fixName"] for x in TEST_SCENARIO["aircraft"][0]["route"]]
    assert err == (_TEST_SECTOR_ELEMENT.routes()[0].name, route[0], route)
def test_upload_chunked(self, session_finish_mock, session_append_mock,
                        session_start_mock, files_upload_mock):
    """Small files must use the plain upload API; large files must go
    through the start/append/finish upload-session API."""
    DATA = b'Lots of data.'
    session_start_result = mock.MagicMock()
    type(session_start_result).session_id = mock.PropertyMock(
        side_effect=['session-xxxxx'])
    session_start_mock.return_value = session_start_result

    # Sanity: nothing has been called yet.
    self.assertFalse(files_upload_mock.called)
    self.assertFalse(session_start_mock.called)
    self.assertFalse(session_append_mock.called)
    self.assertFalse(session_finish_mock.called)

    with tempfile.NamedTemporaryFile() as temp_file:
        temp_file.write(DATA)
        temp_file.flush()

        dsmr_dropbox.services.upload_chunked('dummy-token', temp_file.name,
                                             '/remote-path.ext')

    # Only small file upload should be called.
    self.assertTrue(files_upload_mock.called)
    self.assertFalse(session_start_mock.called)
    self.assertFalse(session_append_mock.called)
    self.assertFalse(session_finish_mock.called)

    # Large file upload (> 2 MB chunks).
    with tempfile.NamedTemporaryFile() as temp_file:
        temp_file.write(DATA * 2 * 1024 * 1024)
        temp_file.flush()

        dsmr_dropbox.services.upload_chunked('dummy-token', temp_file.name,
                                             '/remote-path.ext')

    self.assertTrue(session_start_mock.called)
    self.assertTrue(session_append_mock.called)
    self.assertTrue(session_finish_mock.called)
def test_copy_chunk_remotely(self, mock_post):
    """copy_chunk_remotely must POST one byte range with the session's
    extra params merged into the form data and a correct Content-Range."""
    dst = "http://remote.destination/upload"
    session = requests.Session()
    # Session-level params are expected to be merged into the POST data.
    session.params = {'dst': 'foo'}
    attrs = {
        'json.return_value': {
            'upload_id': uuid.uuid4().hex
        },
        'elapsed': mock.PropertyMock(**{'total_seconds.return_value': 1})
    }
    mock_response = mock.Mock()
    mock_response.configure_mock(**attrs)
    mock_post.return_value = mock_response
    upload_id = uuid.uuid4().hex
    copy_chunk_remotely(self.src,
                        dst,
                        1,
                        3,
                        upload_id=upload_id,
                        requests_session=session,
                        block_size=1)
    # One-byte chunk at offset 1 of a 3-byte source; b'o' presumably comes
    # from the fixture file self.src ('src.txt') — confirm against setUp.
    mock_post.assert_called_once_with(
        dst,
        files={'file': ('src.txt', b'o')},
        data={
            'upload_id': upload_id,
            'dst': 'foo'
        },
        headers={'Content-Range': 'bytes 1-1/3'},
        timeout=60,
    )
def test_url(self, mock_url, mock_check_host):
    """Server.url() must pick a reachable gate, honour custom ports, append
    the view path, and raise UnreachableDestination with no gates."""
    self.set_servers_and_routes()
    mock_check_host.return_value = True
    self.assertEqual(f'https://1.1.1.1:{defaults.DEFAULT_PORT}',
                     self.n1.url())
    self.assertEqual(f'https://n2_dns:{defaults.DEFAULT_PORT}',
                     self.n2.url())
    self.assertEqual(f'https://n3:8000', self.n3.url())
    self.assertEqual(f'https://1.1.1.1:{defaults.DEFAULT_PORT}',
                     self.r1.url())
    self.assertEqual(f'https://n2_dns:{defaults.DEFAULT_PORT}',
                     self.r2.url())

    # With a view name, the resolved view path is appended.
    mock_url.return_value = '/'
    self.assertEqual(f'https://1.1.1.1:{defaults.DEFAULT_PORT}/',
                     self.n1.url('api'))
    mock_url.assert_called_once_with('api')

    me = Server(name='me', gates=[('127.0.0.1', 5), ('192.168.1.2', 2)],
                me=True)
    self.assertEqual(f'https://127.0.0.1:5/', me.url('api'))

    with mock.patch('dimensigon.domain.entities.server.current_app'
                    ) as mock_current_app:
        # NOTE(review): with this http_config the expected scheme flips to
        # plain http — confirm the keyfile semantics against Server.url().
        type(mock_current_app.dm.config).http_config = mock.PropertyMock(
            return_value={'keyfile': 'x'})
        me = Server(name='me', gates=[('192.168.1.2', 2)], me=True)
        self.assertEqual(f'http://192.168.1.2:2/', me.url('api'))

    # A server with no usable gates cannot produce a URL.
    s = Server('test', port=8000)
    with self.assertRaises(errors.UnreachableDestination):
        s.url()
def test_discover_by_device_type_override():
    """Test entity discovery by device type overriding."""
    channel_pool = mock.MagicMock(spec_set=zha_channels.ChannelPool)
    endpoint_prop = mock.PropertyMock()
    endpoint_prop.return_value.profile_id = 0x0104
    endpoint_prop.return_value.device_type = 0x0100
    type(channel_pool).endpoint = endpoint_prop

    # Force this endpoint's entity type to SWITCH via the override table.
    overrides = {channel_pool.unique_id: {"type": zha_const.SWITCH}}
    get_entity_mock = mock.MagicMock(
        return_value=(mock.sentinel.entity_cls, mock.sentinel.claimed)
    )
    with mock.patch(
        "homeassistant.components.zha.core.registries.ZHA_ENTITIES.get_entity",
        get_entity_mock,
    ), mock.patch.dict(disc.PROBE._device_configs, overrides, clear=True):
        disc.PROBE.discover_by_device_type(channel_pool)

    assert get_entity_mock.call_count == 1
    assert channel_pool.claim_channels.call_count == 1
    assert channel_pool.claim_channels.call_args[0][0] is mock.sentinel.claimed
    assert channel_pool.async_new_entity.call_count == 1
    assert channel_pool.async_new_entity.call_args[0][0] == zha_const.SWITCH
    assert channel_pool.async_new_entity.call_args[0][1] == mock.sentinel.entity_cls
def test_conditional_includes_from_branch_name(self, rw_dir):
    """[includeIf "onbranch:..."] must be honoured for exact branch names
    and glob patterns, and ignored for non-matching branches."""
    # Initiate mocked branch
    branch = mock.Mock()
    type(branch).name = mock.PropertyMock(return_value="/foo/branch")

    # Initiate mocked repository
    repo = mock.Mock(active_branch=branch)

    # Initiate config files.
    path1 = osp.join(rw_dir, "config1")
    path2 = osp.join(rw_dir, "config2")
    template = "[includeIf \"onbranch:{}\"]\n path={}\n"

    # Ensure that config is included if branch is correct.
    with open(path1, "w") as stream:
        stream.write(template.format("/foo/branch", path2))

    with GitConfigParser(path1, repo=repo) as config:
        assert config._has_includes()
        assert config._included_paths() == [("path", path2)]

    # Ensure that config is NOT included if branch is incorrect.
    with open(path1, "w") as stream:
        stream.write(template.format("incorrect", path2))

    with GitConfigParser(path1, repo=repo) as config:
        assert not config._has_includes()
        assert config._included_paths() == []

    # Ensure that config is included when branch matches a glob pattern.
    with open(path1, "w") as stream:
        stream.write(template.format("/foo/**", path2))

    with GitConfigParser(path1, repo=repo) as config:
        assert config._has_includes()
        assert config._included_paths() == [("path", path2)]
def test_task_functions(self, input_patch, output_patch):
    """Run NERTask with patched I/O and mocked workflow resources, then
    exercise the shared task and NER-tagging checks."""
    # NOTE(review): new_callable is passed a PropertyMock *instance* (not
    # the class); descriptor behaviour is then emulated by assigning
    # __get__ below — confirm this matches the intended mock.patch usage.
    with mock.patch("bwg.tasks.ner.NERTask.workflow_resources",
                    new_callable=mock.PropertyMock()) as workflow_mock:
        task_config = {
            "STANFORD_MODELS_PATH": "",
            "STANFORD_NER_MODEL_PATH": "",
            "CORPUS_ENCODING": "",
            "WIKIPEDIA_READING_OUTPUT_PATH": ""
        }

        output_patch.return_value = MockOutput()
        input_patch.return_value = MockInput(NER_TASK["input"])
        workflow_mock.__get__ = mock.Mock(
            return_value={
                "tokenizer": MockTokenizer(),
                "ner_tagger": MockTagger(self.naive_ner_tag)
            })

        task = bwg.tasks.ner.NERTask(task_config=task_config)

        # Testing
        self._test_task(task)
        self._test_ner_tag(task)
def test_was_modified_since_fallback(self):
    """DownloadMixin.was_modified_since() fallbacks to `True`.

    When:

    * calling file wrapper's ``was_modified_since()`` raises
      ``NotImplementedError`` or ``AttributeError``;
    * and accessing ``size`` and ``modified_time`` from file wrapper raises
      ``NotImplementedError`` or ``AttributeError``...

    ... then
    :meth:`django_downloadview.views.base.DownloadMixin.was_modified_since`
    returns ``True``.
    """
    file_wrapper = mock.Mock()
    file_wrapper.was_modified_since = mock.Mock(
        side_effect=NotImplementedError)
    # NOTE(review): only modified_time is made to raise here; size stays a
    # plain Mock attribute — presumably sufficient to reach the fallback.
    type(file_wrapper).modified_time = mock.PropertyMock(
        side_effect=NotImplementedError)
    mixin = views.DownloadMixin()
    self.assertIs(mixin.was_modified_since(file_wrapper, 'fake since'), True)
def test_service_instance_aai_service_instance():
    """aai_service_instance must raise AttributeError while the request is
    IN_PROGRESS or when the subscription lookup fails, and resolve once the
    instantiation status is COMPLETED."""
    customer_mock = mock.MagicMock()
    service_instantiation = ServiceInstantiation(
        name="test",
        request_id="test_request_id",
        instance_id="test_instance_id",
        sdc_service=mock.MagicMock(),
        cloud_region=mock.MagicMock(),
        tenant=mock.MagicMock(),
        customer=customer_mock,
        owning_entity=mock.MagicMock(),
        project=mock.MagicMock())
    status_mock = mock.PropertyMock(
        return_value=ServiceInstantiation.StatusEnum.IN_PROGRESS)
    type(service_instantiation).status = status_mock

    # Still in progress -> the AAI instance is not available yet.
    with pytest.raises(AttributeError):
        service_instantiation.aai_service_instance

    # Fixed: dropped the accidental chained assignment that also bound a
    # stray local named `return_value`.
    status_mock.return_value = ServiceInstantiation.StatusEnum.COMPLETED
    assert service_instantiation.aai_service_instance is not None

    # A failing subscription lookup surfaces as AttributeError too.
    customer_mock.get_service_subscription_by_service_type.side_effect = \
        ValueError
    with pytest.raises(AttributeError):
        service_instantiation.aai_service_instance
def test_add_slot(self, mock_slot):
    """AddSlot must create the downloader slots declared in SLOTS settings
    when the spider opens."""
    settings = {
        'SPIDER_CLOSE_DELAY': 10,
        'ADDSLOT_ENABLED': True,
        'RANDOMIZE_DOWNLOAD_DELAY': True,
        'SLOTS': {
            'your_slot1': {
                'concurrency': 2,
                'delay': 0
            },
            'your_slot2': {
                'concurrency': 3,
                'delay': 1
            }
        }
    }

    def side_effect(*args, **kwargs):
        # Echo back the slot config matching the requested delay argument.
        if args[1] == 0:  # your_slot1.delay
            return {'concurrency': 2, 'delay': 0}
        if args[1] == 1:  # your_slot2.delay
            return {'concurrency': 3, 'delay': 1}

    crawler = get_crawler(settings_dict=settings)
    crawler.engine = mock.Mock()
    pptmk = mock.PropertyMock(return_value={})
    type(crawler.engine.downloader).slots = pptmk
    add_slot = AddSlot.from_crawler(crawler)
    mock_slot.side_effect = side_effect
    assert add_slot is not None
    add_slot.spider_opened(None)
    assert add_slot.downloader_slots.get('your_slot1') is not None
    assert add_slot.downloader_slots.get('your_slot2') is not None
    # Fixed: compare ints with ==, not `is` — identity of small ints is a
    # CPython implementation detail and `x is <literal>` is a SyntaxWarning.
    assert add_slot.downloader_slots.get('your_slot1').get(
        'concurrency') == 2
    assert add_slot.downloader_slots.get('your_slot1').get('delay') == 0
def test_is_study_inside_database_returns_true_only_if_one_study_is_found(
        self):
    """is_study_inside_database is True iff the search yields exactly one
    matching row — zero or several matches both yield False."""
    # given
    repo = DataRepoTinydb("")
    type(repo).db = mock.PropertyMock()
    study = StudyDTO(path="path")

    # when / then: one hit -> True; two hits -> False; no hits -> False.
    for search_result, expected in (
            (["first_element"], True),
            (["first_element", "second_element"], False),
            ([], False)):
        repo.db.search = mock.Mock(return_value=search_result)
        assert repo.is_study_inside_database(study=study) is expected
def test_catalog_list(self):
    """`catalog list` renders one row per service from the token's catalog."""
    auth_ref = identity_fakes.fake_auth_ref(
        identity_fakes.TOKEN,
        fake_service=self.service_catalog,
    )
    self.ar_mock = mock.PropertyMock(return_value=auth_ref)
    type(self.app.client_manager).auth_ref = self.ar_mock

    parsed_args = self.check_parser(self.cmd, [], [])

    # In base command class Lister in cliff, abstract method take_action()
    # returns a tuple containing the column names and an iterable
    # containing the data to be listed.
    columns, data = self.cmd.take_action(parsed_args)

    self.assertEqual(self.columns, columns)
    expected_rows = ((
        'supernova',
        'compute',
        catalog.EndpointsColumn(self.service_catalog['endpoints']),
    ), )
    self.assertListItemEqual(expected_rows, tuple(data))
def test_discover_by_device_type(device_type, component, hit):
    """Test entity discovery by device type."""
    channel_pool = mock.MagicMock(spec_set=zha_channels.ChannelPool)
    endpoint_prop = mock.PropertyMock()
    endpoint_prop.return_value.profile_id = 0x0104
    endpoint_prop.return_value.device_type = device_type
    type(channel_pool).endpoint = endpoint_prop

    get_entity_mock = mock.MagicMock(
        return_value=(mock.sentinel.entity_cls, mock.sentinel.claimed)
    )
    with mock.patch(
        "homeassistant.components.zha.core.registries.ZHA_ENTITIES.get_entity",
        get_entity_mock,
    ):
        disc.PROBE.discover_by_device_type(channel_pool)

    # Only device types present in the registry should produce an entity.
    if hit:
        assert get_entity_mock.call_count == 1
        assert channel_pool.claim_channels.call_count == 1
        assert channel_pool.claim_channels.call_args[0][0] is mock.sentinel.claimed
        assert channel_pool.async_new_entity.call_count == 1
        assert channel_pool.async_new_entity.call_args[0][0] == component
        assert channel_pool.async_new_entity.call_args[0][1] == mock.sentinel.entity_cls
def test_lists_readable_android_directory_using_helper_method(
        self, mock_runner):
    """_ls_android must render a directory-listing table from the agent's
    file-listing reply, including the readable/writable summary."""
    mock_response = mock.Mock()
    mock_response.is_successful.return_value = True
    type(mock_response).data = mock.PropertyMock(return_value={
        'path': '/foo/bar',
        'readable': True,
        'writable': True,
        'files': {
            'test': {
                'fileName': 'test',
                'readable': True,
                'writable': True,
                'attributes': {
                    'isDirectory': False,
                    'isFile': True,
                    'isHidden': False,
                    'lastModified': 1507189001000,
                    'size': 249,
                }
            }
        }
    })
    mock_runner.return_value.get_last_message.return_value = mock_response

    with capture(_ls_android, ['/foo/bar']) as o:
        output = o

    # Fixed: variable name typo (expected_outut -> expected_output).
    # NOTE(review): the column spacing below must match the table renderer's
    # exact output — verify the alignment against a real run.
    expected_output = """Type    Last Modified            Read    Write    Hidden    Size     Name
------  -----------------------  ------  -------  --------  -------  ------
File    2017-10-05 07:36:41 GMT  True    True     False     249.0 B  test

Readable: Yes
Writable: Yes
"""

    self.assertEqual(output, expected_output)