def test_collect(self, urllib):
    """A JMX metric alert reads bean 'x' property 'y' and reports WARNING text."""
    alert_meta = {
        'name': 'alert1',
        'label': 'label1',
        'serviceName': 'service1',
        'componentName': 'component1',
        'uuid': '123',
        'enabled': 'true'
    }
    # Alert source: one JMX property to read, plus the HTTP endpoint and
    # the reporting thresholds (warning at 2, critical at 5).
    alert_source_meta = {
        'jmx': {
            'property_list': [
                'x/y'
            ]
        },
        'uri': {
            'http': '192.168.0.10:8080',
            'https_property': '{{hdfs-site/dfs.http.policy}}',
            'https_property_value': 'HTTPS_ONLY'
        },
        "reporting": {
            "ok": {
                "text": "OK: {0}"
            },
            "warning": {
                "text": "Warn: {0}",
                "value": 2
            },
            "critical": {
                "text": "Crit: {0}",
                "value": 5
            }
        }
    }
    cluster = 'c1'
    host = 'host1'
    # The mocked bean value is 4: >= warning (2) but < critical (5).
    expected_text = 'Warn: 4'

    def collector_side_effect(clus, data):
        # Verify every field of the payload the alert publishes.
        self.assertEquals(data['name'], alert_meta['name'])
        self.assertEquals(data['label'], alert_meta['label'])
        self.assertEquals(data['text'], expected_text)
        self.assertEquals(data['service'], alert_meta['serviceName'])
        self.assertEquals(data['component'], alert_meta['componentName'])
        self.assertEquals(data['uuid'], alert_meta['uuid'])
        self.assertEquals(data['enabled'], alert_meta['enabled'])
        self.assertEquals(data['cluster'], cluster)
        self.assertEquals(clus, cluster)

    # The patched urllib call returns a canned JMX beans JSON document.
    response = Mock()
    urllib.return_value = response
    response.read = Mock(return_value='{"beans": [{"y": 4}]}')
    mock_collector = MagicMock()
    mock_collector.put = Mock(side_effect=collector_side_effect)
    alert = MetricAlert(alert_meta, alert_source_meta)
    # Helper config lacks hdfs-site, so presumably the plain-HTTP URI is
    # used (https_property cannot resolve) -- confirm against MetricAlert.
    alert.set_helpers(mock_collector, {'foo-site/bar': 12, 'foo-site/baz': 'asd'})
    alert.set_cluster(cluster, host)
    alert.collect()
def test_sucesso(self):
    """Listing page renders the user-list template with the stored users."""
    usuario = Usuario(nome='Teste', email='*****@*****.**',
                      google_id='123', admin=False)
    usuario.put()
    esperados = [usuario]
    handler_mock = Mock()
    crud.usuario_listar(handler_mock)
    handler_mock.assert_called_once_with('/templates/usuario_listar.html',
                                         {'lista_usuarios': esperados})
def test_updateComponents(self, LiveStatus_mock):
    """updateComponents fetches the cluster component map and partitions it
    into services, CLIENT components and non-client (MASTER/SLAVE) components."""
    LiveStatus_mock.SERVICES = []
    LiveStatus_mock.CLIENT_COMPONENTS = []
    LiveStatus_mock.COMPONENTS = []
    self.controller.componentsUrl = "foo_url/"
    sendRequest = Mock()
    self.controller.sendRequest = sendRequest
    # Canned server reply: PIG has one client, MAPREDUCE has a client
    # plus a master and a slave component.
    self.controller.sendRequest.return_value = {
        "clusterName": "dummy_cluster_name",
        "stackName": "dummy_stack_name",
        "stackVersion": "dummy_stack_version",
        "components": {"PIG": {"PIG": "CLIENT"},
                       "MAPREDUCE": {"MAPREDUCE_CLIENT": "CLIENT",
                                     "JOBTRACKER": "MASTER",
                                     "TASKTRACKER": "SLAVE"}}}
    self.controller.updateComponents("dummy_cluster_name")
    # The cluster name is appended to the components URL.
    sendRequest.assert_called_with('foo_url/dummy_cluster_name', None)
    services_expected = [u'MAPREDUCE', u'PIG']
    client_components_expected = [
        {'serviceName': u'MAPREDUCE', 'componentName': u'MAPREDUCE_CLIENT'},
        {'serviceName': u'PIG', 'componentName': u'PIG'}
    ]
    # NOTE(review): this expected order depends on dict iteration order of
    # the reply -- confirm it is stable on the supported Python versions.
    components_expected = [
        {'serviceName': u'MAPREDUCE', 'componentName': u'TASKTRACKER'},
        {'serviceName': u'MAPREDUCE', 'componentName': u'JOBTRACKER'}
    ]
    self.assertEquals(LiveStatus_mock.SERVICES, services_expected)
    self.assertEquals(LiveStatus_mock.CLIENT_COMPONENTS, client_components_expected)
    self.assertEquals(LiveStatus_mock.COMPONENTS, components_expected)
def setUp(self):
    """Build a ChunkedReader over a mocked Dropbox download response."""
    http_response = Mock(spec=requests.Response)
    http_response.raw = Mock(
        spec=requests.packages.urllib3.response.HTTPResponse)
    http_response.raw.closed = False
    http_response.raw.getheader.return_value = 1028
    dropbox_client = Mock(spec=DropboxClient)
    dropbox_client.files_download.return_value = ({}, http_response)
    self.reader = ChunkedReader(dropbox_client, '/file1.txt')
def test_open_read(self, mock_download):
    """Opening a path for read yields a ChunkedReader."""
    http_response = Mock(spec=requests.Response)
    http_response.raw = Mock(
        spec=requests.packages.urllib3.response.HTTPResponse)
    http_response.raw.getheader.return_value = 0
    mock_download.return_value = ({}, http_response)
    handle = self.fs.open('/file.txt')
    self.assertIsInstance(handle, ChunkedReader)
def test_update_reference(self):
    """transition() swaps the wrapped game for the one transition returns."""
    first_game = Mock(spec=Game, name="initialGame")
    second_game = Mock(spec=Game, name="nextGame")
    first_game.transition = Mock(return_value=second_game)
    reference = GameReference(first_game)
    reference.transition()
    first_game.transition.assert_any_call()
    self.assertIs(reference.game, second_game)
def test_member_access(self):
    """Method calls and attribute reads are forwarded to the wrapped game."""
    expected_return = "method_return_value"
    expected_field = "field_value"
    wrapped = Mock(spec=Game)
    wrapped.method = Mock(return_value=expected_return)
    wrapped.field = expected_field
    reference = GameReference(wrapped)
    result = reference.method()
    wrapped.method.assert_any_call()
    self.assertEqual(result, expected_return)
    self.assertEqual(reference.field, expected_field)
def test_threshold_hit_prevents_consequent_calls(mock_remote: Mock):
    """Once the failure threshold is reached, later calls never hit the remote."""
    mock_remote.side_effect = ConnectionError('Connection refused')
    breaker = CircuitBreakerMonitor.get('threshold_1')
    assert breaker.closed
    # First failure trips the threshold-1 breaker.
    with raises(ConnectionError):
        circuit_threshold_1()
    assert breaker.opened
    # Next call is short-circuited; the remote is not invoked again.
    with raises(CircuitBreakerError):
        circuit_threshold_1()
    mock_remote.assert_called_once()
def setup_class(self):
    """Build an IdP server and SP client with SHA-512 sign/digest defaults forced."""
    self.server = Server("idp_conf")
    sign_alg = Mock()
    sign_alg.return_value = ds.SIG_RSA_SHA512
    digest_alg = Mock()
    digest_alg.return_value = ds.DIGEST_SHA512
    # Save the real DefaultSignature so it can be restored later.
    # NOTE(review): 'restet_default' looks like a typo for 'reset_default';
    # not renamed here since teardown code elsewhere may read this name.
    self.restet_default = ds.DefaultSignature
    # Replace the module-global DefaultSignature with a MagicMock; every
    # ds.DefaultSignature() call returns the same mock instance, so these
    # assignments configure the algorithms seen by all later callers.
    ds.DefaultSignature = MagicMock()
    ds.DefaultSignature().get_sign_alg = sign_alg
    ds.DefaultSignature().get_digest_alg = digest_alg
    conf = config.SPConfig()
    conf.load_file("server_conf")
    self.client = client.Saml2Client(conf)
    self.name_id = self.server.ident.transient_nameid(
        "urn:mace:example.com:saml:roland:sp", "id12")
    # Attribute value assertion reused by the tests.
    self.ava = {"givenName": ["Derek"], "surName": ["Jeter"],
                "mail": ["*****@*****.**"], "title": "The man"}
def test_new_particle_multidist(self):
    """Rejection sampling redraws until every distance is at or under eps."""
    eps = 1.
    threshold = [eps, eps]
    prior = lambda: 1
    thetai = 1
    postfn = lambda theta: thetai
    dist = Mock()
    # Only the fourth draw satisfies both thresholds.
    attempts = [[eps * 2, eps * 2], [eps / 2, eps * 2],
                [eps * 2, eps / 2], [eps, eps]]
    dist.side_effect = attempts
    wrapper = abcpmc.sampler._RejectionSamplingWrapper(
        threshold, prior, postfn, dist, None)
    _, _, cnt = wrapper(0)
    assert cnt == len(attempts)
def test_get_aws_identity(get_uri):
    '''
    AWS identity fetch: success returns data; any failure returns None.
    '''
    # Both metadata URIs respond OK -> identity data is returned.
    get_uri.side_effect = [
        Mock(ok=True, content=b'{"test": "test"}'),
        Mock(ok=True, content="test"),
    ]
    conn = InsightsConnection(InsightsConfig())
    assert aws.get_aws_identity(conn)
    # Non-OK HTTP statuses -> None.
    get_uri.side_effect = [
        Mock(ok=False, content=None),
        Mock(ok=False, content=None),
    ]
    assert aws.get_aws_identity(conn) is None
    # Connection failures (no response at all) -> None.
    get_uri.side_effect = [None, None]
    assert aws.get_aws_identity(conn) is None
def test_propose_multidist(self):
    """ParticleProposal keeps drawing until all distances beat the threshold."""
    eps = 1.
    threshold = [eps, eps]
    thetai = 1
    postfn = lambda theta: thetai
    dist = Mock()
    # Only the fourth draw is accepted on both distance components.
    attempts = [[eps * 2, eps * 2], [eps / 2, eps * 2],
                [eps * 2, eps / 2], [eps, eps]]
    dist.side_effect = attempts
    sampler = abcpmc.Sampler(2, None, postfn, dist)
    thetas = np.array([[0.5], [1]])
    weights = np.array([0.75, 0.25])
    pool = abcpmc.sampler.PoolSpec(1, threshold, 1, thetas, None, weights)
    wrapper = abcpmc.sampler.ParticleProposal(sampler, threshold, pool, {})
    _, _, cnt = wrapper(0)
    assert cnt == len(attempts)
def client():
    """Mock client.

    Considerations when appropriate:

    * utilize botocore.stub.Stubber
    * separate runtime client from client
    """
    mock = Mock()
    # Matches the user agent a real boto3 resource client would report.
    mock._client_config.user_agent = (
        "Boto3/1.14.24 Python/3.8.5 Linux/5.4.0-42-generic Botocore/1.17.24 Resource"
    )
    return mock
def test_archive_too_big(largest_archive_file):
    """_archive_too_big logs advice that names the oversized spec.

    Bug fix: the original ended with ``assert [<bool> for ...]`` -- asserting
    the list object itself, which is truthy whenever any log call happened,
    so the "insights.spec-big" content check never actually ran. Use any()
    so the substring check is enforced.
    """
    config = Mock(base_url="www.example.com", proxy=None)
    connection = InsightsConnection(config)
    # Pretend the archive is 100 MiB on disk.
    with patch("insights.client.connection.os.stat",
               **{"return_value.st_size": 104857600}):
        with patch('insights.client.connection.logger.info') as mock_logger:
            connection._archive_too_big("archive_file")
            largest_archive_file.assert_called_once_with("archive_file")
            assert mock_logger.call_count == 3
            # At least one log line must reference the offending spec.
            assert any(
                "insights.spec-big" in args[0][0]
                for args in mock_logger.call_args_list
            )
def test_rhsm_platform_url(set_auto_configuration, initConfig):
    '''
    Ensure the correct host URL is selected for auto_config on a
    platform RHSM upload
    '''
    # rhsm config lookups, in the order the implementation queries them.
    initConfig().get.side_effect = [
        'subscription.rhsm.redhat.com', '443', '', '', '', '', '']
    config = Mock(base_url=None, upload_url=None, legacy_upload=False)
    _try_satellite6_configuration(config)
    # Historically this asserted 'cloud.redhat.com'; the current contract
    # routes production RHSM to the cert API host.
    set_auto_configuration.assert_called_with(
        config, 'cert-api.access.redhat.com', None, None, False, False)
def test_get_workflows_data_with_actions(self):
    """Workflows built from JSON carry their nested actions."""
    payload = {
        'name': 'foo',
        'active': True,
        'actions': [{'cmd': 'something'}]
    }
    workflows = self.dal.get_workflows({'workflows': [payload]},
                                       Mock(id=1), Mock(id=2), Mock())
    expectations = {'name': 'foo', 'active': True, "stage_id": 2,
                    "order": 0, "actions": 1}
    for attr, want in expectations.items():
        if attr == 'actions':
            # 'actions' holds child objects; only the count is compared.
            eq_(want, len(getattr(workflows[0], 'actions')))
        else:
            eq_(want, getattr(workflows[0], attr))
def _create_mock_oai_harvester_metadata_format_set():
    """ Mock an OaiHarvesterMetadataFormatSet.

    Returns:
        OaiHarvesterMetadataFormatSet mock.

    """
    mock_format_set = Mock(spec=OaiHarvesterMetadataFormatSet)
    _set_oai_harvester_metadata_format_set_fields(mock_format_set)
    return mock_format_set
def _create_mock_template_version_manager(title="", versions=None,
                                          is_disabled=False, user_id=""):
    """ Returns a mock template version manager

    :return:
    """
    manager = Mock(spec=TemplateVersionManager)
    manager.title = title
    manager.id = ObjectId()
    # Avoid the shared-mutable-default pitfall for the versions list.
    manager.versions = [] if versions is None else versions
    manager.disabled_versions = []
    manager.is_disabled = is_disabled
    manager.user = str(user_id)
    manager._cls = TemplateVersionManager.class_name
    return manager
def test_file_result(get_branch_info, try_disk, raw_config_parser,
                     data_collector, verify_permissions):
    """
    Configuration from file is loaded from the "uploader.json" key.
    """
    # The builtins module name differs between Python 2 and 3.
    if six.PY3:
        open_name = 'builtins.open'
    else:
        open_name = '__builtin__.open'

    with patch(open_name, create=True) as mock_open:
        # Three file opens, in order: two empty reads, then the
        # remove.conf-style content with files to redact.
        mock_open.side_effect = [
            mock.mock_open(read_data='').return_value,
            mock.mock_open(read_data='').return_value,
            mock.mock_open(
                read_data='[remove]\nfiles=/etc/some_file,/tmp/another_file').
            return_value
        ]
        # The parsed config exposes the same [remove] files entry.
        raw_config_parser.side_effect = [
            Mock(sections=Mock(return_value=['remove']),
                 items=Mock(
                     return_value=[('files', '/etc/some_file,/tmp/another_file')]))
        ]
        config = collect_args()
        collect(config)
        # First call to try_disk carries the uploader.json path.
        name, args, kwargs = try_disk.mock_calls[0]
        collection_rules = try_disk.return_value.copy()
        collection_rules.update({"file": args[0]})
        rm_conf = {"files": removed_files}
        branch_info = get_branch_info.return_value
        data_collector.return_value.run_collection.assert_called_once_with(
            collection_rules, rm_conf, branch_info)
        data_collector.return_value.done.assert_called_once_with(
            collection_rules, rm_conf)
def test_osdisks_parsing(self, isfile_mock, chk_writable_mount_mock):
    """osdisks() drops bind-mounted files, virtual tmpfs mounts and
    read-only mounts, leaving only real writable filesystems ('/')."""
    # NOTE(review): this fixture's line breaks were reconstructed from
    # df's one-mount-per-line output format -- confirm against the
    # original fixture file.
    df_output = \
        """Filesystem Type 1024-blocks Used Available Capacity Mounted on
/dev/mapper/docker-253:0-4980899-d45c264d37ab18c8ed14f890f4d59ac2b81e1c52919eb36a79419787209515f3 xfs 31447040 1282384 30164656 5% /
tmpfs tmpfs 32938336 4 32938332 1% /dev
tmpfs tmpfs 32938336 0 32938336 0% /sys/fs/cgroup
/dev/mapper/fedora-root ext4 224161316 12849696 199901804 7% /etc/resolv.conf
/dev/mapper/fedora-root ext4 224161316 12849696 199901804 7% /etc/hostname
/dev/mapper/fedora-root ext4 224161316 12849696 199901804 7% /etc/hosts
shm tmpfs 65536 0 65536 0% /dev/shm
/dev/mapper/fedora-root ext4 224161316 12849696 199901804 7% /run/secrets
"""

    def isfile_side_effect(path):
        # These mount points are bind-mounted files, not directories.
        assume_files = ["/etc/resolv.conf", "/etc/hostname", "/etc/hosts"]
        return path in assume_files

    def chk_writable_mount_side_effect(path):
        # /run/secrets is treated as a read-only mount.
        assume_read_only = ["/run/secrets"]
        return path not in assume_read_only

    isfile_mock.side_effect = isfile_side_effect
    chk_writable_mount_mock.side_effect = chk_writable_mount_side_effect
    # Feed the canned df output through a mocked subprocess.
    with patch("subprocess.Popen") as open_mock:
        proc_mock = Mock()
        attr = {'communicate.return_value': [df_output]}
        proc_mock.configure_mock(**attr)
        open_mock.return_value = proc_mock
        result = Hardware.osdisks()
    # Only '/' survives the filters.
    self.assertEquals(1, len(result))
    expected_mounts_left = ["/"]
    mounts_left = [item["mountpoint"] for item in result]
    self.assertEquals(expected_mounts_left, mounts_left)
def _create_mock_data_structure_element():
    """ Returns a mock data structure element

    :return:
    """
    element = Mock(spec=DataStructureElement)
    element.user = "******"
    element.tag = "tag"
    element.value = "value"
    element.options = {}
    element.children = []
    return element
def test_staff_returns_http_200(
        self,
        mock_oaipmhmessage_get_message_labelled,
        mock_oai_registry_api_harvest_registry,
        mock_oai_registry_api_get_by_id,
        mock_data_serializer_is_valid,
        mock_data_serializer_data,
        mock_oai_metadata_format_api_get_all_by_registry_id,
        mock_oai_set_api_get_all_by_registry_id,
        mock_oai_metadata_format_api_update_for_all_harvest_by_list_ids,
        mock_oai_set_api_update_for_all_harvest_by_list_ids,
        mock_queryset_values_list,
):
    """A staff user's harvest PUT request succeeds with HTTP 200.

    The patch decorators inject one mock per collaborator, innermost first;
    the parameter order here must mirror the decorator stack.
    """
    # Arrange -- stub every collaborator on the harvest code path.
    mock_oaipmhmessage_get_message_labelled.return_value = None
    mock_oai_registry_api_harvest_registry.return_value = []
    mock_oai_registry_api_get_by_id.return_value = Mock(spec=OaiRegistry)
    mock_data_serializer_data.return_value = Mock(spec=HarvestSerializer)
    mock_data_serializer_is_valid.return_value = True
    mock_oai_metadata_format_api_get_all_by_registry_id.return_value = Mock(
        spec=QuerySet)
    mock_oai_set_api_get_all_by_registry_id.return_value = Mock(
        spec=QuerySet)
    mock_oai_metadata_format_api_update_for_all_harvest_by_list_ids.return_value = (
        None)
    mock_oai_set_api_update_for_all_harvest_by_list_ids.return_value = None
    mock_queryset_values_list.return_value = None
    # Act -- issue the PUT as an authenticated staff user.
    response = RequestMock.do_request_put(
        rest_oai_registry.Harvest.as_view(),
        user=create_mock_user("1", is_staff=True),
        param=self.param,
        data=self.data,
    )
    # Assert
    self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_get_workable_work_requests_verify_filters(self, get_db_session):
    """get_workable_work_requests applies the expected query filters.

    Cleanup: the original bound filters 5-8 to locals that were assigned
    but never asserted; the captured args are now indexed directly and the
    dead locals removed.
    """
    action_dal = ActionDal()
    session = WrapperHelper()
    get_db_session.return_value = [session]
    action_instance = Mock()
    action_instance.serialize.return_value = {}
    pipeline_parameters = Mock()
    action_configs = Mock()
    session.results.append(
        (action_instance, pipeline_parameters, action_configs))
    action_dal.get_workable_work_requests()
    # Eight filter expressions are handed to the query.
    filters = session.filter_args
    eq_(8, len(filters))
    # status == READY
    eq_(ActionInstance.__table__.columns['status_id'], filters[0].left)
    eq_(StatusConstants.READY, filters[0].right.value)
    # manual == 0 (automatic actions only)
    eq_(ActionInstance.__table__.columns['manual'], filters[1].left)
    eq_(0, filters[1].right.value)
    # join on the owning pipeline instance
    eq_(ActionInstance.__table__.columns['pipeline_instance_id'],
        filters[2].left)
    eq_(PipelineInstance.__table__.columns['id'], filters[2].right)
    # pipeline instance must be INPROGRESS
    eq_(PipelineInstance.__table__.columns['status_id'], filters[3].left)
    eq_(StatusConstants.INPROGRESS, filters[3].right.value)
    # NOTE(review): filters[4:8] are counted above but not yet asserted.
def test_satellite_missed_queues_with_more_data():
    """Datasource reports the missed pulp agent queues for the larger fixture."""
    uuids = Mock()
    uuids.content = HOST_UUIDS_2.splitlines()
    queues = Mock()
    queues.content = QPID_QUEUES.splitlines()
    msgs = Mock()
    msgs.stream = mock_stream
    broker = {
        Specs.messages: msgs,
        LocalSpecs.content_host_uuids: uuids,
        LocalSpecs.qpid_queues: queues,
    }
    result = satellite_missed_pulp_agent_queues(broker)
    assert result is not None
    assert isinstance(result, DatasourceProvider)
    expected = DatasourceProvider(
        content=MISSED_QUEUES_OUTPUT_2.splitlines(),
        relative_path=RELATIVE_PATH)
    # Order-insensitive content comparison.
    assert sorted(result.content) == sorted(expected.content)
    assert result.relative_path == expected.relative_path
def test_setup(self):
    """'plebnet setup' routes through cmdline to core.setup with defaults."""
    # Keep the real implementation so it can be restored afterwards.
    self.setup = core.setup
    core.setup = Mock()
    sys.argv = prep('plebnet setup')
    cmdline.execute()
    core.setup.assert_called()
    core.setup.assert_called_once_with(Namespace(test_net=False))
    # Put the original function back for subsequent tests.
    core.setup = self.setup
def test_addToQueue(self, pformatMock):
    """Commands are queued and components refreshed only for real input."""
    queue_mock = MagicMock()
    update_mock = Mock()
    self.controller.actionQueue = queue_mock
    self.controller.updateComponents = update_mock
    # None input: nothing queued, no component refresh.
    self.controller.addToQueue(None)
    self.assertFalse(queue_mock.put.called)
    self.assertFalse(update_mock.called)
    # A real command list: queued and components updated.
    commands = ambari_simplejson.loads('[{"clusterName":"dummy_cluster"}]')
    self.controller.addToQueue(commands)
    self.assertTrue(queue_mock.put.called)
    self.assertTrue(update_mock.called)
def test_registration_check_parse_error(get_proxies, _init_session, _):
    '''
    Can't parse response
    Returns None
    '''
    conn = InsightsConnection(Mock(legacy_upload=False, base_url='example.com'))
    reply = requests.Response()
    reply._content = 'zSDFasfghsRGH'  # not JSON -> parse failure
    reply.status_code = 200
    conn.get = MagicMock(return_value=reply)
    assert conn.api_registration_check() is None
def test_registration_check_bad_res(get_proxies, _init_session, _):
    '''
    Failure HTTP response
    Returns None
    '''
    conn = InsightsConnection(Mock(legacy_upload=False, base_url='example.com'))
    reply = requests.Response()
    reply._content = 'wakannai'
    reply.status_code = 500  # server error -> check fails
    conn.get = MagicMock(return_value=reply)
    assert conn.api_registration_check() is None
def test_registration_check_ok_reg(get_proxies, _init_session, _):
    '''
    Request completed OK, registered
    Returns True
    '''
    conn = InsightsConnection(Mock(legacy_upload=True, base_url='example.com'))
    reply = requests.Response()
    # Never unregistered -> considered registered.
    reply._content = json.dumps({'unregistered_at': None})
    reply.status_code = 200
    conn.get = MagicMock(return_value=reply)
    assert conn.api_registration_check()
def collect_args(*insights_config_args, **insights_config_custom_kwargs):
    """
    Instantiates InsightsConfig with a default logging_file argument.
    """
    kwargs = dict(
        logging_file="/tmp/insights.log",
        remove_file=conf_remove_file,
        redaction_file=conf_file_redaction_file,
        content_redaction_file=conf_file_content_redaction_file,
        core_collect=False,
    )
    # Caller-supplied keyword arguments override the defaults above.
    kwargs.update(insights_config_custom_kwargs)
    return InsightsConfig(*insights_config_args, **kwargs), Mock()
def test_rhsm_stage_legacy_base_url_configured():
    '''
    Ensure the correct base URL is assembled for a legacy staging RHSM upload

    This will still force legacy_upload=False as there is no classic
    staging env, so the result is the same as platform upload.
    '''
    config = Mock(base_url=None, upload_url=None,
                  legacy_upload=True, proxy=None)
    set_auto_configuration(
        config, 'cert.cloud.stage.redhat.com', None, None, False, True)
    assert config.base_url == 'cert.cloud.stage.redhat.com/api'
def test_execute_alert(self):
    """execute_alert resolves the definition to a callable and collects it."""
    execution_commands = [{
        'clusterName': 'cluster',
        'hostName': 'host',
        'alertDefinition': {
            'name': 'alert1'
        }
    }]
    scheduler = AlertSchedulerHandler(TEST_PATH, TEST_PATH, TEST_PATH,
                                      TEST_PATH, None, None)
    alert_mock = MagicMock()
    alert_mock.collect = Mock()
    alert_mock.set_helpers = Mock()
    # Patch the handler's name-mangled private factory and config map.
    scheduler._AlertSchedulerHandler__json_to_callable = Mock(
        return_value=alert_mock)
    scheduler._AlertSchedulerHandler__config_maps = {'cluster': {}}
    scheduler.execute_alert(execution_commands)
    # The definition must be routed to the factory, then collected.
    scheduler._AlertSchedulerHandler__json_to_callable.assert_called_with(
        'cluster', 'host', {'name': 'alert1'})
    self.assertTrue(alert_mock.collect.called)
def test_registration_check_ok_reg_then_unreg(get_proxies, _init_session, _):
    '''
    Request completed OK, was once registered but has been unregistered
    Returns the date it was unregistered
    '''
    conn = InsightsConnection(Mock(legacy_upload=True, base_url='example.com'))
    reply = requests.Response()
    reply._content = json.dumps({'unregistered_at': '2019-04-10'})
    reply.status_code = 200
    conn.get = MagicMock(return_value=reply)
    assert conn.api_registration_check() == '2019-04-10'
def test_skipped_job_finalized(self):
    # A job skipped because of bail_on_job_failure must still be finalized.
    job_spec = get_jobspec()
    context = get_context()
    context.cm.run_config.bail_on_job_failure = True
    failing_job = Job_force_retry(3, job_spec, 1, context)
    skipped_job = Job_force_retry(0, job_spec, 1, context)
    failing_job.workload = Mock()
    skipped_job.workload = Mock()
    context.cm._joblist = [failing_job, skipped_job]
    context.run_state.add_job(failing_job)
    context.run_state.add_job(skipped_job)
    runner = Runner(context, MockProcessorManager())
    try:
        runner.run()
    except ExecutionError:
        assert_equal(skipped_job.finalized, True)
    else:
        assert False, "ExecutionError not raised"
def test_propose_multidist(self):
    """A proposal with per-component thresholds retries until accepted."""
    eps = 1.
    threshold = [eps, eps]
    thetai = 1
    postfn = lambda theta: thetai
    dist = Mock()
    draw_results = [[eps * 2, eps * 2], [eps / 2, eps * 2],
                    [eps * 2, eps / 2], [eps, eps]]
    dist.side_effect = draw_results
    sampler = abcpmc.Sampler(2, None, postfn, dist)
    thetas = np.array([[0.5], [1]])
    weights = np.array([0.75, 0.25])
    pool = abcpmc.sampler.PoolSpec(1, threshold, 1, thetas, None, weights)
    wrapper = abcpmc.sampler.ParticleProposal(sampler, threshold, pool, {})
    _, _, cnt = wrapper(0)
    # All four draws were consumed before acceptance.
    assert cnt == len(draw_results)
def test_certSigningFailed(self, dumpsMock, loadsMock, sleepMock, pformatMock):
    """An SSL error during registration stops the controller's retry loop."""
    register = MagicMock()
    self.controller.register = register
    dumpsMock.return_value = "request"
    response = {"responseId":1,}
    loadsMock.return_value = response
    # Simulate certificate signing failure at the transport layer.
    self.controller.sendRequest = Mock(side_effect=ssl.SSLError())
    self.controller.repeatRegistration=True
    self.controller.registerWithServer()
    # Controller thread and the agent stop if the repeatRegistration flag is False
    self.assertFalse(self.controller.repeatRegistration)
def test_rhsm_platform_base_url_configured():
    '''
    Ensure the correct base URL is assembled for a platform RHSM upload
    '''
    config = Mock(base_url=None, upload_url=None,
                  legacy_upload=False, proxy=None)
    # Earlier revisions targeted 'cloud.redhat.com' with base URL
    # 'cloud.redhat.com/api', then '/r/insights/platform'; the current
    # contract is asserted below.
    set_auto_configuration(
        config, 'cert-api.access.redhat.com', None, None, False, False)
    assert config.base_url == 'cert-api.access.redhat.com/r/insights'
def test_platform_path_added():
    '''
    Ensure /platform is added when legacy_upload is false
    Ensure it's not added when legacy_upload is true
    '''
    base = 'test.satellite.com:443/redhat_access/r/insights'
    # (auto_config, legacy_upload, expected base_url) -- /platform is
    # appended exactly when legacy_upload is False.
    scenarios = [
        (True, True, base),
        (True, False, base + '/platform'),
        (False, True, base),
        (False, False, base + '/platform'),
    ]
    for auto_config, legacy_upload, expected in scenarios:
        config = Mock(base_url=base, auto_config=auto_config,
                      legacy_upload=legacy_upload, offline=False)
        try_auto_configuration(config)
        assert config.base_url == expected
def test_download_subtitles(monkeypatch, tmpdir, video, tvshow, create_sub,
                            create_tvepisode, p):
    """Parametrized end-to-end check of sut.download_subtitles: the provider
    pool, scoring, refinement and pre/post scripts are all mocked; `p` drives
    each scenario's inputs and expectations."""
    # Given -- build candidate and "best" subtitles from the scenario data.
    subtitles = [create_sub(language=code, id=sid, content=content)
                 for sid, code, content in p['list_subtitles']]
    best_subtitles = [create_sub(language=code, id=sid, content=content)
                      for sid, code, content in p['best_subtitles']]
    video_path = text_type(tmpdir.ensure(video.name))
    tvepisode = create_tvepisode(series=tvshow, season=3, episode=4,
                                 subtitles=p['existing_subtitles'])
    external_subtitles = p['external_subtitles']
    # None in the scenario means "use the default" (True).
    embedded_subtitles = p['embedded_subtitles'] if p['embedded_subtitles'] is not None else True
    refine = Mock()
    compute_score = Mock(return_value=1)
    list_subtitles = Mock(return_value=subtitles)
    download_best_subtitles = Mock(return_value=best_subtitles)
    popen = Mock()
    # Apply the scenario's app settings and stub the collaborators.
    monkeypatch.setattr(app, 'SYS_ENCODING', 'utf-8')
    monkeypatch.setattr(app, 'SUBTITLES_MULTI', p['multiple_subtitles'])
    monkeypatch.setattr(app, 'SUBTITLES_LANGUAGES', p['wanted_languages'])
    monkeypatch.setattr(app, 'SUBTITLES_PRE_SCRIPTS', p['pre_scripts'])
    monkeypatch.setattr(app, 'SUBTITLES_EXTRA_SCRIPTS', p['post_scripts'])
    monkeypatch.setattr(app, 'SUBTITLES_HEARING_IMPAIRED', p['hearing_impaired'])
    monkeypatch.setattr(sut, 'refine', refine)
    monkeypatch.setattr(sut, 'compute_score', compute_score)
    monkeypatch.setattr(ProviderPool, 'list_subtitles', list_subtitles)
    monkeypatch.setattr(ProviderPool, 'download_best_subtitles',
                        download_best_subtitles)
    monkeypatch.setattr(subprocess, 'Popen', popen)
    # When
    actual = sut.download_subtitles(tv_episode=tvepisode,
                                    video_path=video_path,
                                    subtitles=external_subtitles,
                                    embedded_subtitles=embedded_subtitles)
    # Then
    assert p['expected'] == actual
    if p['expected']:
        # One Popen per pre-script, plus one per post-script per subtitle.
        assert len(p['pre_scripts']) + len(p['post_scripts']) * len(p['best_subtitles']) == popen.call_count
    if refine.called:
        # Refinement must receive the scenario's flags and the episode.
        assert embedded_subtitles == refine.call_args[1]['embedded_subtitles']
        assert tvepisode == refine.call_args[1]['tv_episode']
def test_sat_legacy_url(set_auto_configuration, initConfig):
    '''
    Ensure the correct host URL is selected for auto_config on a legacy Sat upload
    '''
    # Config lookups in the order the implementation queries them; the
    # last value is the CA certificate path.
    initConfig().get.side_effect = [
        'test.satellite.com', '443', '', '', '', '', 'test_cert']
    config = Mock(base_url=None, upload_url=None, legacy_upload=True,
                  insecure_connection=False)
    _try_satellite6_configuration(config)
    set_auto_configuration.assert_called_with(
        config, 'test.satellite.com:443/redhat_access', 'test_cert',
        None, True, False)
def test_metrics_post_event_proxy(post, config_file_factory, rhsm_config_file_factory):
    """Events are POSTed through the authenticated proxy taken from rhsm.conf."""
    config_file = config_file_factory("")
    # rhsm config carries proxy host, port and (masked) credentials.
    rhsm_config_file = rhsm_config_file_factory(
        proxy_hostname="localhost", proxy_port=3128,
        proxy_user="******", proxy_password="******"
    )
    metrics_client = MetricsHTTPClient(config_file=config_file.name,
                                       rhsm_config_file=rhsm_config_file.name)
    event = Mock()
    metrics_client.post(event)
    # The proxy credentials must be embedded in the proxies URL.
    post.assert_called_once_with(
        "https://cert-api.access.redhat.com:443/redhat_access/r/insights/platform/module-update-router/v1/event",
        json=event,
        proxies={"https": "http://*****:*****@localhost:3128"},
    )
def test_listdir(self, mock_list, mock_metadata, mock_expired):
    """Test listing a directory.

    Covers three behaviors: deleted entries are filtered out, results are
    cached (no extra backend calls on a repeat), and an expired cache entry
    triggers a refresh. Call counts on the mocks track backend usage.
    """
    file1 = Mock(spec=FileMetadata)
    file1.name = 'file1.txt'
    file2 = Mock(spec=FileMetadata)
    file2.name = 'file2.txt'
    folder1 = Mock(spec=FolderMetadata)
    folder1.name = 'more_files'
    # The DeletedMetadata entry must be dropped from the listing.
    entries = [
        file1,
        file2,
        folder1,
        Mock(spec=DeletedMetadata),
    ]
    mock_metadata.return_value = Mock(FolderMetadata)
    # Consumed in order by the four list_folder calls performed below.
    mock_list.side_effect = [
        ListFolderResult(entries=entries),
        ListFolderResult(entries=[]),
        ListFolderResult(entries=[Mock(FolderMetadata)]),
        ListFolderResult(entries=[Mock(FolderMetadata)]),
    ]
    # Cache freshness per lookup: fresh, fresh, then expired.
    mock_expired.side_effect = [False, False, True]
    children = self.fs.listdir('/files')
    self.assertIsInstance(children, list)
    self.assertEqual(3, len(children))
    # Check that it cached the result
    children = self.fs.listdir('/files')
    self.assertEqual(1, mock_metadata.call_count)
    self.assertEqual(1, mock_list.call_count)
    self.assertIsInstance(children, list)
    self.assertEqual(3, len(children))
    self.assertEqual('file1.txt', children[0])
    self.assertEqual('file2.txt', children[1])
    self.assertEqual('more_files', children[2])
    self.fs.listdir('/folder')
    # Check that it cached the result but still updates with no children
    children = self.fs.listdir('/folder')
    self.assertEqual(3, mock_metadata.call_count)
    self.assertEqual(3, mock_list.call_count)
    self.assertEqual(1, len(children))
    # Check that it cached the result but still updates since it expired
    children = self.fs.listdir('/folder')
    self.assertEqual(4, mock_metadata.call_count)
    self.assertEqual(4, mock_list.call_count)
    self.assertEqual(1, len(children))
def test_resolve_cpu(self):
    """resolve_cpus/resolve_unique_domain_cpus map name prefixes to cpu ids.

    Cleanup: the original assigned ``mock.core_cpus`` twice in a row; the
    duplicate statement is removed.
    """
    # Set up a mock target: big.LITTLE with two A72s and two A53s.
    mock = Mock()
    mock.big_core = "A72"
    mock.little_core = "A53"
    mock.core_names = ['A72', 'A72', 'A53', 'A53']
    mock.number_of_cpus = 4

    def mock_core_cpus(core):
        # All cpu indices whose core name matches.
        return [i for i, c in enumerate(mock.core_names) if c == core]

    def mock_online_cpus():
        return [0, 1, 2]

    def mock_offline_cpus():
        return [3]

    def mock_related_cpus(core):
        # cpus 0-1 and 2-3 each share a frequency domain.
        if core in [0, 1]:
            return [0, 1]
        elif core in [2, 3]:
            return [2, 3]

    mock.list_online_cpus = mock_online_cpus
    mock.list_offline_cpus = mock_offline_cpus
    mock.core_cpus = mock_core_cpus
    mock.cpufreq.get_related_cpus = mock_related_cpus

    # Check retrieving cpus from a given prefix
    assert_equal(resolve_cpus('A72', mock), [0, 1])
    assert_equal(resolve_cpus('A53', mock), [2, 3])
    assert_equal(resolve_cpus('big', mock), [0, 1])
    assert_equal(resolve_cpus('little', mock), [2, 3])
    assert_equal(resolve_cpus('', mock), [0, 1, 2, 3])
    assert_equal(resolve_cpus('cpu0', mock), [0])
    assert_equal(resolve_cpus('cpu3', mock), [3])

    # Check get unique domain cpus
    assert_equal(resolve_unique_domain_cpus('A72', mock), [0])
    assert_equal(resolve_unique_domain_cpus('A53', mock), [2])
    assert_equal(resolve_unique_domain_cpus('big', mock), [0])
    assert_equal(resolve_unique_domain_cpus('little', mock), [2])
    assert_equal(resolve_unique_domain_cpus('', mock), [0, 2])
    assert_equal(resolve_unique_domain_cpus('cpu0', mock), [0])
    assert_equal(resolve_unique_domain_cpus('cpu3', mock), [2])
def test_circuitbreaker_recover_half_open(mock_remote: Mock):
    """The breaker opens after 3 failures, half-opens after the 1s recover
    timeout, and re-opens when the half-open trial call fails again."""
    circuitbreaker = CircuitBreakerMonitor.get('threshold_3')

    # initial state: closed
    assert circuitbreaker.closed
    assert circuitbreaker.state == STATE_CLOSED

    # no exception -> success
    assert circuit_threshold_3_timeout_1()

    # from now all subsequent calls will fail
    mock_remote.side_effect = ConnectionError('Connection refused')

    # 1. failed call -> original exception
    with raises(ConnectionError):
        circuit_threshold_3_timeout_1()
    assert circuitbreaker.closed
    assert circuitbreaker.failure_count == 1

    # 2. failed call -> original exception
    with raises(ConnectionError):
        circuit_threshold_3_timeout_1()
    assert circuitbreaker.closed
    assert circuitbreaker.failure_count == 2

    # 3. failed call -> original exception
    with raises(ConnectionError):
        circuit_threshold_3_timeout_1()

    # Circuit breaker opens, threshold has been reached
    assert circuitbreaker.opened
    assert circuitbreaker.state == STATE_OPEN
    assert circuitbreaker.failure_count == 3
    assert 0 < circuitbreaker.open_remaining <= 1

    # 4. failed call -> not passed to function -> CircuitBreakerError
    with raises(CircuitBreakerError):
        circuit_threshold_3_timeout_1()
    assert circuitbreaker.opened
    assert circuitbreaker.failure_count == 3
    assert 0 < circuitbreaker.open_remaining <= 1

    # 5. failed call -> not passed to function -> CircuitBreakerError
    with raises(CircuitBreakerError):
        circuit_threshold_3_timeout_1()
    assert circuitbreaker.opened
    assert circuitbreaker.failure_count == 3
    assert 0 < circuitbreaker.open_remaining <= 1

    # wait for 1 second (recover timeout)
    sleep(1)

    # circuit half-open -> next call will be passed through
    assert not circuitbreaker.closed
    assert circuitbreaker.open_remaining < 0
    assert circuitbreaker.state == STATE_HALF_OPEN

    # State half-open -> function is executed -> original exception
    with raises(ConnectionError):
        circuit_threshold_3_timeout_1()
    assert circuitbreaker.opened
    assert circuitbreaker.failure_count == 4
    assert 0 < circuitbreaker.open_remaining <= 1

    # State open -> not passed to function -> CircuitBreakerError
    with raises(CircuitBreakerError):
        circuit_threshold_3_timeout_1()
def test_tickets(self):
    """Tickets page renders the ingressos template."""
    handler = Mock()
    tickets.index(handler)
    handler.assert_called_once_with('/templates/ingressos.html')
def test_aboutUs(self):
    """About page renders the aboutus template."""
    handler = Mock()
    about.index(handler)
    handler.assert_called_once_with('/templates/aboutus.html')
def test_sucesso(self):
    """Photo page renders the foto template."""
    handler_mock = Mock()
    crud.foto(handler_mock)
    handler_mock.assert_called_once_with('/templates/foto.html')
def test_sucesso(self):
    """Directions page renders the como_chegar template."""
    handler_mock = Mock()
    crud.como_chegar(handler_mock)
    handler_mock.assert_called_once_with('/templates/como_chegar.html')
def test_circuitbreaker_reopens_after_successful_calls(mock_remote: Mock):
    """After the recover timeout, a successful half-open call fully resets
    the breaker (closed, failure count back to zero)."""
    circuitbreaker = CircuitBreakerMonitor.get('threshold_2')
    assert str(circuitbreaker) == 'threshold_2'

    # initial state: closed
    assert circuitbreaker.closed
    assert circuitbreaker.state == STATE_CLOSED
    assert circuitbreaker.failure_count == 0

    # successful call -> no exception
    assert circuit_threshold_2_timeout_1()

    # from now all subsequent calls will fail
    mock_remote.side_effect = ConnectionError('Connection refused')

    # 1. failed call -> original exception
    with raises(ConnectionError):
        circuit_threshold_2_timeout_1()
    assert circuitbreaker.closed
    assert circuitbreaker.failure_count == 1

    # 2. failed call -> original exception
    with raises(ConnectionError):
        circuit_threshold_2_timeout_1()

    # Circuit breaker opens, threshold has been reached
    assert circuitbreaker.opened
    assert circuitbreaker.state == STATE_OPEN
    assert circuitbreaker.failure_count == 2
    assert 0 < circuitbreaker.open_remaining <= 1

    # 3. failed call -> not passed to function -> CircuitBreakerError
    with raises(CircuitBreakerError):
        circuit_threshold_2_timeout_1()
    assert circuitbreaker.opened
    assert circuitbreaker.failure_count == 2
    assert 0 < circuitbreaker.open_remaining <= 1

    # from now all subsequent calls will succeed
    mock_remote.side_effect = None

    # but recover timeout has not been reached -> still open
    # 4. failed call -> not passed to function -> CircuitBreakerError
    with raises(CircuitBreakerError):
        circuit_threshold_2_timeout_1()
    assert circuitbreaker.opened
    assert circuitbreaker.failure_count == 2
    assert 0 < circuitbreaker.open_remaining <= 1

    # wait for 1 second (recover timeout)
    sleep(1)

    # circuit half-open -> next call will be passed through
    assert not circuitbreaker.closed
    assert circuitbreaker.failure_count == 2
    assert circuitbreaker.open_remaining < 0
    assert circuitbreaker.state == STATE_HALF_OPEN

    # successful call
    assert circuit_threshold_2_timeout_1()

    # circuit closed and reset'ed
    assert circuitbreaker.closed
    assert circuitbreaker.state == STATE_CLOSED
    assert circuitbreaker.failure_count == 0

    # some another successful calls
    assert circuit_threshold_2_timeout_1()
    assert circuit_threshold_2_timeout_1()
    assert circuit_threshold_2_timeout_1()
def test_cadastrar_produto(self):
    """Product registration page renders the add_product template."""
    handler_mock = Mock()
    crud.cadastrar_produto(handler_mock)
    handler_mock.assert_called_once_with('/templates/add_product.html')
def test_sucesso(self):
    """Home page renders the index template."""
    resposta_mock = Mock()
    home.index(resposta_mock)
    resposta_mock.assert_called_once_with('/templates/index.html')
def test_vitrine(self):
    """Storefront page renders the shop template."""
    handler_mock = Mock()
    crud.vitrine(handler_mock)
    handler_mock.assert_called_once_with('/templates/shop.html')