def test_harvest_records_returns_errors_if_transform_raises(
        self, mock_list_records, mock_transform_operations):
    """Harvesting returns the collected errors when the transform step raises.

    Args:
        mock_list_records: mocked OAI ListRecords call (returns an empty,
            successful response).
        mock_transform_operations: mocked transform step, set up to raise.
    """
    # Arrange
    resumption_token = None
    content = []
    status_code = status.HTTP_200_OK
    # _harvest_records expects a (response, resumption_token) pair.
    mock_list_records.return_value = Response(
        content, status=status_code), resumption_token
    error_message = "Error"
    expected_error = [{
        'status_code': status.HTTP_400_BAD_REQUEST,
        'error': error_message
    }]
    registry = Mock(spec=OaiRegistry())
    registry.url = "dummy_url"
    metadata_format = Mock(spec=OaiHarvesterMetadataFormat())
    metadata_format.metadata_prefix = "oai_dummy"
    last_update = registry_all_sets = None
    mock_transform_operations.side_effect = Exception(error_message)

    # Act
    result = oai_registry_api._harvest_records(registry, metadata_format,
                                               last_update, registry_all_sets)

    # Assert
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(result, expected_error)
def test_update_metadata_format_serializer_invalid(self, mock_metadata_format):
    """A PATCH with an invalid payload is rejected with HTTP 400."""
    # Arrange: the looked-up metadata format exists, but the payload is bad.
    mock_metadata_format.return_value = Mock(spec=OaiProviderMetadataFormat())
    view = rest_oai_provider_metadata_format.MetadataFormatDetail.as_view()
    staff_user = create_mock_user("1", is_staff=True)

    # Act
    response = RequestMock.do_request_patch(
        view,
        user=staff_user,
        data=self.bad_data,
        param=self.param,
    )

    # Assert
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_rhsm_legacy_url(set_auto_configuration, initConfig):
    """Auto-config on a legacy RHSM upload selects the classic API host."""
    # rhsm.conf lookups: hostname, port, then five empty values.
    initConfig().get.side_effect = [
        'subscription.rhsm.redhat.com', '443', '', '', '', '', ''
    ]
    config = Mock(
        base_url=None,
        upload_url=None,
        legacy_upload=True,
        insecure_connection=False)

    _try_satellite6_configuration(config)

    set_auto_configuration.assert_called_with(
        config, 'cert-api.access.redhat.com', None, None, False)
def test_download_subtitles(monkeypatch, tmpdir, video, tvshow, create_sub, create_tvepisode, p):
    """Parametrized end-to-end test of ``sut.download_subtitles``.

    Each parameter set ``p`` drives the provider results, app settings and
    the expected return value; all external collaborators (subliminal
    provider pool, refine/compute_score, subprocess.Popen) are mocked.
    """
    # Given
    subtitles = [create_sub(language=code, id=sid, content=content) for sid, code, content in p['list_subtitles']]
    best_subtitles = [create_sub(language=code, id=sid, content=content) for sid, code, content in p['best_subtitles']]
    video_path = str(tmpdir.ensure(video.name))
    tvepisode = create_tvepisode(show=tvshow, season=3, episode=4, subtitles=p['existing_subtitles'])
    external_subtitles = p['external_subtitles']
    # None in the fixture means "use the default", which is True.
    embedded_subtitles = p['embedded_subtitles'] if p['embedded_subtitles'] is not None else True
    refine = Mock()
    compute_score = Mock(return_value=1)
    list_subtitles = Mock(return_value=subtitles)
    download_best_subtitles = Mock(return_value=best_subtitles)
    popen = Mock()
    # Patch app settings read by the code under test...
    monkeypatch.setattr('medusa.SYS_ENCODING', 'utf-8')
    monkeypatch.setattr('medusa.SUBTITLES_MULTI', p['multiple_subtitles'])
    monkeypatch.setattr('medusa.SUBTITLES_LANGUAGES', p['wanted_languages'])
    monkeypatch.setattr('medusa.SUBTITLES_PRE_SCRIPTS', p['pre_scripts'])
    monkeypatch.setattr('medusa.SUBTITLES_EXTRA_SCRIPTS', p['post_scripts'])
    monkeypatch.setattr('medusa.SUBTITLES_HEARING_IMPAIRED', p['hearing_impaired'])
    # ...and every external entry point.
    monkeypatch.setattr('medusa.subtitles.refine', refine)
    monkeypatch.setattr('medusa.subtitles.compute_score', compute_score)
    monkeypatch.setattr('subliminal.core.ProviderPool.list_subtitles', list_subtitles)
    monkeypatch.setattr('subliminal.core.ProviderPool.download_best_subtitles', download_best_subtitles)
    monkeypatch.setattr('subprocess.Popen', popen)

    # When
    actual = sut.download_subtitles(tv_episode=tvepisode, video_path=video_path, subtitles=external_subtitles,
                                    embedded_subtitles=embedded_subtitles)

    # Then
    assert p['expected'] == actual
    if p['expected']:
        # Popen is called once per pre-script, plus once per post-script per
        # downloaded subtitle.
        assert len(p['pre_scripts']) + len(p['post_scripts']) * len(p['best_subtitles']) == popen.call_count
        if refine.called:
            assert embedded_subtitles == refine.call_args[1]['embedded_subtitles']
            assert tvepisode == refine.call_args[1]['tv_episode']
def test_get_stages_data_with_workflows(self):
    """get_stages maps stage JSON (including nested workflows) onto stage objects."""
    stage_json = {
        'name': 'foostage',
        'active': True,
        'workflows': [{'name': 'foo', 'active': True, 'actions': []}]
    }

    stages = self.dal.get_stages({'stages': [stage_json]}, Mock(id=1), Mock())

    stage = stages[0]
    expected = {
        'name': 'foostage',
        'active': True,
        'pipeline_id': 1,
        'order': 0,
        'workflows': 1
    }
    for attribute, value in expected.items():
        # 'workflows' is compared by count, everything else by value.
        if attribute == 'workflows':
            eq_(value, len(getattr(stage, attribute)))
        else:
            eq_(value, getattr(stage, attribute))
def test_sat_platform_url(set_auto_configuration, initConfig):
    """Auto-config on a platform Satellite upload selects the Satellite host."""
    # rhsm.conf lookups: hostname, port, five empties, then the CA cert.
    initConfig().get.side_effect = [
        'test.satellite.com', '443', '', '', '', '', 'test_cert'
    ]
    config = Mock(
        base_url=None,
        upload_url=None,
        legacy_upload=False,
        insecure_connection=False)

    _try_satellite6_configuration(config)

    set_auto_configuration.assert_called_with(
        config, 'test.satellite.com:443/redhat_access', 'test_cert', None, True)
def test_cancel_pipeline_instance_returns_404_if_invalid(
        self, db_session, pipeline_instance_by_id):
    """
    @rapid-unit Workflow:Cancel Pipeline Instance:Should indicate not found if instance not found
    """
    # Arrange: the lookup finds no pipeline instance.
    pipeline_instance_by_id.return_value = None
    db_session.return_value = [Mock()]

    # Act / Assert: cancelling an unknown id raises a 404.
    with self.assertRaises(InvalidObjectException) as raised:
        self.dal.cancel_pipeline_instance(12345)

    eq_(404, raised.exception.code)
    eq_("Pipeline Instance not found", raised.exception.description)
def test_platform_path_added_cloud_redhat():
    """
    /platform is appended when legacy_upload is False for any base_url ending
    in /r/insights; a direct cloud.redhat.com API base_url is left alone.
    """
    cases = [
        # (description, base_url, expected base_url after auto configuration)
        ('classic API',
         'cert-api.access.redhat.com/r/insights',
         'cert-api.access.redhat.com/r/insights/platform'),
        ('satellite',
         'test.satellite.com:443/redhat_access/r/insights',
         'test.satellite.com:443/redhat_access/r/insights/platform'),
        ('cloud.redhat.com compatibility layer - classic API hosted on c.rh.c',
         'cloud.redhat.com/r/insights',
         'cloud.redhat.com/r/insights/platform'),
        ('cloud.redhat.com API directly connected',
         'cloud.redhat.com/api',
         'cloud.redhat.com/api'),
    ]
    for description, base_url, expected in cases:
        config = Mock(base_url=base_url, auto_config=True,
                      legacy_upload=False, offline=False)
        try_auto_configuration(config)
        assert config.base_url == expected, description
def test_rhsm_platform_base_url_configured():
    """The correct base URL is assembled for a platform RHSM upload.

    The classic API host is passed in and the /r/insights/platform path is
    appended because legacy_upload is False.
    """
    # Commented-out dead assertions for a cloud.redhat.com host were removed;
    # this test pins only the classic-API host behavior.
    config = Mock(
        base_url=None,
        upload_url=None,
        legacy_upload=False,
        insecure_connection=False,
        proxy=None)

    set_auto_configuration(config, 'cert-api.access.redhat.com', None, None, False)

    assert config.base_url == 'cert-api.access.redhat.com/r/insights/platform'
def test_skipped_job_state(self):
    """With bail_on_job_failure set, jobs after a failing job end up SKIPPED."""
    job_spec = get_jobspec()
    context = get_context()
    context.cm.run_config.bail_on_job_failure = True

    failing_job = Job_force_retry(3, job_spec, 1, context)
    skipped_job = Job(job_spec, 1, context)
    failing_job.workload = Mock()
    skipped_job.workload = Mock()
    context.cm._joblist = [failing_job, skipped_job]
    context.run_state.add_job(failing_job)
    context.run_state.add_job(skipped_job)

    runner = Runner(context, MockProcessorManager())
    bailed = False
    try:
        runner.run()
    except ExecutionError:
        bailed = True
    assert bailed, "ExecutionError not raised"
    assert_equal(skipped_job.status, Status.SKIPPED)
def test_platform_path_added():
    """
    /platform is appended when legacy_upload is False and left off when it is
    True, regardless of auto_config.
    """
    base = 'test.satellite.com:443/redhat_access/r/insights'
    for auto_config in (True, False):
        for legacy_upload in (True, False):
            config = Mock(base_url=base, auto_config=auto_config,
                          legacy_upload=legacy_upload, offline=False)
            try_auto_configuration(config)
            # Only the non-legacy upload path gains the /platform suffix.
            expected = base if legacy_upload else base + '/platform'
            assert config.base_url == expected
def test_certSigningFailed(self, dumpsMock, loadsMock, sleepMock, pformatMock):
    """Registration stops retrying when every send attempt fails with an SSL error."""
    register = MagicMock()
    self.controller.register = register
    dumpsMock.return_value = "request"
    loadsMock.return_value = {"responseId": 1}
    # Every request to the server raises an SSL error (cert signing failed).
    self.controller.sendRequest = Mock(side_effect=ssl.SSLError())
    self.controller.repeatRegistration = True

    self.controller.registerWithServer()

    # Controller thread and the agent stop if the repeatRegistration flag is False.
    self.assertFalse(self.controller.repeatRegistration)
def test_template_to_metadata_format_mapping_xslt_impossible_temp_meta_form(
        self, mock_get_xslt, mock_get_template, mock_get_meta_form):
    """Mapping an XSLT to a template metadata format is rejected with HTTP 400."""
    # Arrange
    mock_get_xslt.return_value = Mock(spec=XslTransformation)
    mock_get_template.return_value = Mock(spec=Template)
    metadata_format = OaiProviderMetadataFormat()
    # The metadata format is itself a template, which makes the mapping invalid.
    metadata_format.is_template = True
    mock_get_meta_form.return_value = metadata_format
    view = rest_oai_provider_metadata_format.TemplateMetadataFormatXSLT.as_view()

    # Act
    response = RequestMock.do_request_post(
        view,
        user=create_mock_user("1", is_staff=True),
        data=self.data,
    )

    # Assert
    self.assertEqual(
        response.data["message"]["oai_metadata_format"],
        ["Impossible to map a XSLT to a template metadata format"],
    )
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_insights_id_request(get_proxies, init_session, get_canonical_facts, generate_machine_id):
    """
    When Canonical Facts collection fails, the check-in POST is issued with
    the correct URL and headers and a body containing only the Insights ID.
    """
    connection = InsightsConnection(Mock(base_url="www.example.com"))

    connection.checkin()

    expected_url = connection.inventory_url + "/hosts/checkin"
    expected_headers = {"Content-Type": "application/json"}
    # The fallback body carries just the machine id as the insights_id.
    expected_data = {"insights_id": generate_machine_id.return_value}
    init_session.return_value.post.assert_called_once_with(
        expected_url, headers=expected_headers, data=dumps(expected_data))
def test_normal_job_finalized(self):
    """A job that never retries is initialized and then finalized by the runner."""
    context = get_context()
    job = Job_force_retry(0, get_jobspec(), 1, context)
    job.workload = Mock()
    context.cm._joblist = [job]
    context.run_state.add_job(job)

    Runner(context, MockProcessorManager()).run()

    assert_equal(job.initialized, True)
    assert_equal(job.finalized, True)
def test_job_retry_status(self):
    """A restarted job is back in PENDING state when JOB_RESTARTED fires."""
    context = get_context()
    job = Job_force_retry(2, get_jobspec(), 1, context)
    job.workload = Mock()
    context.cm._joblist = [job]
    context.run_state.add_job(job)

    def check_pending(_):
        assert_equal(job.status, Status.PENDING)

    signal.connect(check_pending, signal.JOB_RESTARTED)
    runner = Runner(context, MockProcessorManager())
    runner.run()
    # Always detach the handler so other tests are unaffected.
    signal.disconnect(check_pending, signal.JOB_RESTARTED)
def test_template_to_metadata_format_mapping_xslt_oai_xslt_template_not_found(
        self, mock_get_template, mock_get_by_id):
    """A missing XSLT during template/metadata-format mapping yields HTTP 404."""
    # Arrange: the template resolves but the XSLT lookup fails.
    mock_get_template.return_value = Mock(spec=Template)
    mock_get_by_id.side_effect = exceptions.DoesNotExist("Error")
    view = rest_oai_provider_metadata_format.TemplateMetadataFormatXSLT.as_view()

    # Act
    response = RequestMock.do_request_post(
        view,
        user=create_mock_user("1", is_staff=True),
        data=self.data,
    )

    # Assert
    self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def _create_mock_template_version_manager(title="", versions=None, is_disabled=False, user_id=""):
    """Build a mock template version manager.

    Args:
        title: manager title.
        versions: list of versions (defaults to empty).
        is_disabled: whether the manager is disabled.
        user_id: owner id, stored as a string.

    Returns:
        Mock with the TemplateVersionManager spec.
    """
    mock_manager = Mock(spec=TemplateVersionManager)
    mock_manager.title = title
    mock_manager.id = ObjectId()
    mock_manager.versions = [] if versions is None else versions
    mock_manager.disabled_versions = []
    mock_manager.is_disabled = is_disabled
    mock_manager.user = str(user_id)
    mock_manager._cls = TemplateVersionManager.class_name
    return mock_manager
def test_get_command(self):
    """
    rapid-unit: Rapid Client:Remote Execution:Will download remote file when remote: is used.
    """
    work_request = WorkRequest({
        "action_instance_id": 1,
        "cmd": "/bin/sh",
        "executable": "{}trial.sh".format(Communication.REMOTE_FILE),
        "args": "2>&1",
    })
    executor = Executor(work_request, None)
    communicator = Mock()
    # The remote executable is replaced by its locally downloaded path.
    communicator.get_downloaded_file_name.return_value = "/tmp/rapidci/workspace/trial.sh"

    eq_(["/bin/sh", "/tmp/rapidci/workspace/trial.sh"],
        executor.get_command(communicator))
def test_canonical_facts_request(get_proxies, post, get_canonical_facts):
    """
    The check-in POST is issued with the correct URL and headers and a body
    containing the collected Canonical Facts.
    """
    connection = InsightsConnection(Mock(base_url="www.example.com"))

    connection.checkin()

    expected_url = connection.inventory_url + "/hosts/checkin"
    expected_headers = {"Content-Type": "application/json"}
    expected_data = get_canonical_facts.return_value
    post.assert_called_once_with(
        expected_url, headers=expected_headers, data=dumps(expected_data))
def test_add_block_to_filestream(self, file_contents, data, start_sentinel, end_sentinel, expected_result):
    """add_block_to_file rewrites the file contents between the sentinels."""
    written = []
    mocked_file = self.__mock_file(file_contents)
    # Capture everything written back to the mocked file.
    mocked_file.return_value.write = Mock(side_effect=written.append)
    with self.__with_mocked_file(mocked_file), Environment('/'):
        utilities.add_block_to_file('/test', data, None, start_sentinel, end_sentinel)
    self.assertEqual("".join(written), expected_result)
def test_subproc_calls_fgrep(tmpfile, Popen):
    """
    The sed command that removes passwords is invoked, and grep -F (fgrep) is
    used when exclude patterns are present and regex is False.
    """
    Popen.return_value.communicate = Mock(return_value=('test', None))
    exclude_patterns = ['test1', 'test2']

    _process_content_redaction(test_file.name, exclude_patterns, False)

    # The exclude patterns are written to the temp file handed to grep -F.
    tmpfile.assert_called_once()
    tmpfile.return_value.write.assert_called_once_with(
        '\n'.join(exclude_patterns).encode('utf-8'))
    tmpfile.return_value.flush.assert_called_once()
    Popen.assert_has_calls([
        call(['sed', '-rf', constants.default_sed_file, test_file.name],
             stdout=PIPE),
        call(['grep', '-v', '-F', '-f', tmpfile.return_value.name],
             stdin=Popen.return_value.stdout, stdout=PIPE),
    ])
def test_col_sortable_filter_name(self):
    """col_sortable_filter renders a sortable Name header and toggles direction."""
    ctx = Mock()

    # No current sort: link points at ascending sort and keeps other args.
    ctx.parent = {}
    self.request.args = {'foo': 'bar'}
    self.assertEqual(
        '<a href="http://puncover.com?foo=bar&sort=name_asc" class="sortable">Name</a>',
        renderers.col_sortable_filter(ctx, 'Name', True))

    # Current sort ascending: marked sorted-ascending, link flips to descending.
    ctx.parent = {'sort': 'name_asc'}
    self.request.args = {'sort': 'foo'}
    self.assertEqual(
        '<a href="http://puncover.com?sort=name_desc" class="sortable sort_asc_alpha">Name</a>',
        renderers.col_sortable_filter(ctx, 'Name', True))
def _create_mock_bucket(types=None):
    """Returns a mock bucket

    Args:
        types: list of types in the bucket (defaults to empty).

    Returns:
        Mock with the Bucket spec.
    """
    if types is None:
        types = []
    mock_bucket = Mock(spec=Bucket)
    mock_bucket.label = 'bucket'
    # Bug fix: the original assigned '#000000' to ``label`` a second time,
    # silently clobbering it; the hex value is the bucket color.
    mock_bucket.color = '#000000'
    mock_bucket.types = types
    return mock_bucket
def _create_mock_type(filename="", content="", is_disable=False):
    """Build a mock type.

    Args:
        filename: file name of the type.
        content: content of the type.
        is_disable: whether the type is disabled.

    Returns:
        Mock with the Type spec.
    """
    mock_type = Mock(spec=Type)
    mock_type.id = ObjectId()
    mock_type.filename = filename
    mock_type.content = content
    mock_type.is_disabled = is_disable
    return mock_type
def test_reconciled_stages(self):
    """Completing the final successful stage marks the whole pipeline successful."""
    second_stage = StageInstance(
        id=2, status_id=StatusConstants.SUCCESS, stage_id=2)
    pipeline_instance = PipelineInstance(
        stage_instances=[
            StageInstance(id=1, status_id=StatusConstants.SUCCESS),
            second_stage,
        ],
        pipeline=Pipeline(stages=[Stage(id=1), Stage(id=2)]))
    engine = InstanceWorkflowEngine(Mock(), pipeline_instance)
    engine._load_pipeline()

    engine.complete_a_stage(2, StatusConstants.SUCCESS)

    eq_(StatusConstants.SUCCESS, pipeline_instance.status_id)
    ok_(pipeline_instance.end_date is not None)
def test_file_dialog_applies_filename_filters(self):
    """Saving with the *.txt filter active appends the .txt extension."""
    no_ext_name = "test name without extension"
    # Select the first registered filter; it should be the *.txt one.
    # (Renamed from ``filter`` to avoid shadowing the builtin, which the
    # original suppressed with an @ReservedAssignment comment.)
    txt_filter = self.dialog.list_filters()[0]
    self.dialog.set_filter(txt_filter)
    # Mock current filename
    self.dialog.get_filename = Mock(return_value=no_ext_name)
    # Assert we are working with the filter we expect
    self.assertEqual(txt_filter.get_name(), "Text File")
    # Assert name is changed correctly
    self.assertEqual("%s.txt" % no_ext_name, self.dialog.filename)
    self.dialog.get_filename.assert_called_once_with()
def test_new_particle_multidist(self):
    """A particle is accepted only once all distances fall within the thresholds."""
    eps = 1.
    threshold = [eps, eps]

    def prior():
        return 1

    def postfn(theta):
        return 1

    dist = Mock()
    # Only the last draw satisfies both thresholds, so four draws are needed.
    distances = [[eps * 2, eps * 2],
                 [eps / 2, eps * 2],
                 [eps * 2, eps / 2],
                 [eps, eps]]
    dist.side_effect = distances

    wrapper = abcpmc.sampler._RejectionSamplingWrapper(
        threshold, prior, postfn, dist, None)
    _, _, cnt = wrapper(0)

    assert cnt == len(distances)
def test_staff_returns_http_200(self, mock_oaipmhmessage_get_message_labelled,
                                mock_oai_registry_api_upsert,
                                mock_oai_registry_api_get_by_id):
    """A staff user deactivating a registry receives HTTP 200."""
    # Arrange: the registry exists and upsert succeeds.
    mock_oaipmhmessage_get_message_labelled.return_value = None
    mock_oai_registry_api_upsert.return_value = None
    mock_oai_registry_api_get_by_id.return_value = Mock(spec=OaiRegistry)
    view = rest_oai_registry.DeactivateRegistry.as_view()
    staff_user = create_mock_user('1', is_staff=True)

    # Act
    response = RequestMock.do_request_patch(view, user=staff_user, param=self.param)

    # Assert
    self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_resolve_cpu(self):
    """resolve_cpus / resolve_unique_domain_cpus handle core names, aliases and cpuN ids."""
    # Set up a mock big.LITTLE target: cpus 0-1 are A72, 2-3 are A53; cpu3 is offline.
    mock = Mock()
    mock.big_core = "A72"
    mock.little_core = "A53"
    mock.core_names = ['A72', 'A72', 'A53', 'A53']
    mock.number_of_cpus = 4

    def mock_core_cpus(core):
        return [i for i, c in enumerate(mock.core_names) if c == core]

    def mock_online_cpus():
        return [0, 1, 2]

    def mock_offline_cpus():
        return [3]

    def mock_related_cpus(core):
        # Frequency domains: {0, 1} and {2, 3}.
        if core in [0, 1]:
            return [0, 1]
        elif core in [2, 3]:
            return [2, 3]

    mock.list_online_cpus = mock_online_cpus
    mock.list_offline_cpus = mock_offline_cpus
    # Bug fix: this assignment was duplicated in the original.
    mock.core_cpus = mock_core_cpus
    mock.cpufreq.get_related_cpus = mock_related_cpus

    # Check retrieving cpus from a given prefix
    assert_equal(resolve_cpus('A72', mock), [0, 1])
    assert_equal(resolve_cpus('A53', mock), [2, 3])
    assert_equal(resolve_cpus('big', mock), [0, 1])
    assert_equal(resolve_cpus('little', mock), [2, 3])
    assert_equal(resolve_cpus('', mock), [0, 1, 2, 3])
    assert_equal(resolve_cpus('cpu0', mock), [0])
    assert_equal(resolve_cpus('cpu3', mock), [3])

    # Check get unique domain cpus
    assert_equal(resolve_unique_domain_cpus('A72', mock), [0])
    assert_equal(resolve_unique_domain_cpus('A53', mock), [2])
    assert_equal(resolve_unique_domain_cpus('big', mock), [0])
    assert_equal(resolve_unique_domain_cpus('little', mock), [2])
    assert_equal(resolve_unique_domain_cpus('', mock), [0, 2])
    assert_equal(resolve_unique_domain_cpus('cpu0', mock), [0])
    assert_equal(resolve_unique_domain_cpus('cpu3', mock), [2])