def test_delete_with_scans(self):
    """Deleting a Source that a Scan still references must be rejected."""
    # Build a source with a single credential attached.
    credential = Credential(name='cred2', username='******', password='******')
    credential.save()
    net_source = Source(name='cred_source',
                        source_type=Source.NETWORK_SOURCE_TYPE,
                        hosts=['1.2.3.4'])
    net_source.save()
    net_source.credentials.add(credential)
    net_source.save()

    # Attach the source to a scan so the delete becomes invalid.
    connect_scan = Scan(name='test_scan',
                        scan_type=ScanTask.SCAN_TYPE_CONNECT)
    connect_scan.save()
    connect_scan.sources.add(net_source)

    delete_url = reverse('source-detail', args=(net_source.id,))
    resp = self.client.delete(delete_url, format='json')
    self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)

    body = resp.json()
    self.assertEqual(body['detail'],
                     messages.SOURCE_DELETE_NOT_VALID_W_SCANS)
    self.assertEqual(body['scans'][0]['name'], 'test_scan')
def test_format_source(self):
    """format_source should fold the connection scan stats into the dict."""
    started = datetime.now()
    src = Source(name='source1',
                 hosts=json.dumps(['1.2.3.4']),
                 source_type='network',
                 port=22)
    src.save()
    finished = datetime.now()

    # Complete a scan job so the source has connection results to report.
    scan_job, scan_task = create_scan_job(src)
    scan_task.update_stats('', sys_count=10, sys_scanned=9, sys_failed=1)
    scan_job.start_time = started
    scan_job.end_time = finished
    scan_job.status = ScanTask.COMPLETED
    scan_job.save()

    json_source = SourceSerializer(src).data
    out = format_source(json_source)

    connection = {'id': 1,
                  'start_time': started,
                  'end_time': finished,
                  'status': 'completed',
                  'systems_count': 10,
                  'systems_scanned': 9,
                  'systems_failed': 1}
    expected = {'id': 1,
                'name': 'source1',
                'source_type': 'network',
                'port': 22,
                'hosts': ['1.2.3.4'],
                'connection': connection}
    self.assertEqual(out, expected)
class ScanTaskTest(TestCase):
    """Exercise the basic ScanTask model and its serializer."""

    def setUp(self):
        """Build the credential/source pair shared by the tests."""
        self.cred = Credential.objects.create(name='cred1',
                                              username='******',
                                              password='******',
                                              sudo_password=None,
                                              ssh_keyfile=None)
        self.cred_for_upload = self.cred.id
        self.source = Source(name='source1', source_type='network', port=22)
        self.source.save()
        self.source.credentials.add(self.cred)

    def test_successful_create(self):
        """A freshly created task serializes to the expected dict."""
        task = ScanTask.objects.create(source=self.source,
                                       scan_type=ScanTask.SCAN_TYPE_CONNECT,
                                       status=ScanTask.PENDING)
        json_task = ScanTaskSerializer(task).data
        expected = {'source': 1,
                    'scan_type': ScanTask.SCAN_TYPE_CONNECT,
                    'status': 'pending'}
        self.assertEqual(expected, json_task)
def test_format_source(self):
    """format_source should include the most recent connect scan details."""
    started = datetime.now()
    src = Source(name='source1',
                 hosts=json.dumps(['1.2.3.4']),
                 source_type='network',
                 port=22)
    src.save()
    finished = datetime.now()

    # Complete a scan job and mark it as the source's latest connect scan.
    scan_job, scan_task = create_scan_job(src)
    scan_task.update_stats(
        '', sys_count=10, sys_scanned=9, sys_failed=1, sys_unreachable=0)
    scan_job.start_time = started
    scan_job.end_time = finished
    scan_job.status = ScanTask.COMPLETED
    scan_job.save()
    src.most_recent_connect_scan = scan_job
    src.save()

    json_source = SourceSerializer(src).data
    out = format_source(json_source)

    connection = {'id': 1,
                  'start_time': started,
                  'end_time': finished,
                  'systems_count': 10,
                  'systems_scanned': 9,
                  'systems_failed': 1,
                  'systems_unreachable': 0,
                  'status_details': {
                      'job_status_message': 'Job is pending.'},
                  'status': 'completed',
                  'source_systems_count': 10,
                  'source_systems_scanned': 9,
                  'source_systems_failed': 1,
                  'source_systems_unreachable': 0}
    expected = {'id': 1,
                'name': 'source1',
                'source_type': 'network',
                'port': 22,
                'hosts': ['1.2.3.4'],
                'connection': connection}
    self.assertEqual(out, expected)
def _generate_sources(self):
    """Create ten Source rows with random coordinates and metrics.

    Each source gets a random existing author, the current timestamps,
    a random location/type, and random 0-100 pollution and condition
    scores.
    """
    # NOTE(review): the original also computed a `source_health` value
    # that was never passed to Source(); that dead local was removed.
    for _ in range(10):
        # Coordinates: integer degrees plus six decimal places of jitter.
        lat = random.randint(-90, 90) + round(random.random(), 6)
        lng = random.randint(-180, 180) + round(random.random(), 6)
        Source(
            author=random.choice(IzumiUser.objects.all()),
            date_created=timezone.now(),
            last_updated=timezone.now(),
            latitude=lat,
            longitude=lng,
            source_type=random.choice(
                ["surface", "ground", "frozen", "other"]),
            pathogen_pollution=random.randint(0, 100),
            inorganic_pollution=random.randint(0, 100),
            organic_pollution=random.randint(0, 100),
            macroscopic_pollution=random.randint(0, 100),
            thermal_pollution=random.randint(0, 100),
            climate_condition=random.randint(0, 100),
            depletion_risk=random.randint(0, 100),
            stress=random.randint(0, 100),
        ).save()
class SatelliteFactoryTest(TestCase):
    """Tests for the Satellite API factory function."""

    def setUp(self):
        """Build a satellite credential, source, and connect scan task."""
        self.cred = Credential(name='cred1',
                               cred_type=Credential.SATELLITE_CRED_TYPE,
                               username='******',
                               password='******',
                               become_password=None,
                               become_method=None,
                               become_user=None,
                               ssh_keyfile=None)
        self.cred.save()
        self.source = Source(name='source1', port=443, hosts='["1.2.3.4"]')
        self.source.save()
        self.source.credentials.add(self.cred)
        self.scan_job, self.scan_task = create_scan_job(
            self.source, scan_type=ScanTask.SCAN_TYPE_CONNECT)

    def tearDown(self):
        """Cleanup test case setup."""
        pass

    def test_create_sat_none(self):
        """No satellite version yields no interface."""
        api = create(None, 1, self.scan_task)
        self.assertEqual(api, None)

    def test_create_sat5(self):
        """Satellite 5 maps to the SatelliteFive interface."""
        api = create(SATELLITE_VERSION_5, 1, self.scan_task)
        self.assertEqual(api.__class__, SatelliteFive)

    def test_create_sat6_v1(self):
        """Satellite 6 with API v1 maps to SatelliteSixV1."""
        api = create(SATELLITE_VERSION_6, 1, self.scan_task)
        self.assertEqual(api.__class__, SatelliteSixV1)

    def test_create_sat6_v2(self):
        """Satellite 6 with API v2 maps to SatelliteSixV2."""
        api = create(SATELLITE_VERSION_6, 2, self.scan_task)
        self.assertEqual(api.__class__, SatelliteSixV2)

    def test_create_sat6_unknown(self):
        """Satellite 6 with an unknown API version yields no interface."""
        api = create(SATELLITE_VERSION_6, 9, self.scan_task)
        self.assertEqual(api, None)
def test_source_name_in_metadata(self):
    """The source name must be copied into each fact's metadata."""
    src = Source(name='source1', source_type='network', port=22)
    src.save()

    fact_source = {'source_id': 1, 'source_type': 'network'}
    fingerprint = {'metadata': {}}
    result = _process_network_fact(fact_source, fingerprint)

    meta = result['metadata']['infrastructure_type']
    self.assertEqual(meta['source_name'], 'source1')
def save_sources(source_list):
    """Persist every entry of *source_list* as a Source row.

    ``source_list`` is a mapping whose ``'sources'`` key holds dicts with
    ``name``, ``category`` and ``id`` fields; ``id`` is stored as the slug.
    """
    # The u''/r'' key prefixes in the original were no-ops on these plain
    # ASCII keys in Python 3 and have been dropped for clarity.
    for entry in source_list['sources']:
        Source(name=entry['name'],
               category=entry['category'],
               slug=entry['id']).save()
class VCenterUtilsTest(TestCase):
    """Tests for the VCenter utility helpers."""

    def setUp(self):
        """Create the credential, source, and inspect-scan fixtures."""
        self.cred = Credential(
            name='cred1',
            username='******',
            password='******',
            become_password=None,
            ssh_keyfile=None)
        self.cred.save()

        options = SourceOptions(disable_ssl=True)
        options.save()

        self.source = Source(
            name='source1',
            port=22,
            hosts='["1.2.3.4"]')
        self.source.options = options
        self.source.save()
        self.source.credentials.add(self.cred)

        self.scan_task = ScanTask(scan_type=ScanTask.SCAN_TYPE_INSPECT,
                                  source=self.source,
                                  sequence_number=2)
        self.scan_task.save()

        self.scan_job = ScanJob(scan_type=ScanTask.SCAN_TYPE_INSPECT)
        self.scan_job.save()
        self.scan_job.tasks.add(self.scan_task)
        self.conn_results = JobConnectionResult()
        self.conn_results.save()
        self.scan_job.connection_results = self.conn_results
        self.scan_job.save()

    def tearDown(self):
        """Cleanup test case setup."""
        pass

    def test_vcenter_connect(self):
        """vcenter_connect should return the SmartConnectNoSSL handle."""
        mock_vcenter = Mock()
        patch_target = 'scanner.vcenter.utils.SmartConnectNoSSL'
        with patch(patch_target, return_value=mock_vcenter) as mock_connect:
            self.assertEqual(mock_vcenter, vcenter_connect(self.scan_task))
            mock_connect.assert_called_once_with(
                host=ANY, user=ANY, pwd=ANY, port=ANY)
def test_cred_delete_with_source(self):
    """Deleting a credential still used by a source must fail."""
    cred = Credential(name='cred2', username='******', password='******')
    cred.save()
    src = Source(name='cred_source',
                 source_type=Source.NETWORK_SOURCE_TYPE,
                 hosts=['1.2.3.4'])
    src.save()
    src.credentials.add(cred)
    src.save()

    delete_url = reverse('cred-detail', args=(cred.pk, ))
    response = self.client.delete(delete_url, format='json')
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    body = response.json()
    self.assertEqual(body['detail'],
                     messages.CRED_DELETE_NOT_VALID_W_SOURCES)
    self.assertEqual(body['sources'][0]['name'], 'cred_source')
def test_404_if_not_fingerprint_job(self):
    """The merge-job status endpoint must only expose fingerprint jobs."""
    src = Source(name='source1',
                 hosts=json.dumps(['1.2.3.4']),
                 source_type='network',
                 port=22)
    src.save()
    scan_job, _ = create_scan_job(src,
                                  scan_type=ScanTask.SCAN_TYPE_INSPECT)

    # An inspect job is invisible through the merge endpoint.
    url = '/api/v1/reports/merge/jobs/{}/'.format(scan_job.id)
    self.assertEqual(self.client.get(url).status_code,
                     status.HTTP_404_NOT_FOUND)

    # Once re-typed as a fingerprint job it becomes visible.
    scan_job.scan_type = ScanTask.SCAN_TYPE_FINGERPRINT
    scan_job.save()
    url = '/api/v1/reports/merge/jobs/{}/'.format(scan_job.id)
    self.assertEqual(self.client.get(url).status_code,
                     status.HTTP_200_OK)
def create_source(request):
    """Render the add-source form and persist a Source on a valid POST."""
    form = AddSourceForm(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        cleaned = form.cleaned_data
        source = Source(
            author=IzumiUser.objects.get(user=request.user),
            date_created=timezone.now(),
            last_updated=timezone.now(),
            latitude=cleaned.get('latitude'),
            longitude=cleaned.get('longitude'),
            source_type=cleaned.get('source_type'),
            pathogen_pollution=cleaned.get('pathogen_pollution'),
            inorganic_pollution=cleaned.get('inorganic_pollution'),
            organic_pollution=cleaned.get('organic_pollution'),
            macroscopic_pollution=cleaned.get('macroscopic_pollution'),
            thermal_pollution=cleaned.get('thermal_pollution'),
        )
        source.save()
        return HttpResponseRedirect('/')
    context = {'isLoggedIn': request.user.is_authenticated, 'form': form}
    return render(request, 'add.html', context)
def setUp(self):
    """Build the scan job/task pair used by the inspect runner tests."""
    cred = Credential(name='cred1',
                      username='******',
                      password='******',
                      become_password=None,
                      ssh_keyfile=None)
    cred.save()

    src = Source(name='source1', port=22, hosts='["1.2.3.4"]')
    src.save()
    src.credentials.add(cred)

    self.scan_job, self.scan_task = create_scan_job(
        src, ScanTask.SCAN_TYPE_INSPECT)

    # Mark the prerequisite connect task finished so inspection can run.
    self.connect_scan_task = self.scan_task.prerequisites.first()
    self.connect_scan_task.update_stats('TEST_VC.', sys_count=5)
    self.connect_scan_task.complete()

    # Create task runner
    self.runner = InspectTaskRunner(scan_job=self.scan_job,
                                    scan_task=self.scan_task)
def setUp(self):
    """Build the scan job/task pair used by the connect runner tests."""
    self.cred = Credential(
        name='cred1',
        username='******',
        password='******',
        become_password=None,
        ssh_keyfile=None)
    self.cred.save()

    src = Source(
        name='source1',
        port=22,
        hosts='["1.2.3.4"]')
    src.save()
    src.credentials.add(self.cred)

    self.scan_job, self.scan_task = create_scan_job(
        src, ScanTask.SCAN_TYPE_CONNECT)

    # Create runner
    self.runner = ConnectTaskRunner(scan_job=self.scan_job,
                                    scan_task=self.scan_task)
class ScanTest(TestCase):
    """Test the basic ScanJob infrastructure."""

    def setUp(self):
        """Create test setup."""
        # Flush the DB so fixture ids (source id 1, scan id 1/2) are stable.
        management.call_command('flush', '--no-input')
        self.cred = Credential.objects.create(name='cred1',
                                              username='******',
                                              password='******',
                                              become_password=None,
                                              ssh_keyfile=None)
        self.cred_for_upload = self.cred.id
        self.source = Source(name='source1', source_type='network', port=22)
        self.source.save()
        self.source2 = Source(name='source2', source_type='network', port=22)
        self.source2.save()
        self.source2.credentials.add(self.cred)
        # Default max_concurrency, used when asserting merged options.
        self.concurrency = ScanOptions.get_default_forks()

    def create(self, data):
        """Call the create endpoint."""
        url = reverse('scan-list')
        return self.client.post(url, json.dumps(data), 'application/json')

    def create_expect_400(self, data, expected_response):
        """We will do a lot of create tests that expect HTTP 400s."""
        response = self.create(data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        response_json = response.json()
        # NOTE(review): this diagnostic branch is unreachable — the assert
        # above already failed if the status is not 400.
        if response.status_code != status.HTTP_400_BAD_REQUEST:
            print('Cause of failure: ')
            print('expected: %s' % expected_response)
            print('actual: %s' % response_json)
        self.assertEqual(response_json, expected_response)

    def create_expect_201(self, data):
        """Create a scan, return the response as a dict."""
        response = self.create(data)
        response_json = response.json()
        # Print the body before asserting so failures show the cause.
        if response.status_code != status.HTTP_201_CREATED:
            print('Cause of failure: ')
            print(response_json)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        return response_json

    def test_successful_create(self):
        """A valid create request should succeed."""
        data = {
            'name': 'test',
            'sources': [self.source.id],
            'scan_type': ScanTask.SCAN_TYPE_CONNECT
        }
        response = self.create_expect_201(data)
        self.assertIn('id', response)

    def test_create_no_name(self):
        """A create request must have a name."""
        self.create_expect_400({'sources': [self.source.id]},
                               {'name': ['This field is required.']})

    def test_create_no_source(self):
        """A create request must have a source."""
        self.create_expect_400({'name': 'test'},
                               {'sources': ['This field is required.']})

    def test_create_invalid_scan_type(self):
        """A create request must have a valid scan_type."""
        data = {
            'name': 'test',
            'sources': [self.source.id],
            'scan_type': 'foo',
            'options': {
                'disabled_optional_products': {
                    'jboss_eap': True,
                    'jboss_fuse': True,
                    'jboss_brms': True,
                    'jboss_ws': True
                }
            }
        }
        self.create_expect_400(
            data, {
                'scan_type': [
                    'foo, is an invalid choice. '
                    'Valid values are connect,inspect.'
                ]
            })

    def test_create_blank_scan_type(self):
        """A create request must not have a blank scan_type."""
        data = {'name': 'test', 'sources': [self.source.id], 'scan_type': ''}
        self.create_expect_400(
            data, {
                'scan_type': [
                    'This field may not be blank. '
                    'Valid values are connect,inspect.'
                ]
            })

    def test_create_invalid_srcs_type(self):
        """A create request must have integer ids."""
        data = {
            'name': 'test',
            'sources': ['foo'],
            'scan_type': ScanTask.SCAN_TYPE_CONNECT
        }
        self.create_expect_400(
            data,
            {'sources': ['Source identifiers must be integer values.']})

    def test_create_invalid_srcs_id(self):
        """A create request must have vaild ids."""
        data = {
            'name': 'test',
            'sources': [100000],
            'scan_type': ScanTask.SCAN_TYPE_CONNECT
        }
        self.create_expect_400(
            data, {
                'sources': ['Source with id=100000 could '
                            'not be found in database.']
            })

    def test_create_default_host_type(self):
        """A valid create request should succeed with defaulted type."""
        data = {
            'name': 'test',
            'sources': [self.source.id],
            'options': {
                'disabled_optional_products': {
                    'jboss_eap': True,
                    'jboss_fuse': True,
                    'jboss_brms': True,
                    'jboss_ws': True
                }
            }
        }
        response = self.create_expect_201(data)
        self.assertIn('id', response)
        self.assertIn('scan_type', response)
        # Omitting scan_type must default to inspect.
        self.assertEqual(response['scan_type'], ScanTask.SCAN_TYPE_INSPECT)

    def test_create_invalid_source(self):
        """The Source name must valid."""
        self.create_expect_400({
            'name': 'test',
            'sources': -1
        }, {'sources': ['Expected a list of items but got type '
                        '"int".']})

    def test_create_invalid_forks(self):
        """Test valid number of forks."""
        data = {
            'name': 'test',
            'sources': [self.source.id],
            'options': {
                'max_concurrency': -5,
                'disabled_optional_products': {
                    'jboss_eap': True
                }
            }
        }
        self.create_expect_400(
            data, {
                'options': {
                    'max_concurrency':
                        ['Ensure this value is greater than or equal '
                         'to 1.']
                }
            })

    def test_create_invalid_disable_optional_products_type(self):
        """Test invalid type for disabled_optional_products type."""
        data = {
            'name': 'test',
            'sources': [self.source.id],
            'options': {
                'disabled_optional_products': 'foo'
            }
        }
        self.create_expect_400(
            data, {
                'options': {
                    'disabled_optional_products': {
                        'non_field_errors':
                            ['Invalid data. Expected a dictionary, '
                             'but got str.']
                    }
                }
            })

    def test_filtered_list(self):
        """List filtered Scan objects."""
        data_default = {'name': 'test1', 'sources': [self.source.id]}
        data_discovery = {
            'name': 'test2',
            'sources': [self.source.id],
            'scan_type': ScanTask.SCAN_TYPE_CONNECT
        }
        self.create_expect_201(data_default)
        self.create_expect_201(data_discovery)
        url = reverse('scan-list')
        # Only the connect-type scan should come back from the filter.
        response = self.client.get(url,
                                   {'scan_type': ScanTask.SCAN_TYPE_CONNECT})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        content = response.json()
        results1 = [{
            'id': 2,
            'name': 'test2',
            'sources': [{
                'id': 1,
                'name': 'source1',
                'source_type': 'network'
            }],
            'scan_type': 'connect'
        }]
        expected = {
            'count': 1,
            'next': None,
            'previous': None,
            'results': results1
        }
        self.assertEqual(content, expected)

    def test_retrieve(self):
        """Get Scan details by primary key."""
        data_discovery = {
            'name': 'test',
            'sources': [self.source.id],
            'scan_type': ScanTask.SCAN_TYPE_CONNECT
        }
        initial = self.create_expect_201(data_discovery)
        url = reverse('scan-detail', args=(initial['id'], ))
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertIn('sources', response.json())
        sources = response.json()['sources']
        self.assertEqual(sources, [{
            'id': 1,
            'name': 'source1',
            'source_type': 'network'
        }])

    def test_retrieve_bad_id(self):
        """Get Scan details by bad primary key."""
        url = reverse('scan-detail', args=('string', ))
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_update(self):
        """Completely update a scan."""
        data_discovery = {
            'name': 'test',
            'sources': [self.source.id],
            'scan_type': ScanTask.SCAN_TYPE_CONNECT,
            'options': {
                'disabled_optional_products': {
                    'jboss_eap': True,
                    'jboss_fuse': True,
                    'jboss_brms': True,
                    'jboss_ws': True
                }
            }
        }
        initial = self.create_expect_201(data_discovery)
        data = {
            'name': 'test2',
            'sources': [self.source2.id],
            'scan_type': ScanTask.SCAN_TYPE_INSPECT,
            'options': {
                'disabled_optional_products': {
                    'jboss_eap': False,
                    'jboss_fuse': True,
                    'jboss_brms': True,
                    'jboss_ws': True
                }
            }
        }
        url = reverse('scan-detail', args=(initial['id'], ))
        response = self.client.put(url,
                                   json.dumps(data),
                                   content_type='application/json',
                                   format='json')
        response_json = response.json()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response_json.get('scan_type'),
                         ScanTask.SCAN_TYPE_INSPECT)
        self.assertEqual(response_json.get('name'), 'test2')
        self.assertFalse(response_json.get('options').get('jboss_eap'))
        self.assertEqual(response_json.get('sources'), [self.source2.id])

    def test_partial_update(self):
        """Test partial update a scan."""
        data_discovery = {
            'name': 'test',
            'sources': [self.source.id],
            'scan_type': ScanTask.SCAN_TYPE_CONNECT,
            'options': {
                'disabled_optional_products': {
                    'jboss_eap': True,
                    'jboss_fuse': True,
                    'jboss_brms': True,
                    'jboss_ws': True
                }
            }
        }
        initial = self.create_expect_201(data_discovery)
        # First patch: change only the scan_type.
        data = {'scan_type': ScanTask.SCAN_TYPE_INSPECT}
        url = reverse('scan-detail', args=(initial['id'], ))
        response = self.client.patch(url,
                                     json.dumps(data),
                                     content_type='application/json',
                                     format='json')
        response_json = response.json()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response_json.get('scan_type'),
                         ScanTask.SCAN_TYPE_INSPECT)
        # Second patch: change the name and the product options.
        data = {
            'name': 'test2',
            'options': {
                'disabled_optional_products': {
                    'jboss_eap': False,
                    'jboss_fuse': True,
                    'jboss_brms': True,
                    'jboss_ws': True
                }
            }
        }
        response = self.client.patch(url,
                                     json.dumps(data),
                                     content_type='application/json',
                                     format='json')
        response_json = response.json()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response_json.get('name'), 'test2')
        self.assertFalse(response_json.get('options').get('jboss_eap'))

    def test_partial_update_retains(self):
        """Test partial update retains unprovided info."""
        data_discovery = {
            'name': 'test',
            'sources': [self.source.id],
            'scan_type': ScanTask.SCAN_TYPE_CONNECT,
            'options': {
                'disabled_optional_products': {
                    'jboss_eap': True,
                    'jboss_fuse': True,
                    'jboss_brms': True,
                    'jboss_ws': True
                }
            }
        }
        initial = self.create_expect_201(data_discovery)
        data = {'scan_type': ScanTask.SCAN_TYPE_INSPECT}
        url = reverse('scan-detail', args=(initial['id'], ))
        response = self.client.patch(url,
                                     json.dumps(data),
                                     content_type='application/json',
                                     format='json')
        response_json = response.json()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response_json.get('scan_type'),
                         ScanTask.SCAN_TYPE_INSPECT)
        # Patch only the extended search options; the disabled products
        # from the original create must be retained in the merged options.
        data = {
            'name': 'test2',
            'options': {
                'enabled_extended_product_search': {
                    'jboss_eap': False,
                    'jboss_fuse': False,
                    'jboss_brms': True,
                    'jboss_ws': True,
                    'search_directories': ['/foo/bar/']
                }
            }
        }
        response = self.client.patch(url,
                                     json.dumps(data),
                                     content_type='application/json',
                                     format='json')
        response_json = response.json()
        options = {
            'disabled_optional_products': {
                'jboss_eap': True,
                'jboss_fuse': True,
                'jboss_brms': True,
                'jboss_ws': True
            },
            'max_concurrency': self.concurrency,
            'enabled_extended_product_search': {
                'jboss_eap': False,
                'jboss_fuse': False,
                'jboss_brms': True,
                'jboss_ws': True,
                'search_directories': ['/foo/bar/']
            }
        }
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response_json.get('name'), 'test2')
        self.assertEqual(response_json.get('options'), options)

    def test_partial_update_sources(self):
        """Test partial update on sources."""
        data_discovery = {
            'name': 'test',
            'sources': [self.source.id],
            'scan_type': ScanTask.SCAN_TYPE_CONNECT
        }
        initial = self.create_expect_201(data_discovery)
        data = {'scan_type': ScanTask.SCAN_TYPE_INSPECT}
        url = reverse('scan-detail', args=(initial['id'], ))
        response = self.client.patch(url,
                                     json.dumps(data),
                                     content_type='application/json',
                                     format='json')
        response_json = response.json()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response_json.get('scan_type'),
                         ScanTask.SCAN_TYPE_INSPECT)
        # Swap the scan over to the second source.
        data = {'name': 'test2', 'sources': [self.source2.id]}
        response = self.client.patch(url,
                                     json.dumps(data),
                                     content_type='application/json',
                                     format='json')
        response_json = response.json()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response_json.get('name'), 'test2')
        self.assertEqual(response_json.get('sources'), [self.source2.id])

    def test_partial_update_enabled(self):
        """Test partial update retains unprovided info."""
        data_discovery = {
            'name': 'test',
            'sources': [self.source.id],
            'scan_type': ScanTask.SCAN_TYPE_CONNECT,
            'options': {
                'enabled_extended_product_search': {
                    'jboss_eap': False,
                    'jboss_fuse': False,
                    'jboss_brms': True,
                    'jboss_ws': True
                }
            }
        }
        initial = self.create_expect_201(data_discovery)
        data = {'scan_type': ScanTask.SCAN_TYPE_INSPECT}
        url = reverse('scan-detail', args=(initial['id'], ))
        response = self.client.patch(url,
                                     json.dumps(data),
                                     content_type='application/json',
                                     format='json')
        response_json = response.json()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response_json.get('scan_type'),
                         ScanTask.SCAN_TYPE_INSPECT)
        # Patch a subset of the search options; the untouched keys from
        # the create must survive the merge.
        data = {
            'name': 'test2',
            'options': {
                'enabled_extended_product_search': {
                    'jboss_ws': False,
                    'search_directories': ['/foo/bar/']
                }
            }
        }
        response = self.client.patch(url,
                                     json.dumps(data),
                                     content_type='application/json',
                                     format='json')
        response_json = response.json()
        options = {
            'max_concurrency': self.concurrency,
            'enabled_extended_product_search': {
                'jboss_eap': False,
                'jboss_fuse': False,
                'jboss_brms': True,
                'jboss_ws': False,
                'search_directories': ['/foo/bar/']
            }
        }
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response_json.get('name'), 'test2')
        self.assertEqual(response_json.get('options'), options)

    def test_partial_update_scan_type(self):
        """Test partial update retains unprovided info."""
        data_discovery = {
            'name': 'test',
            'sources': [self.source.id],
            'scan_type': ScanTask.SCAN_TYPE_CONNECT,
            'options': {
                'enabled_extended_product_search': {
                    'jboss_eap': False,
                    'jboss_fuse': False,
                    'jboss_brms': True,
                    'jboss_ws': True
                }
            }
        }
        initial = self.create_expect_201(data_discovery)
        url = reverse('scan-detail', args=(initial['id'], ))
        # test with extended options
        data = {
            'name': 'test2',
            'scan_type': ScanTask.SCAN_TYPE_INSPECT,
            'options': {
                'enabled_extended_product_search': {
                    'jboss_ws': False,
                    'search_directories': ['/foo/bar/']
                }
            }
        }
        response = self.client.patch(url,
                                     json.dumps(data),
                                     content_type='application/json',
                                     format='json')
        response_json = response.json()
        options = {
            'max_concurrency': self.concurrency,
            'enabled_extended_product_search': {
                'jboss_eap': False,
                'jboss_fuse': False,
                'jboss_brms': True,
                'jboss_ws': False,
                'search_directories': ['/foo/bar/']
            }
        }
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response_json.get('name'), 'test2')
        self.assertEqual(response_json.get('options'), options)
        self.assertEqual(response_json.get('scan_type'),
                         ScanTask.SCAN_TYPE_INSPECT)
        # test with max concurrency & scan type
        data = {
            'name': 'test2',
            'scan_type': ScanTask.SCAN_TYPE_CONNECT,
            'options': {
                'max_concurrency': 40
            }
        }
        response = self.client.patch(url,
                                     json.dumps(data),
                                     content_type='application/json',
                                     format='json')
        response_json = response.json()
        options = {
            'max_concurrency': 40,
            'enabled_extended_product_search': {
                'jboss_eap': False,
                'jboss_fuse': False,
                'jboss_brms': True,
                'jboss_ws': False,
                'search_directories': ['/foo/bar/']
            }
        }
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response_json.get('name'), 'test2')
        self.assertEqual(response_json.get('options'), options)
        self.assertEqual(response_json.get('scan_type'),
                         ScanTask.SCAN_TYPE_CONNECT)

    def test_expand_scan(self):
        """Test view expand_scan."""
        scan_job, scan_task = create_scan_job(
            self.source, scan_type=ScanTask.SCAN_TYPE_INSPECT)
        scan_task.update_stats('TEST_VC.', sys_count=2, sys_failed=1,
                               sys_scanned=1, sys_unreachable=0)
        serializer = ScanSerializer(scan_job.scan)
        json_scan = serializer.data
        json_scan = expand_scan(json_scan)
        self.assertEqual(
            json_scan.get('sources').first().get('name'), 'source1')
        self.assertEqual(
            json_scan.get('most_recent'), {
                'id': 1,
                'scan_type': 'inspect',
                'status': 'pending',
                'status_details': {
                    'job_status_message': 'Job is pending.'
                }
            })

    def test_delete(self):
        """Delete a scan."""
        data_discovery = {
            'name': 'test',
            'sources': [self.source.id],
            'scan_type': ScanTask.SCAN_TYPE_CONNECT,
            'options': {
                'disabled_optional_products': {
                    'jboss_eap': True,
                    'jboss_fuse': True,
                    'jboss_brms': True,
                    'jboss_ws': True
                }
            }
        }
        response = self.create_expect_201(data_discovery)
        url = reverse('scan-detail', args=(response['id'], ))
        response = self.client.delete(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

    def test_get_extra_vars_missing_options(self):
        """Tests the get_default_extra_vars."""
        extra_vars = ScanOptions.get_default_extra_vars()
        expected_vars = {
            'jboss_eap': True,
            'jboss_fuse': True,
            'jboss_brms': True,
            'jboss_ws': True,
            'jboss_eap_ext': False,
            'jboss_fuse_ext': False,
            'jboss_brms_ext': False,
            'jboss_ws_ext': False
        }
        self.assertEqual(extra_vars, expected_vars)
class ScanTaskTest(TestCase):
    """Test the basic ScanJob infrastructure."""

    def setUp(self):
        """Create test setup."""
        self.cred = Credential.objects.create(name='cred1',
                                              username='******',
                                              password='******',
                                              become_password=None,
                                              ssh_keyfile=None)
        self.cred_for_upload = self.cred.id
        self.source = Source(name='source1', source_type='network', port=22)
        self.source.save()
        self.source.credentials.add(self.cred)

    def test_successful_create(self):
        """Create a scan task and serialize it."""
        task = ScanTask.objects.create(source=self.source,
                                       scan_type=ScanTask.SCAN_TYPE_CONNECT,
                                       status=ScanTask.PENDING)
        serializer = ScanTaskSerializer(task)
        json_task = serializer.data
        self.assertEqual(
            {
                'source': 1,
                'scan_type': ScanTask.SCAN_TYPE_CONNECT,
                'status': 'pending',
                'status_message': messages.ST_STATUS_MSG_PENDING
            }, json_task)

    def test_successful_start(self):
        """Create a scan task and start it."""
        task = ScanTask.objects.create(source=self.source,
                                       scan_type=ScanTask.SCAN_TYPE_CONNECT,
                                       status=ScanTask.PENDING)
        start_time = datetime.utcnow()
        task.start()
        task.save()
        self.assertEqual(messages.ST_STATUS_MSG_RUNNING, task.status_message)
        self.assertEqual(task.status, ScanTask.RUNNING)
        # Compare to the nearest second to avoid sub-second flakiness.
        self.assertEqual(start_time.replace(microsecond=0),
                         task.start_time.replace(microsecond=0))

    def test_successful_restart(self):
        """Create a scan task and restart it."""
        task = ScanTask.objects.create(source=self.source,
                                       scan_type=ScanTask.SCAN_TYPE_CONNECT,
                                       status=ScanTask.PENDING)
        task.restart()
        task.save()
        self.assertEqual(messages.ST_STATUS_MSG_RESTARTED,
                         task.status_message)
        self.assertEqual(task.status, ScanTask.PENDING)

    def test_successful_pause(self):
        """Create a scan task and pause it."""
        task = ScanTask.objects.create(source=self.source,
                                       scan_type=ScanTask.SCAN_TYPE_CONNECT,
                                       status=ScanTask.PENDING)
        task.pause()
        task.save()
        self.assertEqual(messages.ST_STATUS_MSG_PAUSED, task.status_message)
        self.assertEqual(task.status, ScanTask.PAUSED)

    def test_successful_cancel(self):
        """Create a scan task and cancel it."""
        task = ScanTask.objects.create(source=self.source,
                                       scan_type=ScanTask.SCAN_TYPE_CONNECT,
                                       status=ScanTask.PENDING)
        end_time = datetime.utcnow()
        task.cancel()
        task.save()
        self.assertEqual(messages.ST_STATUS_MSG_CANCELED,
                         task.status_message)
        self.assertEqual(task.status, ScanTask.CANCELED)
        # Compare to the nearest second to avoid sub-second flakiness.
        self.assertEqual(end_time.replace(microsecond=0),
                         task.end_time.replace(microsecond=0))

    def test_successful_complete(self):
        """Create a scan task and complete it."""
        task = ScanTask.objects.create(source=self.source,
                                       scan_type=ScanTask.SCAN_TYPE_CONNECT,
                                       status=ScanTask.PENDING)
        end_time = datetime.utcnow()
        task.complete('great')
        task.save()
        self.assertEqual('great', task.status_message)
        self.assertEqual(task.status, ScanTask.COMPLETED)
        self.assertEqual(end_time.replace(microsecond=0),
                         task.end_time.replace(microsecond=0))

    def test_scantask_fail(self):
        """Create a scan task and fail it."""
        task = ScanTask.objects.create(source=self.source,
                                       scan_type=ScanTask.SCAN_TYPE_CONNECT,
                                       status=ScanTask.PENDING)
        # pylint: disable=invalid-name
        MSG = 'Test Fail.'
        end_time = datetime.utcnow()
        task.fail(MSG)
        task.save()
        self.assertEqual(MSG, task.status_message)
        self.assertEqual(task.status, ScanTask.FAILED)
        self.assertEqual(end_time.replace(microsecond=0),
                         task.end_time.replace(microsecond=0))

    def test_scantask_increment(self):
        """Test scan task increment feature."""
        task = ScanTask.objects.create(source=self.source,
                                       scan_type=ScanTask.SCAN_TYPE_CONNECT,
                                       status=ScanTask.PENDING)
        # pylint: disable=invalid-name
        task.save()
        # Each call bumps all four counters by one.
        task.increment_stats('foo',
                             increment_sys_count=True,
                             increment_sys_scanned=True,
                             increment_sys_failed=True,
                             increment_sys_unreachable=True)
        self.assertEqual(1, task.systems_count)
        self.assertEqual(1, task.systems_scanned)
        self.assertEqual(1, task.systems_failed)
        self.assertEqual(1, task.systems_unreachable)
        task.increment_stats('foo',
                             increment_sys_count=True,
                             increment_sys_scanned=True,
                             increment_sys_failed=True,
                             increment_sys_unreachable=True)
        self.assertEqual(2, task.systems_count)
        self.assertEqual(2, task.systems_scanned)
        self.assertEqual(2, task.systems_failed)
        self.assertEqual(2, task.systems_unreachable)

    def test_calculate_counts(self):
        """Test calculate counts."""
        task = ScanTask.objects.create(source=self.source,
                                       scan_type=ScanTask.SCAN_TYPE_CONNECT,
                                       status=ScanTask.PENDING)
        # pylint: disable=invalid-name
        task.save()
        task.increment_stats('foo',
                             increment_sys_count=True,
                             increment_sys_scanned=True,
                             increment_sys_failed=True,
                             increment_sys_unreachable=True)
        systems_count,\
            systems_scanned,\
            systems_failed,\
            systems_unreachable = task.calculate_counts()
        self.assertEqual(systems_count, 1)
        self.assertEqual(systems_scanned, 1)
        self.assertEqual(systems_failed, 1)
        self.assertEqual(systems_unreachable, 1)
class InspectTaskRunnerTest(TestCase):
    """Tests Satellite connect capabilities."""

    def setUp(self):
        """Create test case setup."""
        self.cred = Credential(
            name='cred1',
            cred_type=Credential.SATELLITE_CRED_TYPE,
            username='******',
            password='******')
        self.cred.save()
        self.source = Source(name='source1',
                             port=443,
                             hosts='["1.2.3.4"]')
        self.source.save()
        self.source.credentials.add(self.cred)

    def tearDown(self):
        """Cleanup test case setup."""
        pass

    def create_scan_job(self):
        """Create scan job for tests."""
        scan_job, inspect_task = create_scan_job(
            self.source, ScanTask.SCAN_TYPE_INSPECT)
        inspect_task.update_stats('TEST_SAT.', sys_scanned=0)
        return scan_job, inspect_task

    def test_run_failed_prereq(self):
        """Test the running connect task with no source options."""
        scan_job, inspect_task = self.create_scan_job()
        # Force the prerequisite connect task to FAILED so run() bails out.
        connect_task = inspect_task.prerequisites.first()
        connect_task.status = ScanTask.FAILED
        connect_task.save()
        task = InspectTaskRunner(scan_job, inspect_task)
        status = task.run(Value('i', ScanJob.JOB_RUN))
        self.assertEqual(status[1], ScanTask.FAILED)

    def test_run_sat5_bad_status(self):
        """Test the running connect task for Satellite 5."""
        scan_job, inspect_task = self.create_scan_job()
        task = InspectTaskRunner(scan_job, inspect_task)
        with patch('scanner.satellite.connect.utils.status',
                   return_value=(401, None,
                                 SATELLITE_VERSION_5)) as mock_sat_status:
            status = task.run(Value('i', ScanJob.JOB_RUN))
            mock_sat_status.assert_called_once_with(ANY)
            self.assertEqual(status[1], ScanTask.FAILED)

    def test_run_sat6_bad_status(self):
        """Test the running connect task for Sat 6 with bad status."""
        scan_job, inspect_task = self.create_scan_job()
        task = InspectTaskRunner(scan_job, inspect_task)
        with patch('scanner.satellite.connect.utils.status',
                   return_value=(401, None,
                                 SATELLITE_VERSION_6)) as mock_sat_status:
            status = task.run(Value('i', ScanJob.JOB_RUN))
            mock_sat_status.assert_called_once_with(ANY)
            self.assertEqual(status[1], ScanTask.FAILED)

    def test_run_sat6_bad_api_version(self):
        """Test the running connect task for Sat6 with bad api version."""
        scan_job, inspect_task = self.create_scan_job()
        task = InspectTaskRunner(scan_job, inspect_task)
        with patch('scanner.satellite.connect.utils.status',
                   return_value=(200, 3,
                                 SATELLITE_VERSION_6)) as mock_sat_status:
            status = task.run(Value('i', ScanJob.JOB_RUN))
            mock_sat_status.assert_called_once_with(ANY)
            self.assertEqual(status[1], ScanTask.FAILED)

    def test_run_with_conn_err(self):
        """Test the running connect task with connection error."""
        scan_job, inspect_task = self.create_scan_job()
        task = InspectTaskRunner(scan_job, inspect_task)
        with patch('scanner.satellite.connect.utils.status',
                   side_effect=mock_conn_exception) as mock_sat_status:
            status = task.run(Value('i', ScanJob.JOB_RUN))
            mock_sat_status.assert_called_once_with(ANY)
            self.assertEqual(status[1], ScanTask.FAILED)

    def test_run_with_auth_err(self):
        """Test the running connect task with satellite auth error."""
        scan_job, inspect_task = self.create_scan_job()
        task = InspectTaskRunner(scan_job, inspect_task)
        with patch('scanner.satellite.connect.utils.status',
                   side_effect=mock_sat_auth_exception) as mock_sat_status:
            status = task.run(Value('i', ScanJob.JOB_RUN))
            mock_sat_status.assert_called_once_with(ANY)
            self.assertEqual(status[1], ScanTask.FAILED)

    def test_run_with_sat_err(self):
        """Test the running connect task with satellite error."""
        scan_job, inspect_task = self.create_scan_job()
        task = InspectTaskRunner(scan_job, inspect_task)
        with patch('scanner.satellite.connect.utils.status',
                   side_effect=mock_sat_exception) as mock_sat_status:
            status = task.run(Value('i', ScanJob.JOB_RUN))
            mock_sat_status.assert_called_once_with(ANY)
            self.assertEqual(status[1], ScanTask.FAILED)

    def test_run_with_timeout(self):
        """Test the running connect task with timeout error."""
        scan_job, inspect_task = self.create_scan_job()
        task = InspectTaskRunner(scan_job, inspect_task)
        with patch('scanner.satellite.connect.utils.status',
                   side_effect=mock_timeout_error) as mock_sat_status:
            status = task.run(Value('i', ScanJob.JOB_RUN))
            mock_sat_status.assert_called_once_with(ANY)
            self.assertEqual(status[1], ScanTask.FAILED)

    def test_run_with_excep(self):
        """Test the running connect task with general exception."""
        scan_job, inspect_task = self.create_scan_job()
        task = InspectTaskRunner(scan_job, inspect_task)
        with patch('scanner.satellite.connect.utils.status',
                   side_effect=mock_exception) as mock_sat_status:
            # run() is expected to raise; the statements after it never
            # execute when the exception fires.
            with self.assertRaises(Exception):
                status = task.run(Value('i', ScanJob.JOB_RUN))
                mock_sat_status.assert_called_once_with(ANY)
                self.assertEqual(status[1], ScanTask.FAILED)

    def test_run_with_sat(self):
        """Test the running connect task with satellite."""
        scan_job, inspect_task = self.create_scan_job()
        task = InspectTaskRunner(scan_job, inspect_task)
        with patch('scanner.satellite.connect.utils.status',
                   return_value=(200, 2,
                                 SATELLITE_VERSION_6)) as mock_sat_status:
            with patch.object(SatelliteSixV2, 'hosts_facts') as mock_facts:
                status = task.run(Value('i', ScanJob.JOB_RUN))
                mock_sat_status.assert_called_once_with(ANY)
                mock_facts.assert_called_once_with(ANY)
                self.assertEqual(status[1], ScanTask.COMPLETED)

    def test_run_with_sat_cancel(self):
        """Test the running connect task with satellite cancelled."""
        scan_job, inspect_task = self.create_scan_job()
        task = InspectTaskRunner(scan_job, inspect_task)
        status = task.run(Value('i', ScanJob.JOB_TERMINATE_CANCEL))
        self.assertEqual(status[1], ScanTask.CANCELED)

    def test_run_with_sat_pause(self):
        """Test the running connect task with satellite paused."""
        scan_job, inspect_task = self.create_scan_job()
        task = InspectTaskRunner(scan_job, inspect_task)
        status = task.run(Value('i', ScanJob.JOB_TERMINATE_PAUSE))
        self.assertEqual(status[1], ScanTask.PAUSED)
class SatelliteSixV2Test(TestCase):
    """Tests Satellite 6 v2 functions."""

    def setUp(self):
        """Create test case setup."""
        self.cred = Credential(
            name='cred1',
            cred_type=Credential.SATELLITE_CRED_TYPE,
            username='******',
            password='******',
            become_password=None,
            become_method=None,
            become_user=None,
            ssh_keyfile=None)
        self.cred.save()
        self.source = Source(name='source1',
                             port=443,
                             hosts='["1.2.3.4"]')
        self.source.save()
        self.source.credentials.add(self.cred)
        self.scan_job, self.scan_task = create_scan_job(
            self.source, ScanTask.SCAN_TYPE_INSPECT)
        self.scan_task.update_stats('TEST_SAT.', sys_scanned=0)
        self.api = SatelliteSixV2(self.scan_task)

    def tearDown(self):
        """Cleanup test case setup."""
        pass

    def test_host_count(self):
        """Test the method host_count."""
        hosts_url = 'https://{sat_host}:{port}/api/v2/hosts'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=hosts_url, sat_host='1.2.3.4', org_id=1)
            jsonresult = {
                'results': [{'name': 'sys1'},
                            {'name': 'sys2'},
                            {'name': 'sys3'}],
                'per_page': 100,
                'total': 3
            }
            mocker.get(url, status_code=200, json=jsonresult)
            systems_count = self.api.host_count()
            self.assertEqual(systems_count, 3)

    def test_host_count_with_err(self):
        """Test the method host_count with error."""
        hosts_url = 'https://{sat_host}:{port}/api/v2/hosts'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=hosts_url, sat_host='1.2.3.4', org_id=1)
            jsonresult = {
                'results': [{'name': 'sys1'},
                            {'name': 'sys2'},
                            {'name': 'sys3'}],
                'per_page': 100,
                'total': 3
            }
            # 500 from the API must surface as a SatelliteException.
            mocker.get(url, status_code=500, json=jsonresult)
            with self.assertRaises(SatelliteException):
                self.api.host_count()

    def test_hosts(self):
        """Test the method hosts."""
        hosts_url = 'https://{sat_host}:{port}/api/v2/hosts'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=hosts_url, sat_host='1.2.3.4', org_id=1)
            jsonresult = {
                'results': [{'name': 'sys1'},
                            {'name': 'sys2'},
                            {'name': 'sys3'}],
                'per_page': 100,
                'total': 3
            }
            mocker.get(url, status_code=200, json=jsonresult)
            systems_count = self.api.host_count()
            hosts = self.api.hosts()
            self.assertEqual(systems_count, 3)
            self.assertEqual(len(hosts), 3)
            self.assertEqual(hosts, ['sys1', 'sys2', 'sys3'])

    def test_hosts_with_err(self):
        """Test the method hosts with error."""
        hosts_url = 'https://{sat_host}:{port}/api/v2/hosts'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=hosts_url, sat_host='1.2.3.4', org_id=1)
            jsonresult = {
                'results': [{'name': 'sys1'},
                            {'name': 'sys2'},
                            {'name': 'sys3'}],
                'per_page': 100,
                'total': 3
            }
            mocker.get(url, status_code=500, json=jsonresult)
            with self.assertRaises(SatelliteException):
                self.api.hosts()

    def test_host_fields_with_err(self):
        """Test the method host_fields with error."""
        host_field_url = 'https://{sat_host}:{port}/api/v2/hosts/{host_id}'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=host_field_url,
                                sat_host='1.2.3.4',
                                host_id=1)
            mocker.get(url, status_code=500)
            with self.assertRaises(SatelliteException):
                host_fields(self.scan_task, 2, host_field_url, None, 1)

    def test_host_fields(self):
        """Test the method host_fields."""
        host_field_url = 'https://{sat_host}:{port}/api/v2/hosts/{host_id}'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=host_field_url,
                                sat_host='1.2.3.4',
                                host_id=1)
            jsonresult = {
                'architecture_id': 1,
                'architecture_name': 'x86_64',
                'operatingsystem_name': 'RedHat 7.4',
                'uuid': None,
                'created_at': '2017-12-04 13:19:57 UTC',
                'updated_at': '2017-12-04 13:21:47 UTC',
                'organization_name': 'ACME',
                'location_name': 'Raleigh',
                'name': 'mac52540071bafe.prov.lan',
                'virtual_host': {'uuid': '100', 'name': 'vhost1'},
                'virtual_guests': [{'name': 'foo'}],
                'content_facet_attributes': {
                    'id': 11,
                    'katello_agent_installed': False
                },
                'subscription_facet_attributes': {
                    'uuid': '00c7a108-48ec-4a97-835c-aa3369777f64',
                    'last_checkin': '2018-01-04 17:36:07 UTC',
                    'registered_at': '2017-12-04 13:33:52 UTC',
                    'registered_by': 'sat-r220-07.lab.eng.rdu2.redhat.com',
                    'virtual_host': {'uuid': '100', 'name': 'vhost1'},
                    'virtual_guests': [{'name': 'foo'}],
                },
                'facts': {
                    'memorysize_mb': '992.45',
                    'memorysize': '992.45 MB',
                    'hostname': 'fdi',
                    'type': 'Other',
                    'architecture': 'x86_64',
                    'is_virtual': 'true',
                    'virtual': 'kvm',
                    'net::interface::ipv4_address': '192.168.99.123',
                    'net::interface::mac_address':
                        'fe80::5054:ff:fe24:946e',
                },
            }
            mocker.get(url, status_code=200, json=jsonresult)
            host_info = host_fields(self.scan_task, 2,
                                    host_field_url, None, 1)
            expected = {
                'uuid': '00c7a108-48ec-4a97-835c-aa3369777f64',
                'hostname': 'mac52540071bafe.prov.lan',
                'registered_by': 'sat-r220-07.lab.eng.rdu2.redhat.com',
                'registration_time': '2017-12-04 13:33:52 UTC',
                'last_checkin_time': '2018-01-04 17:36:07 UTC',
                'katello_agent_installed': False,
                'os_release': 'RedHat 7.4',
                'organization': 'ACME',
                'virtual_host': '100',
                'virtual_host_name': 'vhost1',
                'virt_type': None,
                'kernel_version': None,
                'architecture': None,
                'is_virtualized': None,
                'cores': None,
                'num_sockets': None,
                'num_virtual_guests': 1,
                'virtual': 'hypervisor',
                'location': 'Raleigh',
                'ip_addresses': ['192.168.99.123'],
                'mac_addresses': ['fe80::5054:ff:fe24:946e'],
                'os_name': 'RedHat',
                'os_version': '7.4'
            }
            self.assertEqual(host_info, expected)

    def test_host_subs_with_err(self):
        """Test the host subscriptons method with bad status code."""
        sub_url = 'https://{sat_host}:{port}/' \
            'api/v2/hosts/{host_id}/subscriptions'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=sub_url, sat_host='1.2.3.4', host_id=1)
            mocker.get(url, status_code=500)
            with self.assertRaises(SatelliteException):
                host_subscriptions(self.scan_task, sub_url, None, 1)

    def test_host_subs_err_nojson(self):
        """Test the host subscriptons method with bad code and not json."""
        sub_url = 'https://{sat_host}:{port}/' \
            'api/v2/hosts/{host_id}/subscriptions'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=sub_url, sat_host='1.2.3.4', host_id=1)
            mocker.get(url, status_code=404, text='error message')
            subs = host_subscriptions(self.scan_task, sub_url, None, 1)
            self.assertEqual(subs, {'entitlements': []})

    def test_host_not_subscribed(self):
        """Test the host subscriptons method for not subscribed error."""
        sub_url = 'https://{sat_host}:{port}/' \
            'api/v2/hosts/{host_id}/subscriptions'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=sub_url, sat_host='1.2.3.4', host_id=1)
            err_msg = {
                'displayMessage': 'Host has not been registered '
                                  'with subscription-manager',
                'errors': ['Host has not been registered'
                           ' with subscription-manager']
            }  # noqa
            mocker.get(url, status_code=400, json=err_msg)
            subs = host_subscriptions(self.scan_task, sub_url, None, 1)
            self.assertEqual(subs, {'entitlements': []})

    def test_host_subscriptons(self):
        """Test the host subscriptons method."""
        sub_url = 'https://{sat_host}:{port}/' \
            'api/v2/hosts/{host_id}/subscriptions'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=sub_url, sat_host='1.2.3.4', host_id=1)
            jsonresult = {
                'results': [{
                    'amount': 1,
                    'name': 'Satellite Tools 6.3',
                    'start_date': '2017-12-01 14:50:59 UTC',
                    'end_date': '2047-11-24 14:50:59 UTC',
                    'product_name': 'Satellite Tools 6.3',
                }, {
                    'quantity_consumed': 1,
                    'name': 'Employee SKU',
                    'start_date': '2016-03-24 04:00:00 UTC',
                    'end_date': '2022-01-01 04:59:59 UTC',
                    'account_number': 1212729,
                    'contract_number': 10913844,
                    'type': 'ENTITLEMENT_DERIVED',
                    'product_name': 'Employee SKU',
                }]
            }
            mocker.get(url, status_code=200, json=jsonresult)
            subs = host_subscriptions(self.scan_task, sub_url, None, 1)
            expected = {
                'entitlements': [{
                    'derived_entitlement': False,
                    'name': 'Satellite Tools 6.3',
                    'amount': 1,
                    'account_number': None,
                    'contract_number': None,
                    'start_date': '2017-12-01 14:50:59 UTC',
                    'end_date': '2047-11-24 14:50:59 UTC'
                }, {
                    'derived_entitlement': True,
                    'name': 'Employee SKU',
                    'amount': 1,
                    'account_number': 1212729,
                    'contract_number': 10913844,
                    'start_date': '2016-03-24 04:00:00 UTC',
                    'end_date': '2022-01-01 04:59:59 UTC'
                }]
            }
            self.assertEqual(subs, expected)

    def test_host_details_err(self):
        """Test host_details method for error mark a failed system."""
        with patch('scanner.satellite.six.host_fields',
                   side_effect=mock_sat_exception) as mock_fields:
            detail = self.api.host_details(1, 'sys1')
            inspect_result = self.scan_task.inspection_result
            self.assertEqual(len(inspect_result.systems.all()), 1)
            sys_result = inspect_result.systems.all().first()
            self.assertEqual(sys_result.status,
                             SystemInspectionResult.FAILED)
            self.assertEqual(detail, {})
            mock_fields.assert_called_once_with(ANY, ANY, ANY, ANY, ANY)

    def test_host_details(self):
        """Test host_details method with mock data."""
        fields_return_value = {
            'uuid': '00c7a108-48ec-4a97-835c-aa3369777f64',
            'hostname': 'mac52540071bafe.prov.lan',
            'registered_by': 'sat-r220-07.lab.eng.rdu2.redhat.com',
            'registration_time': '2017-12-04 13:33:52 UTC',
            'last_checkin_time': '2018-01-04 17:36:07 UTC',
            'katello_agent_installed': False,
            'os_name': 'RedHat 7.4',
            'organization': 'ACME',
            'virtual_host': '100',
            'virtual_host_name': 'vhost1',
            'virt_type': None,
            'kernel_version': None,
            'architecture': None,
            'is_virtualized': None,
            'cores': None,
            'num_sockets': None,
            'num_virtual_guests': 1,
            'virtual': 'hypervisor',
            'location': 'Raleigh',
            'ip_addresses': ['192.168.99.123'],
            'ipv6_addresses': ['fe80::5054:ff:fe24:946e']
        }
        subs_return_value = {
            'entitlements': [{
                'derived_entitlement': False,
                'name': 'Satellite Tools 6.3',
                'amount': 1,
                'account_number': None,
                'contract_number': None,
                'start_date': '2017-12-01 14:50:59 UTC',
                'end_date': '2047-11-24 14:50:59 UTC'
            }, {
                'derived_entitlement': True,
                'name': 'Employee SKU',
                'amount': 1,
                'account_number': 1212729,
                'contract_number': 10913844,
                'start_date': '2016-03-24 04:00:00 UTC',
                'end_date': '2022-01-01 04:59:59 UTC'
            }]
        }
        # host_details merges the fields dict and the subscriptions dict.
        expected = {
            'uuid': '00c7a108-48ec-4a97-835c-aa3369777f64',
            'hostname': 'mac52540071bafe.prov.lan',
            'registered_by': 'sat-r220-07.lab.eng.rdu2.redhat.com',
            'registration_time': '2017-12-04 13:33:52 UTC',
            'last_checkin_time': '2018-01-04 17:36:07 UTC',
            'katello_agent_installed': False,
            'os_name': 'RedHat 7.4',
            'organization': 'ACME',
            'virtual_host': '100',
            'virtual_host_name': 'vhost1',
            'virt_type': None,
            'kernel_version': None,
            'architecture': None,
            'is_virtualized': None,
            'cores': None,
            'num_sockets': None,
            'num_virtual_guests': 1,
            'virtual': 'hypervisor',
            'location': 'Raleigh',
            'ip_addresses': ['192.168.99.123'],
            'ipv6_addresses': ['fe80::5054:ff:fe24:946e'],
            'entitlements': [{
                'derived_entitlement': False,
                'name': 'Satellite Tools 6.3',
                'amount': 1,
                'account_number': None,
                'contract_number': None,
                'start_date': '2017-12-01 14:50:59 UTC',
                'end_date': '2047-11-24 14:50:59 UTC'
            }, {
                'derived_entitlement': True,
                'name': 'Employee SKU',
                'amount': 1,
                'account_number': 1212729,
                'contract_number': 10913844,
                'start_date': '2016-03-24 04:00:00 UTC',
                'end_date': '2022-01-01 04:59:59 UTC'
            }]
        }
        with patch('scanner.satellite.six.host_fields',
                   return_value=fields_return_value) as mock_fields:
            with patch('scanner.satellite.six.host_subscriptions',
                       return_value=subs_return_value) as mock_subs:
                details = self.api.host_details(host_id=1,
                                                host_name='sys1')
                self.assertEqual(details, expected)
                mock_fields.assert_called_once_with(ANY, ANY, ANY, ANY, ANY)
                mock_subs.assert_called_once_with(ANY, ANY, ANY, ANY)

    def test_host_details_skip(self):
        """Test host_details method for already captured data."""
        # pylint: disable=no-member
        sys_result = SystemInspectionResult(
            name='sys1', status=SystemInspectionResult.SUCCESS)
        sys_result.save()
        inspect_result = self.scan_task.inspection_result
        inspect_result.systems.add(sys_result)
        inspect_result.save()
        detail = self.api.host_details(1, 'sys1')
        self.assertEqual(len(inspect_result.systems.all()), 1)
        self.assertEqual(detail, {})

    def test_hosts_facts_with_err(self):
        """Test the hosts_facts method."""
        hosts_url = 'https://{sat_host}:{port}/api/v2/hosts'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=hosts_url, sat_host='1.2.3.4')
            mocker.get(url, status_code=500)
            with self.assertRaises(SatelliteException):
                self.api.hosts_facts()

    def test_hosts_facts(self):
        """Test the hosts_facts method."""
        hosts_url = 'https://{sat_host}:{port}/api/v2/hosts'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=hosts_url, sat_host='1.2.3.4')
            jsonresult = {
                'total': 1,
                'subtotal': 1,
                'page': 1,
                'per_page': 100,
                'results': [{'id': 10, 'name': 'sys10'}]
            }  # noqa
            mocker.get(url, status_code=200, json=jsonresult)
            detail_return_value = {}
            with patch.object(
                    SatelliteSixV2, 'host_details',
                    return_value=detail_return_value) as mock_detail:
                self.api.hosts_facts()
                mock_detail.assert_called_once_with(ANY, ANY)
class ConnectTaskRunnerTest(TestCase):
    """Tests against the ConnectTaskRunner class and functions."""

    runner = None

    def setUp(self):
        """Create test case setup."""
        self.cred = Credential(
            name='cred1',
            username='******',
            password='******',
            become_password=None,
            ssh_keyfile=None)
        self.cred.save()
        self.source = Source(name='source1',
                             port=22,
                             hosts='["1.2.3.4"]')
        self.source.save()
        self.source.credentials.add(self.cred)
        self.scan_job, self.scan_task = create_scan_job(
            self.source, ScanTask.SCAN_TYPE_CONNECT)
        # Create runner
        self.runner = ConnectTaskRunner(scan_job=self.scan_job,
                                        scan_task=self.scan_task)

    def tearDown(self):
        """Cleanup test case setup."""
        pass

    def test_store_connect_data(self):
        """Test the connection data method."""
        vm_names = ['vm1', 'vm2']
        # pylint: disable=protected-access
        self.runner._store_connect_data(vm_names, self.cred, self.source)
        self.assertEqual(
            len(self.scan_job.connection_results.task_results.all()), 1)

    def test_get_vm_names(self):
        """Test the get vm names method."""
        children = []
        for ident in range(1, 3):
            name = 'vm' + str(ident)
            config = Mock()
            config.name = name
            summary = Mock()
            summary.config = config
            child = Mock()
            child.summary = summary
            children.append(child)
        vm_container_view = Mock(view=children)
        vm_names = get_vm_names(vm_container_view)
        self.assertTrue(isinstance(vm_names, list))
        self.assertEqual(vm_names, ['vm1', 'vm2'])

    def test_get_vm_container(self):
        """Get the VM container."""
        vcenter = Mock()
        content = Mock()
        content.rootFolder = Mock()
        view_manager = Mock()
        container_view = Mock()
        view_manager.CreateContainerView = Mock(
            return_value=container_view)
        content.viewManager = view_manager
        vcenter.RetrieveContent = Mock(return_value=content)
        c_view = get_vm_container(vcenter)
        self.assertEqual(c_view, container_view)

    def test_connect(self):
        """Test the VCenter connect method."""
        with patch('scanner.vcenter.connect.vcenter_connect',
                   return_value=Mock()) as mock_vcenter_connect:
            with patch('scanner.vcenter.connect.get_vm_container',
                       return_value=Mock()) as mock_get_vm_container:
                with patch('scanner.vcenter.connect.get_vm_names',
                           return_value=['vm1', 'vm2']) as mock_names:
                    vm_names = self.runner.connect()
                    self.assertEqual(vm_names, set(['vm1', 'vm2']))
                    mock_vcenter_connect.assert_called_once_with(ANY)
                    mock_get_vm_container.assert_called_once_with(ANY)
                    mock_names.assert_called_once_with(ANY)

    def test_get_result_none(self):
        """Test get result method when no results exist."""
        results = self.scan_task.get_result().systems.first()
        self.assertEqual(results, None)

    def test_get_result(self):
        """Test get result method when results exist."""
        conn_result = self.scan_task.connection_result
        results = self.scan_task.get_result()
        self.assertEqual(results, conn_result)

    def test_failed_run(self):
        """Test the run method."""
        with patch.object(ConnectTaskRunner, 'connect',
                          side_effect=invalid_login) as mock_connect:
            status = self.runner.run()
            self.assertEqual(ScanTask.FAILED, status[1])
            mock_connect.assert_called_once_with()

    def test_unreachable_run(self):
        """Test the run method with unreachable."""
        with patch.object(ConnectTaskRunner, 'connect',
                          side_effect=unreachable_host) as mock_connect:
            status = self.runner.run()
            self.assertEqual(ScanTask.FAILED, status[1])
            mock_connect.assert_called_once_with()

    def test_run(self):
        """Test the run method."""
        with patch.object(ConnectTaskRunner, 'connect',
                          return_value=['vm1', 'vm2']) as mock_connect:
            status = self.runner.run()
            self.assertEqual(ScanTask.COMPLETED, status[1])
            mock_connect.assert_called_once_with()
class InspectTaskRunnerTest(TestCase):
    """Tests against the InspectTaskRunner class and functions."""

    runner = None

    def setUp(self):
        """Create test case setup."""
        self.cred = Credential(
            name='cred1',
            username='******',
            password='******',
            sudo_password=None,
            ssh_keyfile=None)
        self.cred.save()
        self.source = Source(name='source1', port=22)
        self.source.save()
        self.source.credentials.add(self.cred)
        self.host = HostRange(host_range='1.2.3.4',
                              source_id=self.source.id)
        self.host.save()
        self.source.hosts.add(self.host)
        self.scan_task = ScanTask(scan_type=ScanTask.SCAN_TYPE_INSPECT,
                                  source=self.source, sequence_number=2)
        self.scan_task.save()
        # The inspect task requires a completed connect task prerequisite.
        self.conn_task = ScanTask(scan_type=ScanTask.SCAN_TYPE_CONNECT,
                                  source=self.source, sequence_number=1)
        self.conn_task.systems_count = 5
        self.conn_task.status = ScanTask.COMPLETED
        self.conn_task.save()
        self.scan_task.prerequisites.add(self.conn_task)
        self.scan_task.save()
        self.scan_job = ScanJob(scan_type=ScanTask.SCAN_TYPE_INSPECT)
        self.scan_job.save()
        self.scan_job.tasks.add(self.scan_task)
        self.inspect_results = InspectionResults(scan_job=self.scan_job)
        self.inspect_results.save()
        self.runner = InspectTaskRunner(
            scan_job=self.scan_job,
            scan_task=self.scan_task,
            inspect_results=self.inspect_results)

    def tearDown(self):
        """Cleanup test case setup."""
        pass

    def test_get_nics(self):
        """Test the get_nics method."""
        guest = Mock()
        nics = []
        for k in range(0, 2):
            nic = Mock()
            network = Mock()
            nic.network = network
            nic.macAddress = 'mac' + str(k)
            ip_config = Mock()
            ip_addr = Mock()
            ip_addr.ipAddress = 'ip' + str(k)
            addresses = [ip_addr]
            ip_config.ipAddress = addresses
            nic.ipConfig = ip_config
            nics.append(nic)
        guest.net = nics
        mac_addresses, ip_addresses = get_nics(guest)
        self.assertEqual(mac_addresses, ['mac0', 'mac1'])
        self.assertEqual(ip_addresses, ['ip0', 'ip1'])

    def test_vmsummary(self):
        """Test the vmsummary method."""
        summary = Mock()
        guest = Mock()
        config = Mock()
        runtime = Mock()
        sum_guest = Mock()
        config.uuid = '1111'
        config.memorySizeMB = 1024
        config.numCpu = 2
        config.guestFullName = 'Red Hat 7'
        runtime.powerState = 'powerOn'
        sum_guest.hostName = 'hostname'
        summary.config = config
        summary.runtime = runtime
        summary.guest = sum_guest
        with patch('scanner.vcenter.inspect.get_nics',
                   return_value=(['mac1'], ['ip1'])) as mock_get_nics:
            expected = {
                'cpu': '2',
                'hostname': 'hostname',
                'ip_address': 'ip1',
                'mac': 'mac1',
                'mem': '1.0',
                'ostype': 'Red Hat 7',
                'state': 'powerOn',
                'uuid': '1111'
            }
            vm_summary = vmsummary(summary, guest)
            mock_get_nics.assert_called_once_with(ANY)
            self.assertEqual(vm_summary, expected)

    def test_get_results_none(self):
        """Test get results method when no results exist."""
        results = self.runner.get_results()
        self.assertEqual(results, None)

    def test_get_results(self):
        """Test get results method when results exist."""
        inspect_result = InspectionResult(source=self.source,
                                          scan_task=self.scan_task)
        inspect_result.save()
        self.inspect_results.results.add(inspect_result)
        self.inspect_results.save()
        results = self.runner.get_results()
        self.assertEqual(results, inspect_result)

    def test_get_vm_info(self):
        """Test the get vm info method."""
        data_center = 'dc1'
        cluster = 'cluster1'
        host = 'host1'
        virtual_machine = Mock()
        summary = Mock()
        config = Mock()
        config.name = 'vm1'
        summary.config = config
        virtual_machine.summary = summary
        vm_summary = {
            'cpu': '2',
            'hostname': 'hostname',
            'ip_address': 'ip1',
            'mac': 'mac1',
            'mem': '1.0',
            'ostype': 'Red Hat 7',
            'state': 'powerOn',
            'uuid': '1111'
        }
        with patch('scanner.vcenter.inspect.vmsummary',
                   return_value=vm_summary):
            self.scan_task.systems_count = 5
            self.scan_task.systems_failed = 0
            self.scan_task.systems_scanned = 0
            self.scan_task.save()
            self.runner.get_vm_info(data_center, cluster,
                                    host, virtual_machine)

    # pylint: disable=too-many-locals
    def test_recurse_datacenter(self):
        """Test the recurse_datacenter method."""
        vcenter = Mock()
        content = Mock()
        root_folder = Mock()
        child_entity = []
        for k in range(0, 2):
            child = Mock()
            child.name = 'dc' + str(k)
            host_folder = Mock()
            clusters = []
            for j in range(0, 1):
                cluster = Mock()
                cluster.name = 'cluster' + str(j)
                host = Mock()
                h_summary = Mock()
                h_config = Mock()
                h_config.name = 'host1'
                h_summary.config = h_config
                host.summary = h_summary
                host.vm = [Mock()]
                hosts = [host]
                cluster.host = hosts
                clusters.append(cluster)
            host_folder.childEntity = clusters
            child.hostFolder = host_folder
            child_entity.append(child)
        root_folder.childEntity = child_entity
        content.rootFolder = root_folder
        vcenter.RetrieveContent = Mock(return_value=content)
        with patch.object(InspectTaskRunner,
                          'get_vm_info') as mock_get_vm_info:
            self.runner.recurse_datacenter(vcenter)
            mock_get_vm_info.assert_called_with(ANY, ANY, ANY, ANY)

    def test_inspect(self):
        """Test the inspect method."""
        with patch('scanner.vcenter.inspect.vcenter_connect',
                   return_value=Mock()) as mock_vcenter_connect:
            with patch.object(InspectTaskRunner,
                              'recurse_datacenter') as mock_recurse:
                self.runner.connect_scan_task = self.conn_task
                self.runner.inspect()
                mock_vcenter_connect.assert_called_once_with(ANY)
                mock_recurse.assert_called_once_with(ANY)

    def test_failed_run(self):
        """Test the run method."""
        with patch.object(InspectTaskRunner, 'inspect',
                          side_effect=invalid_login) as mock_connect:
            status = self.runner.run()
            self.assertEqual(ScanTask.FAILED, status)
            mock_connect.assert_called_once_with()

    def test_prereq_failed(self):
        """Test the run method."""
        self.conn_task.status = ScanTask.FAILED
        self.conn_task.save()
        status = self.runner.run()
        self.assertEqual(ScanTask.FAILED, status)

    def test_run(self):
        """Test the run method."""
        with patch.object(InspectTaskRunner, 'inspect') as mock_connect:
            status = self.runner.run()
            self.assertEqual(ScanTask.COMPLETED, status)
            mock_connect.assert_called_once_with()
class NetworkInspectScannerTest(TestCase):
    """Tests network inspect scan task class."""

    # pylint: disable=too-many-instance-attributes, protected-access
    def setUp(self):
        """Create test case setup."""
        self.cred = Credential(
            name='cred1',
            username='******',
            password='******',
            ssh_keyfile=None,
            become_method=None,
            become_user=None,
            become_password=None)
        self.cred.save()
        hc_serializer = CredentialSerializer(self.cred)
        self.cred_data = hc_serializer.data
        self.source = Source(name='source1',
                             port=22,
                             hosts='["1.2.3.4"]')
        self.source.save()
        self.source.credentials.add(self.cred)
        self.host_list = [('1.2.3.4', self.cred_data)]
        # setup scan options
        self.scan_job, self.scan_task = create_scan_job(
            self.source, ScanTask.SCAN_TYPE_INSPECT)
        self.connect_scan_task = self.scan_task.prerequisites.first()
        self.connect_scan_task.update_stats('TEST NETWORK CONNECT.',
                                            sys_failed=0)
        success_sys = SystemConnectionResult(
            name='1.2.3.4', credential=self.cred,
            status=SystemConnectionResult.SUCCESS)
        success_sys.save()
        failed_sys = SystemConnectionResult(
            name='1.1.1.2', status=SystemConnectionResult.FAILED)
        failed_sys.save()
        conn_result = self.connect_scan_task.connection_result
        conn_result.systems.add(success_sys)
        conn_result.systems.add(failed_sys)
        conn_result.save()
        self.connect_scan_task.update_stats('TEST_VC.', sys_count=2,
                                            sys_failed=1, sys_scanned=1)
        self.connect_scan_task.complete()
        self.scan_task.update_stats('TEST NETWORK INSPECT.', sys_failed=0)
        self.fact_endpoint = 'http://testserver' + reverse('facts-list')
        self.scan_job.save()
        scan_data_log.disable_log_for_test()

    def test_scan_inventory(self):
        """Test construct ansible inventory dictionary."""
        serializer = SourceSerializer(self.source)
        source = serializer.data
        connection_port = source['port']
        inventory_dict = _construct_scan_inventory(self.host_list,
                                                   connection_port, 50)
        expected = {
            'all': {
                'children': {
                    'group_0': {
                        'hosts': {
                            '1.2.3.4': {
                                'ansible_user': '******',
                                'ansible_ssh_pass': '******',
                                'ansible_host': '1.2.3.4'
                            }
                        }
                    }
                },
                'vars': {
                    'ansible_port': 22
                }
            }
        }
        self.assertEqual(inventory_dict[1], expected)

    def test_scan_inventory_grouping(self):
        """Test construct ansible inventory dictionary."""
        serializer = SourceSerializer(self.source)
        source = serializer.data
        connection_port = source['port']
        hc_serializer = CredentialSerializer(self.cred)
        cred = hc_serializer.data
        # Group size of 1 forces one ansible group per host.
        inventory_dict = _construct_scan_inventory(
            [('1.2.3.1', cred),
             ('1.2.3.2', cred),
             ('1.2.3.3', cred),
             ('1.2.3.4', cred)],
            connection_port,
            1)
        expected = {
            'all': {
                'children': {
                    'group_0': {
                        'hosts': {
                            '1.2.3.1': {
                                'ansible_user': '******',
                                'ansible_ssh_pass': '******',
                                'ansible_host': '1.2.3.1'
                            }
                        }
                    },
                    'group_1': {
                        'hosts': {
                            '1.2.3.2': {
                                'ansible_user': '******',
                                'ansible_ssh_pass': '******',
                                'ansible_host': '1.2.3.2'
                            }
                        }
                    },
                    'group_2': {
                        'hosts': {
                            '1.2.3.3': {
                                'ansible_user': '******',
                                'ansible_ssh_pass': '******',
                                'ansible_host': '1.2.3.3'
                            }
                        }
                    },
                    'group_3': {
                        'hosts': {
                            '1.2.3.4': {
                                'ansible_user': '******',
                                'ansible_ssh_pass': '******',
                                'ansible_host': '1.2.3.4'
                            }
                        }
                    }
                },
                'vars': {
                    'ansible_port': 22
                }
            }
        }
        self.assertEqual(inventory_dict[1], expected)

    @patch('scanner.network.utils.TaskQueueManager.run',
           side_effect=mock_run_failed)
    def test_inspect_scan_failure(self, mock_run):
        """Test scan flow with mocked manager and failure."""
        scanner = InspectTaskRunner(self.scan_job, self.scan_task)
        # Init for unit test as run is not called
        scanner.connect_scan_task = self.connect_scan_task
        with self.assertRaises(AnsibleError):
            scanner._inspect_scan(self.host_list)
            mock_run.assert_called()

    @patch('scanner.network.inspect.InspectTaskRunner._inspect_scan',
           side_effect=mock_scan_error)
    def test_inspect_scan_error(self, mock_scan):
        """Test scan flow with mocked manager and failure."""
        scanner = InspectTaskRunner(self.scan_job, self.scan_task)
        scan_task_status = scanner.run()
        mock_scan.assert_called_with(self.host_list)
        self.assertEqual(scan_task_status[1], ScanTask.FAILED)

    @patch('scanner.network.utils.TaskQueueManager.run',
           side_effect=mock_run_success)
    def test_inspect_scan_fail_no_facts(self, mock_run):
        """Test running a inspect scan with mocked connection."""
        expected = ([('1.2.3.4', {'name': 'cred1'})], [])
        mock_run.return_value = expected
        with requests_mock.Mocker() as mocker:
            mocker.post(self.fact_endpoint, status_code=201,
                        json={'id': 1})
            scanner = InspectTaskRunner(self.scan_job, self.scan_task)
            scan_task_status = scanner.run()
            mock_run.assert_called_with(ANY)
            self.assertEqual(scan_task_status[1], ScanTask.FAILED)

    def test_populate_callback(self):
        """Test the population of the callback object for inspect scan."""
        callback = InspectResultCallback(scan_task=self.scan_task)
        host = Mock()
        host.name = '1.2.3.4'
        task = Mock(args={'_raw_params': 'command line'})
        result = Mock(_host=host, _result={'rc': 3}, _task=task)
        callback.v2_runner_on_unreachable(result)

    def test_ssh_crash(self):
        """Simulate an ssh crash."""
        scanner = InspectTaskRunner(self.scan_job, self.scan_task)
        path = os.path.abspath(
            os.path.join(os.path.dirname(__file__),
                         '../../../test_util/crash.py'))
        _, result = scanner._inspect_scan(self.host_list,
                                          base_ssh_executable=path)
        self.assertEqual(result, ScanTask.FAILED)

    def test_ssh_hang(self):
        """Simulate an ssh hang."""
        scanner = InspectTaskRunner(self.scan_job, self.scan_task)
        path = os.path.abspath(
            os.path.join(os.path.dirname(__file__),
                         '../../../test_util/hang.py'))
        scanner._inspect_scan(self.host_list,
                              roles=['redhat_release'],
                              base_ssh_executable=path,
                              ssh_timeout='0.1s')
class EngineTest(TestCase):
    """Tests Engine class."""

    def setUp(self):
        """Create test case setup."""
        self.server_id = ServerInformation.create_or_retreive_server_id()
        self.source = Source(name='source1',
                             hosts=json.dumps(['1.2.3.4']),
                             source_type='network',
                             port=22)
        self.source.save()
        scan_job, _ = create_scan_job(self.source)
        self.fp_task = scan_job.tasks.last()  # pylint: disable=no-member
        self.fp_task_runner = FingerprintTaskRunner(scan_job, self.fp_task)

    # pylint: disable=no-self-use,too-many-arguments,too-many-lines
    # pylint: disable=too-many-locals,too-many-branches,invalid-name
    # pylint: disable=protected-access, W0102

    ################################################################
    # Helper functions
    ################################################################
    def _create_network_fc_json(
            self,
            report_id=1,
            source_name='source1',
            source_type=Source.NETWORK_SOURCE_TYPE,
            cpu_count=1,
            etc_release_name='RHEL',
            etc_release_version='7.4 (Maipo)',
            etc_release_release='RHEL 7.4 (Maipo)',
            ifconfig_ip_addresses=None,
            ifconfig_mac_addresses=None,
            dmi_system_uuid=1234,
            subman_virt_uuid=4567,
            subman_consumed=SUBMAN_CONSUMED,
            connection_uuid='a037f26f-2988-57bd-85d8-de7617a3aab0',
            connection_host='1.2.3.4',
            connection_port=22,
            cpu_socket_count=2,
            cpu_core_count=2,
            date_yum_history='2017-07-18',
            date_filesystem_create='2017-06-17',
            date_anaconda_log='2017-05-17',
            date_machine_id='2017-04-17',
            system_purpose_json=None,
            virt_virt='virt-guest',
            virt_type='vmware',
            virt_num_guests=1,
            virt_num_running_guests=1,
            virt_what_type='vt',
            is_redhat=True,
            redhat_certs='fake certs',
            redhat_package_count=100,
            architecture='x86_64',
            user_has_sudo=True):
        """Create an in memory DetailsReport for tests."""
        # pylint: disable=too-many-statements
        # Each keyword argument maps to a raw fact; a falsy value omits
        # the fact entirely so tests can exercise missing-fact paths.
        fact = {}
        if source_name:
            fact['source_name'] = source_name
        if source_type:
            fact['source_type'] = source_type
        if cpu_count:
            fact['cpu_count'] = cpu_count
        if etc_release_name:
            fact['etc_release_name'] = etc_release_name
        if etc_release_version:
            fact['etc_release_version'] = etc_release_version
        if etc_release_release:
            fact['etc_release_release'] = etc_release_release
        if ifconfig_ip_addresses:
            fact['ifconfig_ip_addresses'] = ifconfig_ip_addresses
        else:
            fact['ifconfig_ip_addresses'] = ['1.2.3.4', '2.3.4.5']
        if ifconfig_mac_addresses:
            # MAC addresses are normalized to lower case, mirroring the
            # fingerprinter's own canonicalization.
            fact['ifconfig_mac_addresses'] = \
                list(map(lambda x: x.lower(), ifconfig_mac_addresses))
        else:
            fact['ifconfig_mac_addresses'] = ['mac1', 'mac2']
        if dmi_system_uuid:
            fact['dmi_system_uuid'] = dmi_system_uuid
        if subman_virt_uuid:
            fact['subman_virt_uuid'] = subman_virt_uuid
        if subman_consumed:
            fact['subman_consumed'] = subman_consumed
        if connection_uuid:
            fact['connection_uuid'] = connection_uuid
        if connection_host:
            fact['connection_host'] = connection_host
            fact['uname_hostname'] = connection_host
        if connection_port:
            fact['connection_port'] = connection_port
        if cpu_socket_count:
            fact['cpu_socket_count'] = cpu_socket_count
        if cpu_core_count:
            fact['cpu_core_count'] = cpu_core_count
        if date_anaconda_log:
            fact['date_anaconda_log'] = date_anaconda_log
        if date_yum_history:
            fact['date_yum_history'] = date_yum_history
        if date_filesystem_create:
            fact['date_filesystem_create'] = date_filesystem_create
        if date_machine_id:
            fact['date_machine_id'] = date_machine_id
        if system_purpose_json:
            fact['system_purpose_json'] = system_purpose_json
        if virt_virt:
            fact['virt_virt'] = virt_virt
        if virt_type:
            fact['virt_type'] = virt_type
        if virt_num_guests:
            fact['virt_num_guests'] = virt_num_guests
        if virt_num_running_guests:
            fact['virt_num_running_guests'] = virt_num_running_guests
        if virt_what_type:
            fact['virt_what_type'] = virt_what_type
        if is_redhat:
            fact['redhat_packages_gpg_is_redhat'] = is_redhat
        if redhat_certs:
            fact['redhat_packages_certs'] = redhat_certs
        if redhat_package_count:
            fact['redhat_packages_gpg_num_rh_packages'] = \
                redhat_package_count
        if architecture:
            fact['uname_processor'] = architecture
        # user_has_sudo is always present, even when False.
        fact['user_has_sudo'] = user_has_sudo
        details_report = {'id': report_id, 'facts': [fact]}
        return details_report

    def _create_vcenter_fc_json(
            self,
            report_id=1,
            source_name='source2',
            source_type=Source.VCENTER_SOURCE_TYPE,
            vm_cpu_count=2,
            vm_os='RHEL 7.3',
            vm_mac_addresses=None,
            vm_ip_addresses=None,
            vm_name='TestMachine',
            vm_state='On',
            vm_uuid='a037f26f-2988-57bd-85d8-de7617a3aab0',
            vm_dns_name='site.com',
            vm_host_name='1.2.3.4',
            vm_host_cpu_count=8,
            vm_host_core_count=8,
            vm_datacenter='NY',
            vm_cluster='23sd',
            architecture='x86_64',
            is_redhat=True):
        """Create an in memory DetailsReport for tests."""
        fact = {}
        if source_name:
            fact['source_name'] = source_name
        if source_type:
            fact['source_type'] = source_type
        if vm_cpu_count:
            fact['vm.cpu_count'] = vm_cpu_count
        if vm_os:
            fact['vm.os'] = vm_os
        if vm_ip_addresses:
            fact['vm.ip_addresses'] = vm_ip_addresses
        else:
            fact['vm.ip_addresses'] = ['1.2.3.4', '2.3.4.5']
        if vm_mac_addresses:
            fact['vm.mac_addresses'] = \
                list(map(lambda x: x.lower(), vm_mac_addresses))
        else:
            fact['vm.mac_addresses'] = ['mac1', 'mac2']
        if vm_name:
            fact['vm.name'] = vm_name
        if vm_state:
            fact['vm.state'] = vm_state
        if vm_uuid:
            fact['vm.uuid'] = vm_uuid
        if vm_dns_name:
            fact['vm.dns_name'] = vm_dns_name
        if vm_host_name:
            fact['vm.host.name'] = vm_host_name
        if vm_host_cpu_count:
            fact['vm.host.cpu_count'] = vm_host_cpu_count
        if vm_host_core_count:
            fact['vm.host.cpu_cores'] = vm_host_core_count
        if vm_datacenter:
            fact['vm.datacenter'] = vm_datacenter
        if vm_cluster:
            fact['vm.cluster'] = vm_cluster
        if architecture:
            fact['uname_processor'] = architecture
        # is_redhat only applies when the OS string identifies RHEL.
        if 'red hat enterprise linux' in vm_os.lower() or \
                'rhel' in vm_os.lower():
            fact['is_redhat'] = is_redhat
        details_report = {'id': report_id, 'facts': [fact]}
        return details_report

    def _create_satellite_fc_json(
            self,
            report_id=1,
            source_name='source3',
            source_type=Source.SATELLITE_SOURCE_TYPE,
            hostname='9.8.7.6',
            os_name='RHEL',
            os_release='RHEL 7.3',
            os_version=7.3,
            mac_addresses=None,
            ip_addresses=None,
            cores=32,
            registration_time='2017-03-18',
            uuid='a037f26f-2988-57bd-85d8-de7617a3aab0',
            virt_type='lxc',
            is_virtualized=True,
            virtual_host='9.3.4.6',
            num_sockets=8,
            entitlements=SAT_ENTITLEMENTS,
            architecture='x86_64',
            is_redhat=True):
        """Create an in memory DetailsReport for tests."""
        fact = {}
        if source_name:
            fact['source_name'] = source_name
        if source_type:
            fact['source_type'] = source_type
        if hostname:
            fact['hostname'] = hostname
        if os_name:
            fact['os_name'] = os_name
        if os_release:
            fact['os_release'] = os_release
        if os_version:
            fact['os_version'] = os_version
        if ip_addresses:
            fact['ip_addresses'] = ip_addresses
        else:
            fact['ip_addresses'] = ['1.2.3.4', '2.3.4.5']
        if mac_addresses:
            fact['mac_addresses'] = \
                list(map(lambda x: x.lower(), mac_addresses))
        else:
            fact['mac_addresses'] = ['mac1', 'mac2']
        if registration_time:
            fact['registration_time'] = registration_time
        if cores:
            fact['cores'] = cores
        if uuid:
            fact['uuid'] = uuid
        if virt_type:
            fact['virt_type'] = virt_type
        if is_virtualized:
            fact['is_virtualized'] = is_virtualized
        if virtual_host:
            fact['virtual_host'] = virtual_host
        if num_sockets:
            fact['num_sockets'] = num_sockets
        if entitlements:
            fact['entitlements'] = entitlements
        if architecture:
            fact['architecture'] = architecture
        if 'red hat enterprise linux' in os_name.lower() or \
                'rhel' in os_name.lower():
            fact['is_redhat'] = is_redhat
        details_report = {'id': report_id, 'facts': [fact]}
        return details_report

    def _validate_network_result(self, fingerprint, fact):
        """Help to validate fields."""
        self.assertEqual(fact.get('connection_host'),
                         fingerprint.get('name'))
        self.assertEqual(fact.get('etc_release_name'),
                         fingerprint.get('os_name'))
        self.assertEqual(fact.get('etc_release_release'),
                         fingerprint.get('os_release'))
        self.assertEqual(fact.get('etc_release_version'),
                         fingerprint.get('os_version'))
        self.assertListEqual(fact.get('ifconfig_ip_addresses'),
                             fingerprint.get('ip_addresses'))
        self.assertListEqual(fact.get('ifconfig_mac_addresses'),
                             fingerprint.get('mac_addresses'))
        self.assertEqual(fact.get('cpu_count'),
                         fingerprint.get('cpu_count'))
        self.assertEqual(fact.get('dmi_system_uuid'),
                         fingerprint.get('bios_uuid'))
        self.assertEqual(fact.get('subman_virt_uuid'),
                         fingerprint.get('subscription_manager_id'))
        self.assertEqual(fact.get('cpu_socket_count'),
                         fingerprint.get('cpu_socket_count'))
        self.assertEqual(fact.get('cpu_core_count'),
                         fingerprint.get('cpu_core_count'))
        self.assertEqual(fact.get('date_anaconda_log'),
                         fingerprint.get('date_anaconda_log'))
        self.assertEqual(fact.get('date_yum_history'),
                         fingerprint.get('date_yum_history'))
        self.assertEqual(fact.get('date_machine_id'),
                         fingerprint.get('date_machine_id'))
        self.assertEqual(fact.get('date_filesystem_create'),
                         fingerprint.get('date_filesystem_create'))
        self.assertEqual('virtualized',
                         fingerprint.get('infrastructure_type'))
        self.assertEqual(fact.get('virt_type'),
                         fingerprint.get('virtualized_type'))
        self.assertEqual(fact.get('uname_processor'),
                         fingerprint.get('architecture'))
        self.assertEqual(fact.get('redhat_packages_certs'),
                         fingerprint.get('redhat_certs'))
        self.assertEqual(fact.get('redhat_packages_gpg_is_redhat'),
                         fingerprint.get('is_redhat'))
        self.assertEqual(fact.get('redhat_packages_gpg_num_rh_packages'),
                         fingerprint.get('redhat_package_count'))
        system_purpose_json = fact.get('system_purpose_json', None)
        if system_purpose_json:
            self.assertEqual(system_purpose_json.get('role', None),
                             fingerprint.get('system_role'))
            self.assertEqual(system_purpose_json.get('addons', None),
                             fingerprint.get('system_addons'))
            self.assertEqual(
                system_purpose_json.get('service_level_agreement', None),
                fingerprint.get('system_service_level_agreement'))
            self.assertEqual(system_purpose_json.get('usage_type', None),
                             fingerprint.get('system_usage_type'))
        else:
            self.assertIsNone(fingerprint.get('system_role'))
            self.assertIsNone(fingerprint.get('system_addons'))
            self.assertIsNone(
                fingerprint.get('system_service_level_agreement'))
            self.assertIsNone(fingerprint.get('system_usage_type'))

    def _validate_vcenter_result(self, fingerprint, fact):
        """Help to validate fields."""
        # Fingerprint name prefers the DNS name when one was collected.
        if fact.get('vm.dns_name'):
            self.assertEqual(fact.get('vm.dns_name'),
                             fingerprint.get('name'))
        else:
            self.assertEqual(fact.get('vm.name'), fingerprint.get('name'))
        self.assertEqual(fact.get('vm.os'), fingerprint.get('os_release'))
        self.assertEqual(fact.get('vm.ip_addresses'),
                         fingerprint.get('ip_addresses'))
        self.assertEqual(fact.get('vm.mac_addresses'),
                         fingerprint.get('mac_addresses'))
        self.assertEqual(fact.get('vm.cpu_count'),
                         fingerprint.get('cpu_count'))
        self.assertEqual(fact.get('vm.state'), fingerprint.get('vm_state'))
        self.assertEqual(fact.get('vm.uuid'), fingerprint.get('vm_uuid'))
        self.assertEqual(fact.get('vm.dns_name'),
                         fingerprint.get('vm_dns_name'))
        self.assertEqual(fact.get('vm.host.name'),
                         fingerprint.get('vm_host'))
        self.assertEqual(fact.get('vm.host.cpu_count'),
                         fingerprint.get('vm_host_socket_count'))
        self.assertEqual(fact.get('vm.host.cpu_cores'),
                         fingerprint.get('vm_host_core_count'))
        self.assertEqual(fact.get('vm.datacenter'),
                         fingerprint.get('vm_datacenter'))
        self.assertEqual(fact.get('vm.cluster'),
                         fingerprint.get('vm_cluster'))
        self.assertEqual(fact.get('uname_processor'),
                         fingerprint.get('architecture'))
        self.assertEqual(fact.get('is_redhat'),
                         fingerprint.get('is_redhat'))

    def _validate_satellite_result(self, fingerprint, fact):
        """Help to validate fields."""
        self.assertEqual(fact.get('hostname'), fingerprint.get('name'))
        self.assertEqual(fact.get('os_name'), fingerprint.get('os_name'))
        self.assertEqual(fact.get('os_release'),
                         fingerprint.get('os_release'))
        self.assertEqual(fact.get('os_version'),
                         fingerprint.get('os_version'))
        self.assertEqual(fact.get('cores'), fingerprint.get('cpu_count'))
        self.assertEqual(fact.get('ip_addresses'),
                         fingerprint.get('ip_addresses'))
        self.assertEqual(fact.get('mac_addresses'),
                         fingerprint.get('mac_addresses'))
        self.assertEqual(fact.get('registration_time'),
                         fingerprint.get('registration_time'))
        self.assertEqual(fact.get('uuid'),
                         fingerprint.get('subscription_manager_id'))
        # Hosts named 'virt-who-...-1' through '...-9' are treated as
        # hypervisors by the fingerprinter.
        if fact.get('hostname', '').endswith(
                tuple(['-' + str(num) for num in range(1, 10)])) \
                and fact.get('hostname').startswith('virt-who-'):
            self.assertEqual('hypervisor',
                             fingerprint.get('infrastructure_type'))
        else:
            self.assertEqual('virtualized',
                             fingerprint.get('infrastructure_type'))
        self.assertEqual(fact.get('cores'),
                         fingerprint.get('cpu_core_count'))
        self.assertEqual(fact.get('num_sockets'),
                         fingerprint.get('cpu_socket_count'))
        self.assertEqual(fact.get('architecture'),
                         fingerprint.get('architecture'))
        self.assertEqual(fact.get('is_redhat'),
                         fingerprint.get('is_redhat'))

    def _create_network_fingerprint(self, *args, **kwargs):
        """Create test network fingerprint."""
        n_details_report = self._create_network_fc_json(*args, **kwargs)
        nfact = n_details_report['facts'][0]
        source = {
            'server_id': self.server_id,
            'source_name': 'source1',
            'source_type': Source.NETWORK_SOURCE_TYPE,
            'facts': n_details_report['facts']
        }
        nfingerprints = self.fp_task_runner._process_source(source)
        nfingerprint = nfingerprints[0]
        self._validate_network_result(nfingerprint, nfact)
        return nfingerprint

    def _create_vcenter_fingerprint(self, *args, **kwargs):
        """Create test network/vcenter fingerprints."""
        v_details_report = self._create_vcenter_fc_json(*args, **kwargs)
        vfact = v_details_report['facts'][0]
        source = {
            'server_id': self.server_id,
            'source_name': 'source2',
            'source_type': Source.VCENTER_SOURCE_TYPE,
            'facts': v_details_report['facts']
        }
        vfingerprints = self.fp_task_runner._process_source(source)
        vfingerprint = vfingerprints[0]
        self._validate_vcenter_result(vfingerprint, vfact)
        return vfingerprint

    def _create_satellite_fingerprint(self, *args, **kwargs):
        """Create test network/vcenter fingerprints."""
        s_details_report = self._create_satellite_fc_json(*args, **kwargs)
        vfact = s_details_report['facts'][0]
        source = {
            'server_id': self.server_id,
            'source_name': 'source3',
            'source_type': Source.SATELLITE_SOURCE_TYPE,
            'facts': s_details_report['facts']
        }
        sfingerprints = self.fp_task_runner._process_source(source)
        sfingerprint = sfingerprints[0]
        self._validate_satellite_result(sfingerprint, vfact)
        return sfingerprint

    ################################################################
    # Test Source functions
    ################################################################
    def test_process_network_source(self):
        """Test process network source."""
        # Note the create method runs a validate
        system_purpose_json = None
        self._create_network_fingerprint(
            system_purpose_json=system_purpose_json)
        system_purpose_json = {}
        self._create_network_fingerprint(
            system_purpose_json=system_purpose_json)
        system_purpose_json['_version'] = 1
        self._create_network_fingerprint(
            system_purpose_json=system_purpose_json)
        system_purpose_json['_version'] = 1
        self._create_network_fingerprint(
            system_purpose_json=system_purpose_json)
        system_purpose_json['role'] = 'server'
        self._create_network_fingerprint(
            system_purpose_json=system_purpose_json)
        system_purpose_json['service_level_agreement'] = 'self-service'
        self._create_network_fingerprint(
            system_purpose_json=system_purpose_json)
        system_purpose_json['usage_type'] = 'dev'
        self._create_network_fingerprint(
            system_purpose_json=system_purpose_json)
        system_purpose_json['addons'] = ['ibm']
        self._create_network_fingerprint(
            system_purpose_json=system_purpose_json)
        system_purpose_json['random_extra_field'] = ['redhat']
        self._create_network_fingerprint(
            system_purpose_json=system_purpose_json)

    def test_process_network_system_intent(self):
        """Test process network system_purpose."""
        details_report = self._create_network_fc_json()
        fact = details_report['facts'][0]
        source = {
            'server_id': self.server_id,
            'source_name': 'source1',
            'source_type': Source.NETWORK_SOURCE_TYPE,
            'facts': details_report['facts']
        }
        fingerprints = self.fp_task_runner._process_source(source)
        fingerprint = fingerprints[0]
        self._validate_network_result(fingerprint, fact)

    def test_process_vcenter_source_with_dns(self):
        """Test process vcenter source that has a dns name."""
        details_report = self._create_vcenter_fc_json()
        fact = details_report['facts'][0]
        source = {
            'server_id': self.server_id,
            'source_name': 'source1',
            'source_type': Source.VCENTER_SOURCE_TYPE,
            'facts': details_report['facts']
        }
        fingerprints = self.fp_task_runner._process_source(source)
        fingerprint = fingerprints[0]
        self._validate_vcenter_result(fingerprint, fact)

    def test_process_vcenter_source_no_dns_name(self):
        """Test process vcenter source with no dns name."""
        details_report = self._create_vcenter_fc_json(
            report_id=1,
            source_name='source2',
            source_type=Source.VCENTER_SOURCE_TYPE,
            vm_cpu_count=2,
            vm_os='RHEL 7.3',
            vm_mac_addresses=None,
            vm_ip_addresses=None,
            vm_name='TestMachine',
            vm_state='On',
            vm_uuid='a037f26f-2988-57bd-85d8-de7617a3aab0',
            vm_dns_name=None,
        )
        fact = details_report['facts'][0]
        source = {
            'server_id': self.server_id,
            'source_name': 'source1',
            'source_type': Source.VCENTER_SOURCE_TYPE,
            'facts': details_report['facts']
        }
        fingerprints = self.fp_task_runner._process_source(source)
        fingerprint = fingerprints[0]
        self._validate_vcenter_result(fingerprint, fact)

    def test_process_satellite_source(self):
        """Test process satellite source."""
        details_report = self._create_satellite_fc_json()
        fact = details_report['facts'][0]
        source = {
            'server_id': self.server_id,
            'source_name': 'source1',
            'source_type': Source.SATELLITE_SOURCE_TYPE,
            'facts': details_report['facts']
        }
        fingerprints = self.fp_task_runner._process_source(source)
        fingerprint = fingerprints[0]
        self._validate_satellite_result(fingerprint, fact)

    def test_process_satellite_source_hypervisor(self):
        """Test processing sat source for hypervisor infrastructure."""
        details_report = self._create_satellite_fc_json(
            report_id=1,
            source_name='source3',
            source_type=Source.SATELLITE_SOURCE_TYPE,
            hostname='virt-who-9384389442-5')
        fact = details_report['facts'][0]
        source = {
            'server_id': self.server_id,
            'source_name': 'source1',
            'source_type': Source.SATELLITE_SOURCE_TYPE,
            'facts': details_report['facts']
        }
        fingerprints = self.fp_task_runner._process_source(source)
        fingerprint = fingerprints[0]
        self._validate_satellite_result(fingerprint, fact)

    def test_process_satellite_source_not_hypervisor(self):
        """Test processing sat source for virtualized infrastructure."""
        details_report = self._create_satellite_fc_json(
            report_id=1,
            source_name='source3',
            source_type=Source.SATELLITE_SOURCE_TYPE,
            hostname='virt-who-9384389442-0')
        fact = details_report['facts'][0]
        source = {
            'server_id': self.server_id,
            'source_name': 'source1',
            'source_type': Source.SATELLITE_SOURCE_TYPE,
            'facts': details_report['facts']
        }
        fingerprints = self.fp_task_runner._process_source(source)
        fingerprint = fingerprints[0]
        self._validate_satellite_result(fingerprint, fact)

    ################################################################
    # Test merge functions
    ################################################################
    def test_merge_network_and_vcenter(self):
        """Test merge of two lists of fingerprints."""
        nfingerprints = [
            self._create_network_fingerprint(dmi_system_uuid='match',
                                             ifconfig_mac_addresses=['1']),
            self._create_network_fingerprint(dmi_system_uuid=1,
                                             ifconfig_mac_addresses=['2'])
        ]
        vfingerprints = [
            self._create_vcenter_fingerprint(vm_uuid='match'),
            self._create_vcenter_fingerprint(vm_uuid=2)
        ]
        n_cpu_count = nfingerprints[0]['cpu_count']
        v_cpu_count = vfingerprints[0]['cpu_count']
        v_name = vfingerprints[0]['name']
        self.assertNotEqual(n_cpu_count, v_cpu_count)
        reverse_priority_keys = {'cpu_count'}
        _, result_fingerprints = \
            self.fp_task_runner._merge_fingerprints_from_source_types(
                NETWORK_VCENTER_MERGE_KEYS,
                nfingerprints,
                vfingerprints,
                reverse_priority_keys=reverse_priority_keys)
        self.assertEqual(len(result_fingerprints), 3)
        for result_fingerprint in result_fingerprints:
            if result_fingerprint.get('vm_uuid') == 'match':
                # cpu_count was reverse-prioritized: vcenter value wins
                self.assertEqual(result_fingerprint.get('cpu_count'),
                                 v_cpu_count)
                self.assertNotEqual(result_fingerprint.get('cpu_count'),
                                    n_cpu_count)
                self.assertNotEqual(result_fingerprint.get('name'), v_name)

    def test_merge_network_and_vcenter_infrastructure_type(self):
        """Test if VCenter infrastructure_type is prefered over network."""
        nfingerprints = [
            self._create_network_fingerprint(dmi_system_uuid='match',
                                             ifconfig_mac_addresses=['1'])
        ]
        vfingerprints = [self._create_vcenter_fingerprint(vm_uuid='match')]
        # change infrastructure_type to bypass the validation
        nfingerprints[0]['infrastructure_type'] = 'unknown'
        vfingerprints[0]['infrastructure_type'] = 'virtualized'
        self.assertNotEqual(nfingerprints[0]['infrastructure_type'],
                            vfingerprints[0]['infrastructure_type'])
        reverse_priority_keys = {'cpu_count', 'infrastructure_type'}
        _, result_fingerprints = \
            self.fp_task_runner._merge_fingerprints_from_source_types(
                NETWORK_VCENTER_MERGE_KEYS,
                nfingerprints,
                vfingerprints,
                reverse_priority_keys=reverse_priority_keys)
        for result_fingerprint in result_fingerprints:
            if result_fingerprint.get('vm_uuid') == 'match':
                self.assertEqual(
                    result_fingerprint.get('infrastructure_type'),
                    'virtualized')

    def test_merge_mac_address_case_insensitive(self):
        """Test if fingerprints will be merged with mixed mac addr."""
        n_mac = ['00:50:56:A3:A2:E8', '00:50:56:c3:d2:m8']
        v_mac = ['00:50:56:a3:a2:e8', '00:50:56:C3:D2:m8']
        s_mac = ['00:50:56:A3:a2:E8', '00:50:56:C3:D2:M8']
        self.assertNotEqual(v_mac, n_mac)
        self.assertNotEqual(v_mac, s_mac)
        nfingerprints = [
            self._create_network_fingerprint(ifconfig_mac_addresses=n_mac)
        ]
        vfingerprints = [
            self._create_vcenter_fingerprint(vm_mac_addresses=v_mac)
        ]
        sfingerprints = [
            self._create_satellite_fingerprint(mac_addresses=s_mac)
        ]
        # After fingerprinting, all MACs are lower-cased and thus equal.
        v_mac_addresses = vfingerprints[0]['mac_addresses']
        n_mac_addresses = nfingerprints[0]['mac_addresses']
        s_mac_addresses = sfingerprints[0]['mac_addresses']
        self.assertEqual(v_mac_addresses, n_mac_addresses)
        self.assertEqual(v_mac_addresses, s_mac_addresses)
        _, result_fingerprints = \
            self.fp_task_runner._merge_fingerprints_from_source_types(
                NETWORK_SATELLITE_MERGE_KEYS,
                nfingerprints,
                sfingerprints)
        self.assertEqual(len(result_fingerprints), 1)
        reverse_priority_keys = {'cpu_count', 'infrastructure_type'}
        _, result_fingerprints = \
            self.fp_task_runner._merge_fingerprints_from_source_types(
                NETWORK_VCENTER_MERGE_KEYS,
                nfingerprints,
                vfingerprints,
                reverse_priority_keys=reverse_priority_keys)
        self.assertEqual(len(result_fingerprints), 1)

    def test_merge_net_sate_vcenter_infrastructure_type(self):
        """Test if VCenter infrastructure_type is prefered over the others."""
        nfingerprints = [
            self._create_network_fingerprint(dmi_system_uuid='match',
                                             ifconfig_mac_addresses=['1'])
        ]
        vfingerprints = [self._create_vcenter_fingerprint(vm_uuid='match')]
        sfingerprints = [self._create_satellite_fingerprint(uuid='match')]
        # change infrastructure_type to bypass the validation
        nfingerprints[0]['infrastructure_type'] = 'unknown'
        sfingerprints[0]['infrastructure_type'] = 'test'
        vfingerprints[0]['infrastructure_type'] = 'virtualized'
        _, result_fingerprints = \
            self.fp_task_runner._merge_fingerprints_from_source_types(
                NETWORK_SATELLITE_MERGE_KEYS,
                nfingerprints,
                sfingerprints)
        for result_fingerprint in result_fingerprints:
            if result_fingerprint.get('vm_uuid') == 'match':
                self.assertEqual(
                    result_fingerprint.get('infrastructure_type'), 'test')
        reverse_priority_keys = {'cpu_count', 'infrastructure_type'}
        _, result_fingerprints = \
            self.fp_task_runner._merge_fingerprints_from_source_types(
                NETWORK_VCENTER_MERGE_KEYS,
                nfingerprints,
                vfingerprints,
                reverse_priority_keys=reverse_priority_keys)
        for result_fingerprint in result_fingerprints:
            if result_fingerprint.get('vm_uuid') == 'match':
                self.assertEqual(
                    result_fingerprint.get('infrastructure_type'),
                    'virtualized')

    def test_merge_matching_fingerprints(self):
        """Test merge of two lists of fingerprints."""
        nmetadata = {
            'os_release': {
                'source_name': 'source1',
                'source_type': Source.NETWORK_SOURCE_TYPE,
                'raw_fact_key': 'etc_release_release'
            },
            'bios_uuid': {
                'source_name': 'source1',
                'source_type': Source.NETWORK_SOURCE_TYPE,
                'raw_fact_key': 'dmi_system_uuid'
            }
        }
        nsources = {
            'source1': {
                'source_name': 'source1',
                'source_type': Source.NETWORK_SOURCE_TYPE
            }
        }
        nfingerprint_to_merge = {
            'id': 1,
            'os_release': 'RHEL 7',
            'bios_uuid': 'match',
            'metadata': nmetadata,
            'sources': nsources
        }
        nfingerprint_no_match = {
            'id': 2,
            'os_release': 'RHEL 7',
            'bios_uuid': '2345',
            'metadata': nmetadata,
            'sources': nsources
        }
        nfingerprint_no_key = {
            'id': 3,
            'os_release': 'RHEL 6',
            'metadata': nmetadata,
            'sources': nsources
        }
        nfingerprints = [
            nfingerprint_to_merge,
            nfingerprint_no_match,
            nfingerprint_no_key
        ]
        vmetadata = {
            'os_release': {
                'source_name': 'source1',
                'source_type': Source.NETWORK_SOURCE_TYPE,
                'raw_fact_key': 'etc_release_release'
            },
            'vm_uuid': {
                'source_name': 'source1',
                'source_type': Source.NETWORK_SOURCE_TYPE,
                'raw_fact_key': 'vm.uuid'
            }
        }
        vsources = {
            'source1': {
                'source_name': 'source1',
                'source_type': Source.VCENTER_SOURCE_TYPE
            }
        }
        vfingerprint_to_merge = {
            'id': 5,
            'os_release': 'Windows 7',
            'vm_uuid': 'match',
            'metadata': vmetadata,
            'sources': vsources
        }
        vfingerprint_no_match = {
            'id': 6,
            'os_release': 'RHEL 7',
            'vm_uuid': '9876',
            'metadata': vmetadata,
            'sources': vsources
        }
        vfingerprint_no_key = {
            'id': 7,
            'os_release': 'RHEL 6',
            'metadata': vmetadata,
            'sources': vsources
        }
        vfingerprints = [
            vfingerprint_to_merge,
            vfingerprint_no_match,
            vfingerprint_no_key
        ]
        _, merge_list, no_match_found_list = \
            self.fp_task_runner._merge_matching_fingerprints(
                'bios_uuid', nfingerprints, 'vm_uuid', vfingerprints)
        merged_sources = {
            'source1': {
                'source_name': 'source1',
                'source_type': Source.NETWORK_SOURCE_TYPE
            }
        }
        # merge list should always contain all nfingerprints (base_list)
        self.assertEqual(len(merge_list), 3)
        self.assertTrue(nfingerprint_to_merge in merge_list)
        self.assertTrue(nfingerprint_no_match in merge_list)
        self.assertTrue(nfingerprint_no_key in merge_list)
        # assert VM property merged
        self.assertIsNotNone(nfingerprint_to_merge.get('vm_uuid'))
        # assert network os_release had priority
        self.assertEqual(nfingerprint_to_merge.get('os_release'), 'RHEL 7')
        self.assertEqual(nfingerprint_to_merge.get('sources'),
                         merged_sources)
        # assert those that didn't match, don't have VM properties
        self.assertIsNone(nfingerprint_no_match.get('vm_uuid'))
        self.assertIsNone(nfingerprint_no_key.get('vm_uuid'))
        # no_match_found list should only contain vfingerprints
        # with no match
        self.assertEqual(len(no_match_found_list), 2)
        self.assertTrue(vfingerprint_no_match in no_match_found_list)
        self.assertTrue(vfingerprint_no_key in no_match_found_list)

    def test_remove_duplicate_fingerprints(self):
        """Test remove duplicate fingerprints created by index."""
        fingerprints = [{
            'id': 1,
            'os_release': 'RHEL 7',
            'mac_addresses': ['1234', '2345'],
            'sources': []
        }, {
            'id': 2,
            'os_release': 'RHEL 7',
            'mac_addresses': ['9876', '8765'],
            'sources': []
        }, {
            'id': 3,
            'os_release': 'RHEL 6',
            'sources': []
        }]
        index, no_key_found = \
            self.fp_task_runner._create_index_for_fingerprints(
                'mac_addresses', fingerprints)
        self.assertEqual(len(no_key_found), 1)
        self.assertEqual(no_key_found[0]['id'], 3)
        self.assertIsNotNone(no_key_found[0].get(FINGERPRINT_GLOBAL_ID_KEY))
        self.assertEqual(len(index.keys()), 4)
        self.assertIsNotNone(index.get('1234'))
        self.assertIsNotNone(index.get('2345'))
        self.assertIsNotNone(index.get('9876'))
        self.assertIsNotNone(index.get('8765'))
        # deduplicate but leave unique key
        leave_key_list = list(index.values())
        unique_list = self.fp_task_runner._remove_duplicate_fingerprints(
            [FINGERPRINT_GLOBAL_ID_KEY], leave_key_list)
        self.assertEqual(len(unique_list), 2)
        self.assertIsNotNone(unique_list[0].get(FINGERPRINT_GLOBAL_ID_KEY))
        # same test, but add value that doesn't have key
        leave_key_list = list(index.values())
        leave_key_list.append({'id': 3, 'os_release': 'RHEL 6'})
        unique_list = self.fp_task_runner._remove_duplicate_fingerprints(
            [FINGERPRINT_GLOBAL_ID_KEY], leave_key_list)
        self.assertEqual(len(unique_list), 3)
        # now pass flag to strip id key
        remove_key_list = list(index.values())
        unique_list = self.fp_task_runner._remove_duplicate_fingerprints(
            [FINGERPRINT_GLOBAL_ID_KEY], remove_key_list, True)
        self.assertEqual(len(unique_list), 2)
        self.assertIsNone(unique_list[0].get(FINGERPRINT_GLOBAL_ID_KEY))

    def test_create_index_for_fingerprints(self):
        """Test create index for fingerprints."""
        fingerprints = [{
            'id': 1,
            'os_release': 'RHEL 7',
            'bios_uuid': '1234'
        }, {
            'id': 2,
            'os_release': 'RHEL 7',
            'bios_uuid': '2345'
        }, {
            'id': 3,
            'os_release': 'RHEL 6'
        }]
        # Test that unique id not in objects
        index, no_key_found = \
            self.fp_task_runner._create_index_for_fingerprints(
                'bios_uuid', fingerprints, False)
        self.assertIsNone(no_key_found[0].get(FINGERPRINT_GLOBAL_ID_KEY))
        # Tests with unique id in objects
        index, no_key_found = \
            self.fp_task_runner._create_index_for_fingerprints(
                'bios_uuid', fingerprints)
        self.assertEqual(len(no_key_found), 1)
        self.assertEqual(no_key_found[0]['id'], 3)
        self.assertIsNotNone(no_key_found[0].get(FINGERPRINT_GLOBAL_ID_KEY))
        self.assertEqual(len(index.keys()), 2)
        self.assertIsNotNone(index.get('1234'))
        self.assertIsNotNone(index.get('2345'))

    def test_merge_fingerprint(self):
        """Test merging a vcenter and network fingerprint."""
        nfingerprint = self._create_network_fingerprint()
        vfingerprint = self._create_vcenter_fingerprint()
        self.assertIsNone(nfingerprint.get('vm_state'))
        self.assertIsNone(nfingerprint.get('vm_uuid'))
        self.assertIsNone(nfingerprint.get('vm_dns_name'))
        self.assertIsNone(nfingerprint.get('vm_host_socket_count'))
        self.assertIsNone(nfingerprint.get('vm_datacenter'))
        self.assertIsNone(nfingerprint.get('vm_cluster'))
        self.assertIsNone(vfingerprint.get('os_name'))
        self.assertIsNone(vfingerprint.get('os_version'))
        self.assertIsNone(vfingerprint.get('bios_uuid'))
        self.assertIsNone(vfingerprint.get('subscription_manager_id'))
        self.assertIsNone(vfingerprint.get('cpu_socket_count'))
        self.assertIsNone(vfingerprint.get('cpu_core_count'))
        new_fingerprint = self.fp_task_runner._merge_fingerprint(
            nfingerprint, vfingerprint)
        self.assertIsNotNone(new_fingerprint.get('vm_state'))
        self.assertIsNotNone(new_fingerprint.get('vm_uuid'))
        self.assertIsNotNone(new_fingerprint.get('vm_dns_name'))
        self.assertIsNotNone(new_fingerprint.get('vm_host_socket_count'))
        self.assertIsNotNone(new_fingerprint.get('vm_datacenter'))
        self.assertIsNotNone(new_fingerprint.get('vm_cluster'))
        self.assertIsNotNone(new_fingerprint.get('name'))
        self.assertIsNotNone(new_fingerprint.get('os_name'))
        self.assertIsNotNone(new_fingerprint.get('os_version'))
        self.assertIsNotNone(new_fingerprint.get('bios_uuid'))
        self.assertIsNotNone(new_fingerprint.get('subscription_manager_id'))
        self.assertIsNotNone(new_fingerprint.get('cpu_socket_count'))
        self.assertIsNotNone(new_fingerprint.get('cpu_core_count'))

    def test_merge_fingerprint_sudo(self):
        """Test merging two network one sudo and one without."""
        # Test that sudo is preferred when part of priority fingerprint
        sudo_fingerprint = self._create_network_fingerprint()
        sudo_fingerprint['products'] = []
        sudo_fingerprint['entitlements'] = []
        regular_fingerprint = self._create_network_fingerprint(
            user_has_sudo=False)
        regular_fingerprint['products'] = []
        regular_fingerprint['entitlements'] = []
        result = self.fp_task_runner._merge_fingerprint(
            sudo_fingerprint, regular_fingerprint)
        self.assertEqual(result, sudo_fingerprint)
        metadata = result.get('metadata')
        for key in metadata:
            self.assertTrue(metadata.get(key).get('has_sudo'))
        # Test that sudo is preferred when part of to merge fingerprint
        sudo_fingerprint = self._create_network_fingerprint()
        sudo_fingerprint['products'] = []
        sudo_fingerprint['entitlements'] = []
        regular_fingerprint = self._create_network_fingerprint(
            user_has_sudo=False)
        regular_fingerprint['products'] = []
        regular_fingerprint['entitlements'] = []
        result = self.fp_task_runner._merge_fingerprint(
            regular_fingerprint, sudo_fingerprint)
        metadata = result.get('metadata')
        for key in metadata:
            self.assertTrue(metadata.get(key).get('has_sudo'))

    def test_merge_fingerprint_network_win(self):
        """Test merge of fingerprint prioritizes network values."""
        nfingerprint = self._create_network_fingerprint()
        vfingerprint = self._create_vcenter_fingerprint()
        nfingerprint['os_release'] = 'Fedora'
        self.assertNotEqual(vfingerprint.get('os_release'),
                            nfingerprint['os_release'])
        new_fingerprint = self.fp_task_runner._merge_fingerprint(
            nfingerprint, vfingerprint)
        self.assertEqual(new_fingerprint.get('os_release'),
                         nfingerprint['os_release'])

    def test_source_name_in_metadata(self):
        """Test that adding facts includes source_name in metadata."""
        sourcetopass = {
            'server_id': self.server_id,
            'source_name': self.source.name,
            'source_type': self.source.source_type
        }
        fingerprint = {'metadata': {}}
        result = self.fp_task_runner._process_network_fact(
            sourcetopass, fingerprint)
        self.assertEqual(
            result['metadata']['infrastructure_type']['source_name'],
            'source1')

    ################################################################
    # Test post processing
    ################################################################
    def test_compute_system_creation_time(self):
        """Test merge of two lists of fingerprints."""
        nfingerprints = [
            self._create_network_fingerprint(ifconfig_mac_addresses=['1'],
                                             date_machine_id='2018-3-7')
        ]
        sfingerprints = [
            self._create_satellite_fingerprint(mac_addresses=['1'])
        ]
        _, result_fingerprints = \
            self.fp_task_runner._merge_fingerprints_from_source_types(
                NETWORK_SATELLITE_MERGE_KEYS,
                nfingerprints,
                sfingerprints)
        self.assertEqual(len(result_fingerprints), 1)
        fp = result_fingerprints[0]
        fp['date_yum_history'] = '2018-1-7'
        fp['date_filesystem_create'] = None
        fp['date_anaconda_log'] = '201837'
        fp['registration_time'] = '2018-4-7 12:45:02'
        fp['date_machine_id'] = None
        self.fp_task_runner._compute_system_creation_time(fp)
        # registration_time is the most recent parseable date, so it wins.
        test_date = datetime.strptime('2018-4-7', '%Y-%m-%d').date()
        self.assertEqual(fp['system_creation_date'], test_date)
        metadata = fp['metadata']['system_creation_date']['raw_fact_key']
        self.assertEqual('registration_time', metadata)

    ################################################################
    # Test multi_format_dateparse
    ################################################################
    def test_multi_format_dateparse(self):
        """Test multi_format_dateparse with various formats."""
        source = {'source_type': 'network', 'source_name': 'test_source'}
        test_date = datetime.strptime('2018-4-7', '%Y-%m-%d').date()
        date_value = self.fp_task_runner._multi_format_dateparse(
            source,
            'fake_key',
            '2018-4-7 12:45:02',
            ['%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M:%S %z'])
        self.assertEqual(date_value, test_date)
        date_value = self.fp_task_runner._multi_format_dateparse(
            source,
            'fake_key',
            '2018-4-7 12:45:02 -0400',
            ['%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M:%S %z'])
        self.assertEqual(date_value, test_date)
        # No matching format: parse returns None rather than raising.
        date_value = self.fp_task_runner._multi_format_dateparse(
            source,
            'fake_key',
            '2018-4-7 12:45:02 -0400',
            ['%Y-%m-%d %H:%M:%S'])
        self.assertIsNone(date_value)

    def test_process_details_report_failed(self):
        """Test processing a details report no valid fps."""
        fact_collection = {}
        deployments_report = DeploymentsReport(report_id=1)
        details_report = DetailsReport(deployment_report=deployments_report)
        with patch('fingerprinter.task.FingerprintTaskRunner._process_sources',
                   return_value=fact_collection):
            status_message, status = \
                self.fp_task_runner._process_details_report('',
                                                            details_report)
            self.assertIn('failed', status_message.lower())
            self.assertEqual(status, 'failed')

    def test_process_details_report_success(self):
        """Test processing a details report success."""
        fact_collection = {
            'name': 'dhcp181-3.gsslab.rdu2.redhat.com',
            'metadata': {},
            'sources': []
        }
        deployments_report = DeploymentsReport(report_id=1, id=1)
        deployments_report.save()
        details_report = DetailsReport(id=1,
                                       deployment_report=deployments_report)
        with patch('fingerprinter.task.FingerprintTaskRunner._process_sources',
                   return_value=[fact_collection]):
            status_message, status = \
                self.fp_task_runner._process_details_report('',
                                                            details_report)
            self.assertIn('success', status_message.lower())
            self.assertEqual(status, 'completed')

    def test_process_details_report_exception(self):
        """Test processing a details report with an exception."""
        fact_collection = {
            'name': 'dhcp181-3.gsslab.rdu2.redhat.com',
            'metadata': {},
            'sources': []
        }
        deployments_report = DeploymentsReport(report_id=1, id=1)
        deployments_report.save()
        details_report = DetailsReport(id=1,
                                       deployment_report=deployments_report)
        with patch('fingerprinter.task.FingerprintTaskRunner._process_sources',
                   return_value=[fact_collection]):
            with patch('fingerprinter.task.SystemFingerprintSerializer.save',
                       side_effect=DataError):
                status_message, status = \
                    self.fp_task_runner._process_details_report(
                        '', details_report)
                self.assertIn('failed', status_message.lower())
                self.assertEqual(status, 'failed')
class SatelliteUtilsTest(TestCase):
    """Tests Satellite utils functions."""

    def setUp(self):
        """Create test case setup.

        Builds a satellite credential, a single-host source with SSL
        verification disabled, and a connect-type scan job/task shared
        by every test in this class.
        """
        self.cred = Credential(name='cred1',
                               cred_type=Credential.SATELLITE_CRED_TYPE,
                               username='******',
                               password='******',
                               become_password=None,
                               become_method=None,
                               become_user=None,
                               ssh_keyfile=None)
        self.cred.save()
        self.source = Source(name='source1',
                             port=443,
                             hosts='["1.2.3.4"]')
        self.source.save()
        self.source.credentials.add(self.cred)
        # Disable cert verification so the utils build plain https URLs.
        self.options = SourceOptions(ssl_cert_verify=False)
        self.options.save()
        self.source.options = self.options
        self.source.save()
        self.scan_job, self.scan_task = create_scan_job(
            self.source, scan_type=ScanTask.SCAN_TYPE_CONNECT)

    def tearDown(self):
        """Cleanup test case setup."""
        pass

    def test_get_credential(self):
        """Test the method to extract credential."""
        cred = get_credential(self.scan_task)
        self.assertEqual(cred, self.cred)

    def test_get_connect_data(self):
        """Test method to get connection data from task."""
        host, port, user, password = get_connect_data(self.scan_task)
        self.assertEqual(host, '1.2.3.4')
        self.assertEqual(port, 443)
        self.assertEqual(user, 'username')
        self.assertEqual(password, 'password')

    def test_construct_url(self):
        """Test method to construct satellite url."""
        expected = 'https://1.2.3.4:443/api/status'
        status_url = 'https://{sat_host}:{port}/api/status'
        url = construct_url(status_url, '1.2.3.4')
        self.assertEqual(url, expected)

    def test_execute_request(self):
        """Test the method to execute a request against a satellite server."""
        status_url = 'https://{sat_host}:{port}/api/status'
        with requests_mock.Mocker() as mocker:
            url = construct_url(status_url, '1.2.3.4')
            jsonresult = {'api_version': 2}
            mocker.get(url, status_code=200, json=jsonresult)
            response, formatted_url = execute_request(self.scan_task,
                                                      status_url)
            self.assertEqual(url, formatted_url)
            self.assertEqual(response.status_code, 200)
            self.assertEqual(response.json(), jsonresult)

    @patch('scanner.satellite.utils._status6',
           return_value=(200, SATELLITE_VERSION_6, SATELLITE_VERSION_6))
    def test_status_sat6(self, mock_status5):
        """Test a patched status request to Satellite 6 server."""
        # NOTE(review): the injected mock patches _status6 but the
        # parameter is named mock_status5 -- misleading name; confirm
        # before renaming.
        status_code, api_version, satellite_version = status(self.scan_task)
        self.assertEqual(status_code, 200)
        self.assertEqual(api_version, SATELLITE_VERSION_6)
        self.assertEqual(satellite_version, SATELLITE_VERSION_6)
        mock_status5.assert_called_once_with(ANY)

    @patch('xmlrpc.client.ServerProxy')
    def test_status5(self, mock_serverproxy):
        """Test a successful status request to Satellite 5 server."""
        client = mock_serverproxy.return_value
        client.auth.login.return_value = 'key'
        client.auth.logout.return_value = 'key'
        status_code, api_version, satellite_version = _status5(self.scan_task)
        self.assertEqual(status_code, 200)
        self.assertEqual(api_version, SATELLITE_VERSION_5)
        self.assertEqual(satellite_version, SATELLITE_VERSION_5)

    @patch('xmlrpc.client.ServerProxy')
    def test_status5_xmlfault(self, mock_serverproxy):
        """Test an xml fault during a Satellite 5 status request."""
        client = mock_serverproxy.return_value
        client.auth.login.side_effect = mock_xml_fault
        with self.assertRaises(SatelliteException):
            _status5(self.scan_task)
            # NOTE(review): unreachable -- the call above raises, so this
            # assertion never runs; it also targets the mock class rather
            # than its return_value.
            mock_serverproxy.auth.login.assert_called_once_with(ANY, ANY)

    @patch('xmlrpc.client.ServerProxy')
    def test_status(self, mock_serverproxy):
        """Test a successful status request to Satellite server."""
        # The xmlrpc login faults, but the https status endpoint answers
        # with api_version 2, so status() reports a Satellite 6 server.
        client = mock_serverproxy.return_value
        client.auth.login.side_effect = mock_xml_fault
        with requests_mock.Mocker() as mocker:
            status_url = 'https://{sat_host}:{port}/api/status'
            url = construct_url(status_url, '1.2.3.4')
            jsonresult = {'api_version': 2}
            mocker.get(url, status_code=200, json=jsonresult)
            status_code, api_version, satellite_version = \
                status(self.scan_task)
            self.assertEqual(status_code, 200)
            self.assertEqual(api_version, 2)
            self.assertEqual(satellite_version, SATELLITE_VERSION_6)

    @patch('xmlrpc.client.ServerProxy')
    def test_status_error(self, mock_serverproxy):
        """Test an error status request to Satellite server."""
        client = mock_serverproxy.return_value
        client.auth.login.side_effect = mock_xml_fault
        with requests_mock.Mocker() as mocker:
            status_url = 'https://{sat_host}:{port}/api/status'
            url = construct_url(status_url, '1.2.3.4')
            jsonresult = {'api_version': 2}
            # A 401 from the status endpoint must surface as an auth error.
            mocker.get(url, status_code=401, json=jsonresult)
            with self.assertRaises(SatelliteAuthException):
                status(self.scan_task)
                # NOTE(review): unreachable after the raise above.
                mock_serverproxy.auth.login.assert_called_once_with(ANY, ANY)

    def test_data_map(self):
        """Test a mapping of data from a response dictionary."""
        map_dict = {'id': 'uuid', 'color': 'new::color'}
        data = {'uuid': '100', 'new::color': 'blue', 'key': 'value'}
        expected = {'id': '100', 'color': 'blue'}
        mapped = data_map(map_dict, data)
        self.assertEqual(mapped, expected)

    def test_get_sat5_client(self):
        """Test the sat5 client helper."""
        client, user, password = get_sat5_client(self.scan_task)
        self.assertIsNotNone(client)
        self.assertEqual(user, 'username')
        self.assertEqual(password, 'password')

    def test_validate_task_stats(self):
        """Test validate task stats no errors."""
        validate_task_stats(self.scan_task)

    def test_validate_task_stats_error(self):
        """Test validate task stats errors."""
        with self.assertRaises(SatelliteException):
            # Incrementing the count without scanning makes stats invalid.
            self.scan_task.increment_stats('TEST', increment_sys_count=True)
            validate_task_stats(self.scan_task)
class SatelliteSixV2Test(TestCase):
    """Tests Satellite 6 v2 functions."""

    def setUp(self):
        """Create test case setup.

        Builds a satellite credential/source, an inspect scan job/task,
        a SatelliteSixV2 api object, and seeds the connect task with one
        successful system connection result ('sys1_1').
        """
        self.cred = Credential(name='cred1',
                               cred_type=Credential.SATELLITE_CRED_TYPE,
                               username='******',
                               password='******',
                               become_password=None,
                               become_method=None,
                               become_user=None,
                               ssh_keyfile=None)
        self.cred.save()
        self.source = Source(name='source1',
                             port=443,
                             hosts='["1.2.3.4"]')
        self.source.save()
        self.source.credentials.add(self.cred)
        self.scan_job, self.scan_task = create_scan_job(
            self.source, ScanTask.SCAN_TYPE_INSPECT)
        self.scan_task.update_stats('TEST_SAT.', sys_scanned=0)
        self.api = SatelliteSixV2(self.scan_job, self.scan_task)
        job_conn_result = JobConnectionResult()
        job_conn_result.save()
        connection_results = TaskConnectionResult(
            job_connection_result=job_conn_result)
        connection_results.save()
        self.api.connect_scan_task.connection_result = connection_results
        self.api.connect_scan_task.connection_result.save()
        conn_result = self.api.connect_scan_task.connection_result
        sys_result = SystemConnectionResult(
            name='sys1_1',
            status=SystemInspectionResult.SUCCESS,
            task_connection_result=conn_result)
        sys_result.save()
        self.api.connect_scan_task.save()

    def tearDown(self):
        """Cleanup test case setup."""
        pass

    def test_host_count(self):
        """Test the method host_count."""
        hosts_url = 'https://{sat_host}:{port}/api/v2/hosts'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=hosts_url,
                                sat_host='1.2.3.4',
                                org_id=1)
            jsonresult = {'results': [{'name': 'sys1'},
                                      {'name': 'sys2'},
                                      {'name': 'sys3'}],
                          'per_page': 100,
                          'total': 3}
            mocker.get(url, status_code=200, json=jsonresult)
            systems_count = self.api.host_count()
            self.assertEqual(systems_count, 3)

    def test_host_count_with_err(self):
        """Test the method host_count with error."""
        hosts_url = 'https://{sat_host}:{port}/api/v2/hosts'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=hosts_url,
                                sat_host='1.2.3.4',
                                org_id=1)
            jsonresult = {'results': [{'name': 'sys1'},
                                      {'name': 'sys2'},
                                      {'name': 'sys3'}],
                          'per_page': 100,
                          'total': 3}
            # 500 from the hosts endpoint must raise.
            mocker.get(url, status_code=500, json=jsonresult)
            with self.assertRaises(SatelliteException):
                self.api.host_count()

    def test_hosts(self):
        """Test the method hosts."""
        hosts_url = 'https://{sat_host}:{port}/api/v2/hosts'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=hosts_url,
                                sat_host='1.2.3.4',
                                org_id=1)
            jsonresult = {'results': [{'name': 'sys1', 'id': 1},
                                      {'name': 'sys2', 'id': 2},
                                      {'name': 'sys3', 'id': 3}],
                          'per_page': 100,
                          'total': 3}
            mocker.get(url, status_code=200, json=jsonresult)
            systems_count = self.api.host_count()
            hosts = self.api.hosts()
            self.assertEqual(systems_count, 3)
            self.assertEqual(len(hosts), 3)
            # Host names come back as '<name>_<id>'.
            self.assertEqual(hosts, ['sys1_1', 'sys2_2', 'sys3_3'])

    def test_hosts_with_err(self):
        """Test the method hosts with error."""
        hosts_url = 'https://{sat_host}:{port}/api/v2/hosts'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=hosts_url,
                                sat_host='1.2.3.4',
                                org_id=1)
            jsonresult = {'results': [{'name': 'sys1'},
                                      {'name': 'sys2'},
                                      {'name': 'sys3'}],
                          'per_page': 100,
                          'total': 3}
            mocker.get(url, status_code=500, json=jsonresult)
            with self.assertRaises(SatelliteException):
                self.api.hosts()

    def test_processing_fields_with_err(self):
        """Test the post_processing with error."""
        host_field_url = 'https://{sat_host}:{port}/api/v2/hosts/{host_id}'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=host_field_url,
                                sat_host='1.2.3.4',
                                host_id=1)
            mocker.get(url, status_code=500)
            result = request_host_details(
                self.scan_task,
                {'job_id': self.scan_job.id,
                 'task_sequence_number': self.scan_task.id,
                 'scan_type': self.scan_task.scan_type,
                 'source_type': self.scan_task.source.source_type,
                 'source_name': self.scan_task.source.name},
                1, 'sys', url, url, {})
            # A failed request yields empty responses and a failed status.
            expected = {'unique_name': 'sys_1',
                        'system_inspection_result': 'failed',
                        'host_fields_response': {},
                        'host_subscriptions_response': {}}
            self.assertEqual(result, expected)
            process_results(self.api, [result], 1)
            inspect_results = self.scan_task.inspection_result.systems.all()
            sys_1_result = inspect_results.filter(name='sys_1').first()
            self.assertEqual(sys_1_result.name, 'sys_1')
            self.assertEqual(sys_1_result.status, 'failed')

    def test_host_fields(self):
        """Test the method host_fields."""
        host_field_url = 'https://{sat_host}:{port}/api/v2/hosts/{host_id}'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=host_field_url,
                                sat_host='1.2.3.4',
                                host_id=1)
            jsonresult = {
                'architecture_id': 1,
                'architecture_name': 'x86_64',
                'operatingsystem_name': 'RedHat 7.4',
                'uuid': None,
                'created_at': '2017-12-04 13:19:57 UTC',
                'updated_at': '2017-12-04 13:21:47 UTC',
                'organization_name': 'ACME',
                'location_name': 'Raleigh',
                'name': 'mac52540071bafe.prov.lan',
                'virtual_host': {'uuid': '100',
                                 'name': 'vhost1'},
                'virtual_guests': [{'name': 'foo'}],
                'content_facet_attributes': {
                    'id': 11,
                    'katello_agent_installed': False},
                'subscription_facet_attributes': {
                    'uuid': '00c7a108-48ec-4a97-835c-aa3369777f64',
                    'last_checkin': '2018-01-04 17:36:07 UTC',
                    'registered_at': '2017-12-04 13:33:52 UTC',
                    'registered_by': 'sat-r220-07.lab.eng.rdu2.redhat.com',
                    'virtual_host': {'uuid': '100',
                                     'name': 'vhost1'},
                    'virtual_guests': [{'name': 'foo'}],
                },
                'facts': {
                    'memorysize_mb': '992.45',
                    'memorysize': '992.45 MB',
                    'hostname': 'fdi',
                    'type': 'Other',
                    'architecture': 'x86_64',
                    'is_virtual': 'true',
                    'virtual': 'kvm',
                    'net::interface::ipv4_address': '192.168.99.123',
                    'net::interface::mac_address': 'fe80::5054:ff:fe24:946e',
                },
            }
            mocker.get(url, status_code=200, json=jsonresult)
            host_info = host_fields(2, jsonresult)
            expected = {'uuid': '00c7a108-48ec-4a97-835c-aa3369777f64',
                        'hostname': 'mac52540071bafe.prov.lan',
                        'registered_by':
                            'sat-r220-07.lab.eng.rdu2.redhat.com',
                        'registration_time': '2017-12-04 13:33:52 UTC',
                        'last_checkin_time': '2018-01-04 17:36:07 UTC',
                        'katello_agent_installed': False,
                        'os_release': 'RedHat 7.4',
                        'organization': 'ACME',
                        'virtual_host_uuid': '100',
                        'virtual_host_name': 'vhost1',
                        'virt_type': None,
                        'kernel_version': None,
                        'architecture': None,
                        'is_virtualized': None,
                        'cores': None,
                        'num_sockets': None,
                        'num_virtual_guests': 1,
                        'virtual': 'hypervisor',
                        'location': 'Raleigh',
                        'ip_addresses': ['192.168.99.123'],
                        'mac_addresses': ['fe80::5054:ff:fe24:946e'],
                        'os_name': 'RedHat',
                        'os_version': '7.4'}
            self.assertEqual(host_info, expected)

    def test_get_https_with_err(self):
        """Test the host subscriptons method with bad status code."""
        sub_url = 'https://{sat_host}:{port}/' \
            'api/v2/hosts/{host_id}/subscriptions'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=sub_url,
                                sat_host='1.2.3.4',
                                host_id=1)
            mocker.get(url, status_code=500)
            result = request_host_details(
                self.scan_task,
                {'job_id': self.scan_job.id,
                 'task_sequence_number': self.scan_task.id,
                 'scan_type': self.scan_task.scan_type,
                 'source_type': self.scan_task.source.source_type,
                 'source_name': self.scan_task.source.name},
                1, 'sys', url, url, {})
            expected = {'unique_name': 'sys_1',
                        'system_inspection_result': 'failed',
                        'host_fields_response': {},
                        'host_subscriptions_response': {}}
            self.assertEqual(result, expected)

    def test_processing_subs_err_nojson(self):
        """Test the flow of post processing with bad code and not json."""
        sub_url = 'https://{sat_host}:{port}/' \
            'api/v2/hosts/{host_id}/subscriptions'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=sub_url,
                                sat_host='1.2.3.4',
                                host_id=1)
            # Non-JSON error body must still end as a failed system.
            mocker.get(url, status_code=404, text='error message')
            result = request_host_details(
                self.scan_task,
                {'job_id': self.scan_job.id,
                 'task_sequence_number': self.scan_task.id,
                 'scan_type': self.scan_task.scan_type,
                 'source_type': self.scan_task.source.source_type,
                 'source_name': self.scan_task.source.name},
                1, 'sys', url, url, {})
            process_results(self.api, [result], 1)
            inspect_results = self.scan_task.inspection_result.systems.all()
            sys_1_result = inspect_results.filter(name='sys_1').first()
            self.assertEqual(sys_1_result.name, 'sys_1')
            self.assertEqual(sys_1_result.status, 'failed')

    def test_host_not_subscribed(self):
        """Test the host subscriptons method for not subscribed error."""
        sub_url = 'https://{sat_host}:{port}/' \
            'api/v2/hosts/{host_id}/subscriptions'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=sub_url,
                                sat_host='1.2.3.4',
                                host_id=1)
            err_msg = {
                'displayMessage': 'Host has not been registered '
                                  'with subscription-manager',
                'errors': ['Host has not been registered'
                           ' with subscription-manager']
            }  # noqa
            mocker.get(url, status_code=400, json=err_msg)
            result = request_host_details(
                self.scan_task,
                {'job_id': self.scan_job.id,
                 'task_sequence_number': self.scan_task.id,
                 'scan_type': self.scan_task.scan_type,
                 'source_type': self.scan_task.source.source_type,
                 'source_name': self.scan_task.source.name},
                1, 'sys', url, url, {})
            process_results(self.api, [result], 1)
            inspect_results = self.scan_task.inspection_result.systems.all()
            sys_1_result = inspect_results.filter(name='sys_1').first()
            self.assertEqual(sys_1_result.name, 'sys_1')
            self.assertEqual(sys_1_result.status, 'failed')

    def test_host_subscriptons(self):
        """Test the host subscriptons method."""
        sub_url = 'https://{sat_host}:{port}/' \
            'api/v2/hosts/{host_id}/subscriptions'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=sub_url,
                                sat_host='1.2.3.4',
                                host_id=1)
            jsonresult = {
                'results': [{
                    'amount': 1,
                    'name': 'Satellite Tools 6.3',
                    'start_date': '2017-12-01 14:50:59 UTC',
                    'end_date': '2047-11-24 14:50:59 UTC',
                    'product_name': 'Satellite Tools 6.3',
                }, {
                    'quantity_consumed': 1,
                    'name': 'Employee SKU',
                    'start_date': '2016-03-24 04:00:00 UTC',
                    'end_date': '2022-01-01 04:59:59 UTC',
                    'account_number': 1212729,
                    'contract_number': 10913844,
                    'type': 'ENTITLEMENT_DERIVED',
                    'product_name': 'Employee SKU',
                }]
            }
            mocker.get(url, status_code=200, json=jsonresult)
            subs = host_subscriptions(jsonresult)
            expected = {
                'entitlements': [{
                    'derived_entitlement': False,
                    'name': 'Satellite Tools 6.3',
                    'amount': 1,
                    'account_number': None,
                    'contract_number': None,
                    'start_date': '2017-12-01 14:50:59 UTC',
                    'end_date': '2047-11-24 14:50:59 UTC'
                }, {
                    'derived_entitlement': True,
                    'name': 'Employee SKU',
                    'amount': 1,
                    'account_number': 1212729,
                    'contract_number': 10913844,
                    'start_date': '2016-03-24 04:00:00 UTC',
                    'end_date': '2022-01-01 04:59:59 UTC'
                }]
            }
            self.assertEqual(subs, expected)

    def test_post_processing_err(self):
        """Test error flow & check that a failed system is marked."""
        response = {'unique_name': 'sys_1',
                    'system_inspection_result': SystemInspectionResult.FAILED,
                    'host_fields_response': {},
                    'host_subscriptions_response': {}}
        process_results(self.api, [response], 1)
        inspect_results = self.scan_task.inspection_result.systems.all()
        sys_1_result = inspect_results.filter(name='sys_1').first()
        self.assertEqual(sys_1_result.name, 'sys_1')
        self.assertEqual(sys_1_result.status, 'failed')

    def test_post_processing(self):
        """Test process_results method with mock data."""
        fields_return_value = {
            'uuid': '00c7a108-48ec-4a97-835c-aa3369777f64',
            'hostname': 'mac52540071bafe.prov.lan',
            'registered_by': 'sat-r220-07.lab.eng.rdu2.redhat.com',
            'registration_time': '2017-12-04 13:33:52 UTC',
            'last_checkin_time': '2018-01-04 17:36:07 UTC',
            'katello_agent_installed': False,
            'os_name': 'RedHat 7.4',
            'organization': 'ACME',
            'virtual_host_uuid': '100',
            'virtual_host_name': 'vhost1',
            'virt_type': None,
            'kernel_version': None,
            'architecture': None,
            'is_virtualized': None,
            'cores': None,
            'num_sockets': None,
            'num_virtual_guests': 1,
            'virtual': 'hypervisor',
            'location': 'Raleigh',
            'ip_addresses': ['192.168.99.123'],
            'ipv6_addresses': ['fe80::5054:ff:fe24:946e']
        }
        subs_return_value = {
            'entitlements': [{
                'derived_entitlement': False,
                'name': 'Satellite Tools 6.3',
                'amount': 1,
                'account_number': None,
                'contract_number': None,
                'start_date': '2017-12-01 14:50:59 UTC',
                'end_date': '2047-11-24 14:50:59 UTC'
            }, {
                'derived_entitlement': True,
                'name': 'Employee SKU',
                'amount': 1,
                'account_number': 1212729,
                'contract_number': 10913844,
                'start_date': '2016-03-24 04:00:00 UTC',
                'end_date': '2022-01-01 04:59:59 UTC'
            }]
        }
        # Expected facts: field values minus None entries, plus the
        # entitlements list.
        expected = {
            'uuid': '00c7a108-48ec-4a97-835c-aa3369777f64',
            'hostname': 'mac52540071bafe.prov.lan',
            'registered_by': 'sat-r220-07.lab.eng.rdu2.redhat.com',
            'registration_time': '2017-12-04 13:33:52 UTC',
            'last_checkin_time': '2018-01-04 17:36:07 UTC',
            'katello_agent_installed': False,
            'os_name': 'RedHat 7.4',
            'organization': 'ACME',
            'virtual_host_uuid': '100',
            'virtual_host_name': 'vhost1',
            'num_virtual_guests': 1,
            'virtual': 'hypervisor',
            'location': 'Raleigh',
            'ip_addresses': ['192.168.99.123'],
            'ipv6_addresses': ['fe80::5054:ff:fe24:946e'],
            'entitlements': [{
                'derived_entitlement': False,
                'name': 'Satellite Tools 6.3',
                'amount': 1,
                'account_number': None,
                'contract_number': None,
                'start_date': '2017-12-01 14:50:59 UTC',
                'end_date': '2047-11-24 14:50:59 UTC'
            }, {
                'derived_entitlement': True,
                'name': 'Employee SKU',
                'amount': 1,
                'account_number': 1212729,
                'contract_number': 10913844,
                'start_date': '2016-03-24 04:00:00 UTC',
                'end_date': '2022-01-01 04:59:59 UTC'
            }]
        }
        with patch('scanner.satellite.six.host_fields',
                   return_value=fields_return_value) as mock_fields:
            with patch('scanner.satellite.six.host_subscriptions',
                       return_value=subs_return_value) as mock_subs:
                result = {'unique_name': 'sys_1',
                          'system_inspection_result':
                              SystemInspectionResult.SUCCESS,
                          'host_fields_response': fields_return_value,
                          'host_subscriptions_response': subs_return_value}
                process_results(self.api, [result], 1)
                inspect_results = \
                    self.scan_task.inspection_result.systems.all()
                sys_1_result = inspect_results.filter(name='sys_1').first()
                self.assertEqual(sys_1_result.name, 'sys_1')
                self.assertEqual(sys_1_result.status, 'success')
                result = {}
                for fact in sys_1_result.facts.all():
                    result[fact.name] = json.loads(fact.value)
                self.assertEqual(result, expected)
                mock_fields.assert_called_once_with(ANY, ANY)
                mock_subs.assert_called_once_with(ANY)
                # NOTE(review): the two assertions below duplicate the two
                # above -- candidates for removal.
                mock_fields.assert_called_once_with(ANY, ANY)
                mock_subs.assert_called_once_with(ANY)

    def test_hosts_facts_with_err(self):
        """Test the hosts_facts method."""
        hosts_url = 'https://{sat_host}:{port}/api/v2/hosts'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=hosts_url, sat_host='1.2.3.4')
            mocker.get(url, status_code=500)
            with self.assertRaises(SatelliteException):
                self.api.hosts_facts(Value('i', ScanJob.JOB_RUN))

    @patch('multiprocessing.pool.Pool.starmap',
           return_value=[{'unique_name': 'sys_1',
                          'system_inspection_result': 'success',
                          'host_fields_response': {},
                          'host_subscriptions_response': {}}])
    def test_hosts_facts(self, mock_pool):
        """Test the hosts_facts method."""
        # Fresh scan job/task so results don't mix with setUp's task.
        scan_options = ScanOptions(max_concurrency=10)
        scan_options.save()
        scan_job, scan_task = create_scan_job(self.source,
                                              ScanTask.SCAN_TYPE_INSPECT,
                                              scan_name='test_62',
                                              scan_options=scan_options)
        scan_task.update_stats('TEST_SAT.', sys_scanned=0)
        api = SatelliteSixV2(scan_job, scan_task)
        job_conn_result = JobConnectionResult()
        job_conn_result.save()
        connection_results = TaskConnectionResult(
            job_connection_result=job_conn_result)
        connection_results.save()
        api.connect_scan_task.connection_result = connection_results
        api.connect_scan_task.connection_result.save()
        sys_result = SystemConnectionResult(
            name='sys1_1',
            status=SystemInspectionResult.SUCCESS,
            task_connection_result=api.connect_scan_task.connection_result)
        sys_result.save()
        api.connect_scan_task.save()
        hosts_url = 'https://{sat_host}:{port}/api/v2/hosts'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=hosts_url, sat_host='1.2.3.4')
            jsonresult = {'total': 1,
                          'subtotal': 1,
                          'page': 1,
                          'per_page': 100,
                          'results': [{'id': 10, 'name': 'sys10'}]}  # noqa
            mocker.get(url, status_code=200, json=jsonresult)
            api.hosts_facts(Value('i', ScanJob.JOB_RUN))
            inspect_result = scan_task.inspection_result
            self.assertEqual(len(inspect_result.systems.all()), 1)
class ConnectTaskRunnerTest(TestCase):
    """Tests Satellite connect capabilities."""

    def setUp(self):
        """Build the credential, source, and connect scan used by each test."""
        self.cred = Credential(
            name='cred1',
            cred_type=Credential.SATELLITE_CRED_TYPE,
            username='******',
            password='******',
            become_password=None,
            become_method=None,
            become_user=None,
            ssh_keyfile=None)
        self.cred.save()
        self.source = Source(
            name='source1',
            port=443,
            hosts='["1.2.3.4"]')
        self.source.save()
        self.source.credentials.add(self.cred)
        self.scan_job, self.scan_task = create_scan_job(
            self.source, ScanTask.SCAN_TYPE_CONNECT)

    def tearDown(self):
        """Cleanup test case setup."""
        pass

    def test_run_sat5_bad_status(self):
        """Connect task fails when Satellite 5 reports a bad status."""
        runner = ConnectTaskRunner(self.scan_job, self.scan_task)
        patched = patch('scanner.satellite.connect.utils.status',
                        return_value=(401, None, SATELLITE_VERSION_5))
        with patched as status_mock:
            run_result = runner.run()
            status_mock.assert_called_once_with(ANY)
            self.assertEqual(run_result[1], ScanTask.FAILED)

    def test_run_sat6_bad_status(self):
        """Connect task fails when Satellite 6 reports a bad status."""
        runner = ConnectTaskRunner(self.scan_job, self.scan_task)
        patched = patch('scanner.satellite.connect.utils.status',
                        return_value=(401, None, SATELLITE_VERSION_6))
        with patched as status_mock:
            run_result = runner.run()
            status_mock.assert_called_once_with(ANY)
            self.assertEqual(run_result[1], ScanTask.FAILED)

    def test_run_sat6_bad_api_version(self):
        """Connect task fails on an unsupported Satellite 6 api version."""
        runner = ConnectTaskRunner(self.scan_job, self.scan_task)
        patched = patch('scanner.satellite.connect.utils.status',
                        return_value=(200, 3, SATELLITE_VERSION_6))
        with patched as status_mock:
            run_result = runner.run()
            status_mock.assert_called_once_with(ANY)
            self.assertEqual(run_result[1], ScanTask.FAILED)

    def test_run_with_conn_err(self):
        """Connect task fails when the status check hits a connection error."""
        runner = ConnectTaskRunner(self.scan_job, self.scan_task)
        patched = patch('scanner.satellite.connect.utils.status',
                        side_effect=mock_conn_exception)
        with patched as status_mock:
            run_result = runner.run()
            status_mock.assert_called_once_with(ANY)
            self.assertEqual(run_result[1], ScanTask.FAILED)

    def test_run_with_sat_err(self):
        """Connect task fails when the status check raises a satellite error."""
        runner = ConnectTaskRunner(self.scan_job, self.scan_task)
        patched = patch('scanner.satellite.connect.utils.status',
                        side_effect=mock_sat_exception)
        with patched as status_mock:
            run_result = runner.run()
            status_mock.assert_called_once_with(ANY)
            self.assertEqual(run_result[1], ScanTask.FAILED)

    def test_run_with_auth_err(self):
        """Connect task fails when the status check raises an auth error."""
        runner = ConnectTaskRunner(self.scan_job, self.scan_task)
        patched = patch('scanner.satellite.connect.utils.status',
                        side_effect=mock_sat_auth_exception)
        with patched as status_mock:
            run_result = runner.run()
            status_mock.assert_called_once_with(ANY)
            self.assertEqual(run_result[1], ScanTask.FAILED)

    def test_run_with_timeout_err(self):
        """Connect task fails when the status check times out."""
        runner = ConnectTaskRunner(self.scan_job, self.scan_task)
        patched = patch('scanner.satellite.connect.utils.status',
                        side_effect=mock_timeout_error)
        with patched as status_mock:
            run_result = runner.run()
            status_mock.assert_called_once_with(ANY)
            self.assertEqual(run_result[1], ScanTask.FAILED)

    def test_run_sat6_v2(self):
        """Connect task completes for Satellite 6 with api version 2."""
        runner = ConnectTaskRunner(self.scan_job, self.scan_task)
        status_patch = patch('scanner.satellite.connect.utils.status',
                             return_value=(200, 2, SATELLITE_VERSION_6))
        count_patch = patch.object(SatelliteSixV2, 'host_count',
                                   return_value=1)
        hosts_patch = patch.object(SatelliteSixV2, 'hosts',
                                   return_value=['sys1'])
        with status_patch as status_mock, \
                count_patch as count_mock, \
                hosts_patch as hosts_mock:
            run_result = runner.run()
            status_mock.assert_called_once_with(ANY)
            count_mock.assert_called_once_with()
            hosts_mock.assert_called_once_with()
            self.assertEqual(run_result[1], ScanTask.COMPLETED)
class TestScanList(TestCase):
    """Tests for the List method.

    These are separate from the other Scan tests because they have more
    setup than the others.
    """

    maxDiff = None

    def setUp(self):
        """Seed the database with two scans; only the second has a job."""
        management.call_command('flush', '--no-input')
        self.cred = Credential.objects.create(name='cred1',
                                              username='******',
                                              password='******',
                                              become_password=None,
                                              ssh_keyfile=None)
        self.cred.save()
        self.cred_for_upload = self.cred.id
        self.source = Source(name='source1',
                             source_type='network',
                             port=22)
        self.source.save()
        self.source.credentials.add(self.cred)
        self.source.save()
        self.test1 = Scan(name='test1',
                          scan_type=ScanTask.SCAN_TYPE_INSPECT)
        self.test1.save()
        self.test1.sources.add(self.source)
        self.test1.save()
        self.test2 = Scan(name='test2',
                          scan_type=ScanTask.SCAN_TYPE_CONNECT)
        self.test2.save()
        self.test2.sources.add(self.source)
        self.test2.save()
        # test1 is left without a most_recent_scanjob; test2 gets one so
        # the serialized listing differs between the two scans.
        recent_job = ScanJob(scan=self.test2)
        recent_job.save()
        recent_job.sources.add(self.source)
        recent_job.save()
        self.test2.most_recent_scanjob = recent_job
        self.test2.save()
        expected_rows = [
            {'id': 1,
             'name': 'test1',
             'sources': [{'id': 1,
                          'name': 'source1',
                          'source_type': 'network'}],
             'scan_type': 'inspect'},
            {'id': 2,
             'jobs': [{'id': 1}],
             'most_recent': {
                 'id': 1,
                 'scan_type': 'inspect',
                 'status': 'created',
                 'status_details': {
                     'job_status_message': 'Job is created.'}},
             'name': 'test2',
             'sources': [{'id': 1,
                          'name': 'source1',
                          'source_type': 'network'}],
             'scan_type': 'connect'}]
        self.expected = {'count': 2,
                         'next': None,
                         'previous': None,
                         'results': expected_rows}

    def test_list(self):
        """List all scan objects."""
        response = self.client.get(reverse('scan-list'))
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.json(), self.expected)

    def test_list_by_scanjob_end_time(self):
        """List all scan objects, ordered by ScanJob start time."""
        response = self.client.get(
            reverse('scan-list'),
            {'ordering': 'most_recent_scanjob__start_time'})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.json(), self.expected)
class SatelliteSixV1Test(TestCase):
    """Tests Satellite 6 v1 functions."""

    def setUp(self):
        """Create test case setup.

        Builds a Satellite credential/source pair, an inspect scan
        job/task, and a connection result containing one successful
        system ('sys1_1') that the inspect-side tests consume.
        """
        self.cred = Credential(
            name='cred1',
            cred_type=Credential.SATELLITE_CRED_TYPE,
            username='******',
            password='******',
            become_password=None,
            become_method=None,
            become_user=None,
            ssh_keyfile=None)
        self.cred.save()
        # hosts is stored as a JSON-encoded string per this codebase's
        # Source model convention.
        self.source = Source(name='source1', port=443, hosts='["1.2.3.4"]')
        self.source.save()
        self.source.credentials.add(self.cred)
        self.scan_job, self.scan_task = create_scan_job(
            self.source, ScanTask.SCAN_TYPE_INSPECT)
        self.api = SatelliteSixV1(self.scan_job, self.scan_task)
        job_conn_result = JobConnectionResult()
        job_conn_result.save()
        connection_results = TaskConnectionResult(
            job_connection_result=job_conn_result)
        connection_results.save()
        self.api.connect_scan_task.connection_result = connection_results
        self.api.connect_scan_task.connection_result.save()
        conn_result = self.api.connect_scan_task.connection_result
        sys_result = SystemConnectionResult(
            name='sys1_1',
            status=SystemInspectionResult.SUCCESS,
            task_connection_result=conn_result)
        sys_result.save()
        self.api.connect_scan_task.save()

    def tearDown(self):
        """Cleanup test case setup."""
        pass

    def test_get_orgs(self):
        """Test the method to get orgs."""
        orgs_url = 'https://{sat_host}:{port}/katello/api/v2/organizations'
        with requests_mock.Mocker() as mocker:
            url = construct_url(orgs_url, '1.2.3.4')
            jsonresult = {
                'results': [{
                    'id': 1
                }, {
                    'id': 7
                }, {
                    'id': 8
                }],
                'per_page': 100
            }
            mocker.get(url, status_code=200, json=jsonresult)
            orgs = self.api.get_orgs()
            # Second call presumably exercises the cached org list —
            # both calls must agree.
            orgs2 = self.api.get_orgs()
            self.assertEqual(orgs, [1, 7, 8])
            self.assertEqual(orgs, orgs2)

    def test_get_orgs_with_err(self):
        """Test the method to get orgs with err."""
        orgs_url = 'https://{sat_host}:{port}/katello/api/v2/organizations'
        with requests_mock.Mocker() as mocker:
            url = construct_url(orgs_url, '1.2.3.4')
            jsonresult = {
                'results': [{
                    'id': 1
                }, {
                    'id': 7
                }, {
                    'id': 8
                }],
                'per_page': 100
            }
            # HTTP 500 from the org listing must surface as a
            # SatelliteException.
            mocker.get(url, status_code=500, json=jsonresult)
            with self.assertRaises(SatelliteException):
                self.api.get_orgs()

    @patch('scanner.satellite.six.SatelliteSixV1.get_orgs')
    def test_host_count(self, mock_get_orgs):
        """Test the method host_count."""
        mock_get_orgs.return_value = [1]
        hosts_url = 'https://{sat_host}:{port}/katello/api' \
            '/v2/organizations/{org_id}/systems'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=hosts_url, sat_host='1.2.3.4', org_id=1)
            jsonresult = {
                'results': [{
                    'name': 'sys1'
                }, {
                    'name': 'sys2'
                }, {
                    'name': 'sys3'
                }],
                'per_page': 100,
                'total': 3
            }
            mocker.get(url, status_code=200, json=jsonresult)
            systems_count = self.api.host_count()
            self.assertEqual(systems_count, 3)

    @patch('scanner.satellite.six.SatelliteSixV1.get_orgs')
    def test_host_count_with_err(self, mock_get_orgs):
        """Test the method host_count with err."""
        mock_get_orgs.return_value = [1]
        hosts_url = 'https://{sat_host}:{port}/katello/api' \
            '/v2/organizations/{org_id}/systems'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=hosts_url, sat_host='1.2.3.4', org_id=1)
            jsonresult = {
                'results': [{
                    'name': 'sys1'
                }, {
                    'name': 'sys2'
                }, {
                    'name': 'sys3'
                }],
                'per_page': 100,
                'total': 3
            }
            mocker.get(url, status_code=500, json=jsonresult)
            with self.assertRaises(SatelliteException):
                self.api.host_count()

    @patch('scanner.satellite.six.SatelliteSixV1.get_orgs')
    def test_hosts(self, mock_get_orgs):
        """Test the method hosts.

        Host names are returned as '<name>_<id>' unique names.
        """
        mock_get_orgs.return_value = [1]
        hosts_url = 'https://{sat_host}:{port}/katello/api' \
            '/v2/organizations/{org_id}/systems'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=hosts_url, sat_host='1.2.3.4', org_id=1)
            jsonresult = {
                'results': [{
                    'name': 'sys1',
                    'id': 1
                }, {
                    'name': 'sys2',
                    'id': 2
                }, {
                    'name': 'sys3',
                    'id': 3
                }],
                'per_page': 100,
                'total': 3
            }
            mocker.get(url, status_code=200, json=jsonresult)
            systems_count = self.api.host_count()
            hosts = self.api.hosts()
            self.assertEqual(systems_count, 3)
            self.assertEqual(len(hosts), 3)
            self.assertEqual(hosts, ['sys1_1', 'sys2_2', 'sys3_3'])

    @patch('scanner.satellite.six.SatelliteSixV1.get_orgs')
    def test_hosts_with_err(self, mock_get_orgs):
        """Test the method hosts."""
        mock_get_orgs.return_value = [1]
        hosts_url = 'https://{sat_host}:{port}/katello/api' \
            '/v2/organizations/{org_id}/systems'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=hosts_url, sat_host='1.2.3.4', org_id=1)
            jsonresult = {
                'results': [{
                    'name': 'sys1'
                }, {
                    'name': 'sys2'
                }, {
                    'name': 'sys3'
                }],
                'per_page': 100,
                'total': 3
            }
            mocker.get(url, status_code=500, json=jsonresult)
            with self.assertRaises(SatelliteException):
                self.api.hosts()

    def test_host_fields(self):
        """Test the method host_fields.

        Verifies the raw Satellite host-details payload is mapped to
        the flat fact dictionary (registration, virt, network facts).
        """
        host_field_url = 'https://{sat_host}:{port}/api/v2/hosts/{host_id}'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=host_field_url,
                                sat_host='1.2.3.4',
                                host_id=1)
            jsonresult = {
                'architecture_id': 1,
                'architecture_name': 'x86_64',
                'operatingsystem_name': 'RedHat 7.4',
                'uuid': None,
                'created_at': '2017-12-04 13:19:57 UTC',
                'updated_at': '2017-12-04 13:21:47 UTC',
                'organization_name': 'ACME',
                'location_name': 'Raleigh',
                'name': 'mac52540071bafe.prov.lan',
                'virtual_host': {
                    'uuid': '100',
                    'name': 'vhost1'
                },
                'virtual_guests': [{
                    'name': 'foo'
                }],
                'content_facet_attributes': {
                    'id': 11,
                    'katello_agent_installed': False
                },
                'subscription_facet_attributes': {
                    'uuid': '00c7a108-48ec-4a97-835c-aa3369777f64',
                    'last_checkin': '2018-01-04 17:36:07 UTC',
                    'registered_at': '2017-12-04 13:33:52 UTC',
                    'registered_by': 'sat-r220-07.lab.eng.rdu2.redhat.com',
                    'virtual_host': {
                        'uuid': '100',
                        'name': 'vhost1'
                    },
                    'virtual_guests': [{
                        'name': 'foo'
                    }],
                },
                'facts': {
                    'memorysize_mb': '992.45',
                    'memorysize': '992.45 MB',
                    'hostname': 'fdi',
                    'type': 'Other',
                    'architecture': 'x86_64',
                    'is_virtual': 'true',
                    'virtual': 'kvm',
                    'net.interface.ipv4_address': '192.168.99.123',
                    'net.interface.mac_address': 'fe80::5054:ff:fe24:946e',
                },
            }
            mocker.get(url, status_code=200, json=jsonresult)
            host_info = host_fields(1, jsonresult)
            expected = {
                'uuid': '00c7a108-48ec-4a97-835c-aa3369777f64',
                'hostname': 'mac52540071bafe.prov.lan',
                'registered_by': 'sat-r220-07.lab.eng.rdu2.redhat.com',
                'registration_time': '2017-12-04 13:33:52 UTC',
                'last_checkin_time': '2018-01-04 17:36:07 UTC',
                'katello_agent_installed': False,
                'os_release': 'RedHat 7.4',
                'organization': 'ACME',
                'virtual_host_uuid': '100',
                'virtual_host_name': 'vhost1',
                'virt_type': None,
                'kernel_version': None,
                'architecture': None,
                'is_virtualized': None,
                'cores': None,
                'num_sockets': None,
                'num_virtual_guests': 1,
                'virtual': 'hypervisor',
                'location': 'Raleigh',
                'ip_addresses': ['192.168.99.123'],
                'mac_addresses': ['fe80::5054:ff:fe24:946e'],
                'os_name': 'RedHat',
                'os_version': '7.4'
            }
            self.assertEqual(host_info, expected)

    def test_prepare_host_s61(self):
        """Test the prepare host method for satellite 6.1."""
        url1 = \
            'https://{sat_host}:{port}/katello/api' \
            '/v2/organizations/{org_id}/systems/{host_id}'
        url2 = \
            'https://{sat_host}:{port}/katello/api' \
            '/v2/organizations/{org_id}/systems/{host_id}/subscriptions'
        # prepare_host returns one tuple per host:
        # (task, logging options, host id, host name, urls, request options)
        expected = [(self.scan_task, {
            'job_id': self.scan_job.id,
            'task_sequence_number': self.scan_task.sequence_number,
            'scan_type': self.scan_task.scan_type,
            'source_type': self.scan_task.source.source_type,
            'source_name': self.scan_task.source.name
        }, 1, 'sys', url1, url2, {
            'host': {
                'id': 1,
                'name': 'sys'
            },
            'port': '443',
            'user': self.cred.username,
            'password': self.cred.password,
            'ssl_cert_verify': True
        })]
        host = {'id': 1, 'name': 'sys'}
        chunk = [host]
        port = '443'
        user = self.cred.username
        password = self.cred.password
        connect_data_return_value = host, port, user, password
        with patch('scanner.satellite.utils.get_connect_data',
                   return_value=connect_data_return_value) as mock_connect:
            host_params = SatelliteSixV1.prepare_host(self.api, chunk)
            self.assertEqual(expected, host_params)
            mock_connect.assert_called_once_with(ANY)

    def test_request_host_details_err(self):
        """Test request_host_details for error mark a failed system."""
        host_field_url = 'https://{sat_host}:{port}/api/v2/hosts/{host_id}'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=host_field_url,
                                sat_host='1.2.3.4',
                                host_id=1)
            mocker.get(url, status_code=500)
            result = request_host_details(
                self.scan_task, {
                    'job_id': self.scan_job.id,
                    'task_sequence_number': self.scan_task.id,
                    'scan_type': self.scan_task.scan_type,
                    'source_type': self.scan_task.source.source_type,
                    'source_name': self.scan_task.source.name
                }, 1, 'sys', url, url, {})
            # The failure is reported in the raw result dict; no
            # inspection system record is persisted at this stage.
            expected = {
                'unique_name': 'sys_1',
                'system_inspection_result': 'failed',
                'host_fields_response': {},
                'host_subscriptions_response': {}
            }
            self.assertEqual(result, expected)
            inspect_result = self.scan_task.inspection_result
            self.assertEqual(len(inspect_result.systems.all()), 0)

    def test_post_processing(self):
        """Test process_results method with mock data.

        The processed facts should be the union of the host-fields
        payload and the subscriptions payload, persisted on a
        successful 'sys_1' inspection result.
        """
        fields_return_value = {
            'uuid': '00c7a108-48ec-4a97-835c-aa3369777f64',
            'hostname': 'mac52540071bafe.prov.lan',
            'registered_by': 'sat-r220-07.lab.eng.rdu2.redhat.com',
            'registration_time': '2017-12-04 13:33:52 UTC',
            'last_checkin_time': '2018-01-04 17:36:07 UTC',
            'katello_agent_installed': False,
            'os_name': 'RedHat 7.4',
            'organization': 'ACME',
            'virtual_host_uuid': '100',
            'virtual_host_name': 'vhost1',
            'virt_type': None,
            'kernel_version': None,
            'architecture': None,
            'is_virtualized': None,
            'cores': None,
            'num_sockets': None,
            'num_virtual_guests': 1,
            'virtual': 'hypervisor',
            'location': 'Raleigh',
            'ip_addresses': ['192.168.99.123'],
            'ipv6_addresses': ['fe80::5054:ff:fe24:946e']
        }
        subs_return_value = {
            'entitlements': [{
                'derived_entitlement': False,
                'name': 'Satellite Tools 6.3',
                'amount': 1,
                'account_number': None,
                'contract_number': None,
                'start_date': '2017-12-01 14:50:59 UTC',
                'end_date': '2047-11-24 14:50:59 UTC'
            }, {
                'derived_entitlement': True,
                'name': 'Employee SKU',
                'amount': 1,
                'account_number': 1212729,
                'contract_number': 10913844,
                'start_date': '2016-03-24 04:00:00 UTC',
                'end_date': '2022-01-01 04:59:59 UTC'
            }]
        }
        # Note: None-valued fields from fields_return_value are dropped
        # from the persisted facts.
        expected = {
            'uuid': '00c7a108-48ec-4a97-835c-aa3369777f64',
            'hostname': 'mac52540071bafe.prov.lan',
            'registered_by': 'sat-r220-07.lab.eng.rdu2.redhat.com',
            'registration_time': '2017-12-04 13:33:52 UTC',
            'last_checkin_time': '2018-01-04 17:36:07 UTC',
            'katello_agent_installed': False,
            'os_name': 'RedHat 7.4',
            'organization': 'ACME',
            'virtual_host_uuid': '100',
            'virtual_host_name': 'vhost1',
            'num_virtual_guests': 1,
            'virtual': 'hypervisor',
            'location': 'Raleigh',
            'ip_addresses': ['192.168.99.123'],
            'ipv6_addresses': ['fe80::5054:ff:fe24:946e'],
            'entitlements': [{
                'derived_entitlement': False,
                'name': 'Satellite Tools 6.3',
                'amount': 1,
                'account_number': None,
                'contract_number': None,
                'start_date': '2017-12-01 14:50:59 UTC',
                'end_date': '2047-11-24 14:50:59 UTC'
            }, {
                'derived_entitlement': True,
                'name': 'Employee SKU',
                'amount': 1,
                'account_number': 1212729,
                'contract_number': 10913844,
                'start_date': '2016-03-24 04:00:00 UTC',
                'end_date': '2022-01-01 04:59:59 UTC'
            }]
        }
        self.scan_task.save()
        self.scan_task.update_stats('TEST_SAT.', sys_scanned=0)
        with patch('scanner.satellite.six.host_fields',
                   return_value=fields_return_value) as mock_fields:
            with patch('scanner.satellite.six.host_subscriptions',
                       return_value=subs_return_value) as mock_subs:
                result = {
                    'unique_name': 'sys_1',
                    'system_inspection_result':
                        SystemInspectionResult.SUCCESS,
                    'host_fields_response': fields_return_value,
                    'host_subscriptions_response': subs_return_value
                }
                process_results(self.api, [result], 1)
                inspect_results = \
                    self.scan_task.inspection_result.systems.all()
                sys_1_result = inspect_results.filter(name='sys_1').first()
                self.assertEqual(sys_1_result.name, 'sys_1')
                self.assertEqual(sys_1_result.status, 'success')
                result = {}
                # Facts are persisted as JSON strings; decode before
                # comparing.
                for fact in sys_1_result.facts.all():
                    result[fact.name] = json.loads(fact.value)
                self.assertEqual(result, expected)
                mock_fields.assert_called_once_with(ANY, ANY)
                mock_subs.assert_called_once_with(ANY)

    @patch('scanner.satellite.six.SatelliteSixV1.get_orgs')
    def test_hosts_facts_with_err(self, mock_get_orgs):
        """Test the hosts_facts method with a server error."""
        mock_get_orgs.return_value = [1]
        hosts_url = 'https://{sat_host}:{port}/katello/api' \
            '/v2/organizations/{org_id}/systems'
        with requests_mock.Mocker() as mocker:
            url = construct_url(url=hosts_url, sat_host='1.2.3.4', org_id=1)
            mocker.get(url, status_code=500)
            with self.assertRaises(SatelliteException):
                self.api.hosts_facts(Value('i', ScanJob.JOB_RUN))

    @patch('multiprocessing.pool.Pool.starmap',
           return_value=[{
               'unique_name': 'sys_1',
               'system_inspection_result': 'failed',
               'host_fields_response': {},
               'host_subscriptions_response': {}
           }])
    def test_hosts_facts(self, mock_pool):
        """Test the hosts_facts method.

        The worker pool is mocked to return a single raw result, so
        exactly one inspection system record must be persisted.
        """
        hosts_url = 'https://{sat_host}:{port}/katello/api' \
            '/v2/organizations/{org_id}/systems'
        with patch.object(SatelliteSixV1, 'get_orgs', return_value=[1]):
            with patch('scanner.satellite.six.request_host_details',
                       return_value={}):
                with requests_mock.Mocker() as mocker:
                    url = construct_url(url=hosts_url,
                                        sat_host='1.2.3.4',
                                        org_id=1)
                    jsonresult = {
                        'results': [{
                            'uuid': '1',
                            'name': 'sys1'
                        }, {
                            'uuid': '2',
                            'name': 'sys2'
                        }, {
                            'uuid': '3',
                            'name': 'sys3'
                        }],
                        'per_page': 100,
                        'total': 3
                    }
                    mocker.get(url, status_code=200, json=jsonresult)
                    self.api.hosts_facts(Value('i', ScanJob.JOB_RUN))
                    inspect_result = self.scan_task.inspection_result
                    self.assertEqual(len(inspect_result.systems.all()), 1)
class HostScannerTest(TestCase):
    """Tests against the HostScanner class and functions."""

    # pylint: disable=too-many-instance-attributes
    def setUp(self):
        """Create test case setup.

        Wires a network source with one host range, a completed
        connect task plus a pending inspect task (with prerequisite),
        a scan job, and connection results with one success and one
        failure so the inspect runner has data to consume.
        """
        self.cred = Credential(name='cred1',
                               username='******',
                               password='******',
                               sudo_password=None,
                               ssh_keyfile=None)
        self.cred.save()
        self.source = Source(name='source1', port=22)
        self.source.save()
        self.source.credentials.add(self.cred)
        self.host = HostRange(host_range='1.2.3.4',
                              source_id=self.source.id)
        self.host.save()
        self.source.hosts.add(self.host)
        self.connect_scan_task = ScanTask(
            source=self.source,
            scan_type=ScanTask.SCAN_TYPE_CONNECT,
            status=ScanTask.COMPLETED)
        self.connect_scan_task.systems_failed = 0
        self.connect_scan_task.save()
        self.inspect_scan_task = ScanTask(
            source=self.source,
            scan_type=ScanTask.SCAN_TYPE_INSPECT)
        self.inspect_scan_task.systems_failed = 0
        self.inspect_scan_task.save()
        # Inspect may only run after connect completes.
        self.inspect_scan_task.prerequisites.add(self.connect_scan_task)
        self.inspect_scan_task.save()
        self.scan_job = ScanJob(scan_type=ScanTask.SCAN_TYPE_INSPECT)
        self.scan_job.save()
        self.scan_job.tasks.add(self.connect_scan_task)
        self.scan_job.tasks.add(self.inspect_scan_task)
        scan_options = ScanOptions()
        scan_options.save()
        self.scan_job.options = scan_options
        self.scan_job.save()
        self.fact_endpoint = 'http://testserver' + reverse('facts-list')
        self.conn_results = ConnectionResults(scan_job=self.scan_job)
        self.conn_results.save()
        self.conn_result = ConnectionResult(
            scan_task=self.connect_scan_task, source=self.source)
        self.conn_result.save()
        success_sys = SystemConnectionResult(
            name='1.2.3.4',
            credential=self.cred,
            status=SystemConnectionResult.SUCCESS)
        success_sys.save()
        failed_sys = SystemConnectionResult(
            name='1.1.1.2', status=SystemConnectionResult.FAILED)
        failed_sys.save()
        self.conn_result.systems.add(success_sys)
        self.conn_result.systems.add(failed_sys)
        self.conn_result.save()
        self.conn_results.results.add(self.conn_result)
        self.conn_results.save()
        self.inspect_results = InspectionResults(scan_job=self.scan_job)
        self.inspect_results.save()

    def test_scan_inventory(self):
        """Test construct ansible inventory dictionary."""
        serializer = SourceSerializer(self.source)
        source = serializer.data
        connection_port = source['port']
        hc_serializer = CredentialSerializer(self.cred)
        cred = hc_serializer.data
        # forks=50 with a single host yields one group.
        inventory_dict = construct_scan_inventory([('1.2.3.4', cred)],
                                                  connection_port, 50)
        expected = {
            'all': {
                'children': {
                    'group_0': {
                        'hosts': {
                            '1.2.3.4': {
                                'ansible_user': '******',
                                'ansible_ssh_pass': '******',
                                'ansible_host': '1.2.3.4'
                            }
                        }
                    }
                },
                'vars': {
                    'ansible_port': 22
                }
            }
        }
        self.assertEqual(inventory_dict[1], expected)

    def test_scan_inventory_grouping(self):
        """Test construct ansible inventory dictionary.

        With forks=1 each host lands in its own group.
        """
        serializer = SourceSerializer(self.source)
        source = serializer.data
        connection_port = source['port']
        hc_serializer = CredentialSerializer(self.cred)
        cred = hc_serializer.data
        inventory_dict = construct_scan_inventory(
            [('1.2.3.1', cred),
             ('1.2.3.2', cred),
             ('1.2.3.3', cred),
             ('1.2.3.4', cred)],
            connection_port, 1)
        expected = {
            'all': {
                'children': {
                    'group_0': {
                        'hosts': {
                            '1.2.3.1': {
                                'ansible_user': '******',
                                'ansible_ssh_pass': '******',
                                'ansible_host': '1.2.3.1'
                            }
                        }
                    },
                    'group_1': {
                        'hosts': {
                            '1.2.3.2': {
                                'ansible_user': '******',
                                'ansible_ssh_pass': '******',
                                'ansible_host': '1.2.3.2'
                            }
                        }
                    },
                    'group_2': {
                        'hosts': {
                            '1.2.3.3': {
                                'ansible_user': '******',
                                'ansible_ssh_pass': '******',
                                'ansible_host': '1.2.3.3'
                            }
                        }
                    },
                    'group_3': {
                        'hosts': {
                            '1.2.3.4': {
                                'ansible_user': '******',
                                'ansible_ssh_pass': '******',
                                'ansible_host': '1.2.3.4'
                            }
                        }
                    }
                },
                'vars': {
                    'ansible_port': 22
                }
            }
        }
        self.assertEqual(inventory_dict[1], expected)

    @patch('scanner.network.utils.TaskQueueManager.run',
           side_effect=mock_run_failed)
    def test_inspect_scan_failure(self, mock_run):
        """Test scan flow with mocked manager and failure."""
        scanner = InspectTaskRunner(self.scan_job,
                                    self.inspect_scan_task,
                                    self.inspect_results)
        # Init for unit test as run is not called
        scanner.connect_scan_task = self.connect_scan_task
        with self.assertRaises(AnsibleError):
            scanner.inspect_scan()
            # NOTE(review): this assert sits after the raising call
            # inside assertRaises, so it never executes — confirm
            # whether it should be moved outside the with block.
            mock_run.assert_called()

    @patch('scanner.network.inspect.InspectTaskRunner.inspect_scan',
           side_effect=mock_scan_error)
    def test_inspect_scan_error(self, mock_scan):
        """Test that an inspect_scan error fails the task."""
        scanner = InspectTaskRunner(self.scan_job,
                                    self.inspect_scan_task,
                                    self.inspect_results)
        scan_task_status = scanner.run()
        mock_scan.assert_called_with()
        self.assertEqual(scan_task_status, ScanTask.FAILED)

    @patch('scanner.network.utils.TaskQueueManager.run',
           side_effect=mock_run_success)
    def test_inspect_scan_fail_no_facts(self, mock_run):
        """Test running a inspect scan with mocked connection.

        A scan that collects no facts must report FAILED.
        """
        expected = ([('1.2.3.4', {'name': 'cred1'})], [])
        mock_run.return_value = expected
        with requests_mock.Mocker() as mocker:
            mocker.post(self.fact_endpoint, status_code=201, json={'id': 1})
            scanner = InspectTaskRunner(self.scan_job,
                                        self.inspect_scan_task,
                                        self.inspect_results)
            scan_task_status = scanner.run()
            mock_run.assert_called_with(ANY)
            self.assertEqual(scan_task_status, ScanTask.FAILED)

    def test_populate_callback(self):
        """Test the population of the callback object for inspect scan."""
        callback = ResultCallback(scan_task=self.inspect_scan_task,
                                  inspect_results=self.inspect_results)
        host = Mock()
        host.name = '1.2.3.4'
        result = Mock(_host=host, _results={'rc': 3})
        # Exercise the unreachable-host callback path; the call must
        # not raise.
        callback.v2_runner_on_unreachable(result)
class SatelliteFiveTest(TestCase):
    """Tests Satellite 5 functions."""

    def setUp(self):
        """Create test case setup.

        Builds a Satellite credential/source, an inspect scan
        job/task, and a connection result containing one successful
        system ('sys1_1').
        """
        self.cred = Credential(
            name='cred1',
            cred_type=Credential.SATELLITE_CRED_TYPE,
            username='******',
            password='******',
            become_password=None,
            become_method=None,
            become_user=None,
            ssh_keyfile=None)
        self.cred.save()
        # hosts is stored as a JSON-encoded string per this codebase's
        # Source model convention.
        self.source = Source(name='source1', port=443, hosts='["1.2.3.4"]')
        self.source.save()
        self.source.credentials.add(self.cred)
        self.scan_job, self.scan_task = create_scan_job(
            self.source, ScanTask.SCAN_TYPE_INSPECT)
        self.api = SatelliteFive(self.scan_job, self.scan_task)
        connection_results = TaskConnectionResult()
        connection_results.save()
        self.api.connect_scan_task.connection_result = connection_results
        self.api.connect_scan_task.connection_result.save()
        sys_result = SystemConnectionResult(
            name='sys1_1', status=SystemInspectionResult.SUCCESS)
        sys_result.save()
        self.api.connect_scan_task.connection_result.systems.add(sys_result)
        self.api.connect_scan_task.connection_result.save()
        self.api.connect_scan_task.save()

    def tearDown(self):
        """Cleanup test case setup."""
        pass

    @patch('xmlrpc.client.ServerProxy')
    def test_host_count(self, mock_serverproxy):
        """Test the method host_count."""
        client = mock_serverproxy.return_value
        client.auth.login.return_value = 'key'
        client.auth.logout.return_value = 'key'
        client.system.list_user_systems.return_value = ['sys1',
                                                        'sys2',
                                                        'sys3']
        systems_count = self.api.host_count()
        self.assertEqual(systems_count, 3)

    @patch('xmlrpc.client.ServerProxy')
    def test_host_count_with_err(self, mock_serverproxy):
        """Test the method host_count with error."""
        client = mock_serverproxy.return_value
        # An XML-RPC fault during login must surface as a
        # SatelliteException.
        client.auth.login.side_effect = mock_xml_fault
        with self.assertRaises(SatelliteException):
            self.api.host_count()

    @patch('xmlrpc.client.ServerProxy')
    def test_hosts(self, mock_serverproxy):
        """Test the method hosts.

        Host names are returned as '<name>_<id>' unique names.
        """
        systems = [{
            'name': 'sys1',
            'id': 1
        }, {
            'name': 'sys2',
            'id': 2
        }, {
            'name': 'sys3',
            'id': 3
        }]
        client = mock_serverproxy.return_value
        client.auth.login.return_value = 'key'
        client.auth.logout.return_value = 'key'
        client.system.list_user_systems.return_value = systems
        systems_count = self.api.host_count()
        hosts = self.api.hosts()
        self.assertEqual(systems_count, 3)
        self.assertEqual(len(hosts), 3)
        self.assertEqual(hosts, ['sys1_1', 'sys2_2', 'sys3_3'])

    @patch('xmlrpc.client.ServerProxy')
    def test_hosts_with_err(self, mock_serverproxy):
        """Test the method hosts with error."""
        client = mock_serverproxy.return_value
        client.auth.login.side_effect = mock_xml_fault
        with self.assertRaises(SatelliteException):
            self.api.hosts()

    @patch('xmlrpc.client.ServerProxy')
    def test_host_details_virt_host(self, mock_serverproxy):
        """Test host_details method with mock data for virt host.

        A host present in the virt map is reported as a hypervisor
        with its guest count.
        """
        expected = {
            'uuid': 1,
            'name': 'sys1',
            'hostname': 'sys1_hostname',
            'last_checkin_time': '',
            'registration_time': 'datetime',
            'architecture': 'x86',
            'kernel_version': 'kernel',
            'cores': 2,
            'num_sockets': 2,
            'os_release': '7server',
            'entitlements': [{
                'name': 'ent1'
            }],
            'ip_addresses': ['1.2.3.4'],
            'mac_addresses': ['1:a:2:b:3:c'],
            'virtual': 'hypervisor',
            'num_virtual_guests': 3,
            'is_virtualized': False
        }
        client = mock_serverproxy.return_value
        client.auth.login.return_value = 'key'
        client.auth.logout.return_value = 'key'
        client.system.get_uuid.return_value = ''
        cpu = {'arch': 'x86', 'count': 2, 'socket_count': 2}
        client.system.get_cpu.return_value = cpu
        system_details = {'hostname': 'sys1_hostname', 'release': '7server'}
        client.system.get_details.return_value = system_details
        client.system.get_running_kernel.return_value = 'kernel'
        client.system.get_entitlements.return_value = ['ent1']
        net_devices = [{
            'interface': 'eth0',
            'ip': '1.2.3.4',
            'hardware_address': '1:a:2:b:3:c'
        }]
        client.system.get_network_devices.return_value = net_devices
        client.system.get_registration_date.return_value = 'datetime'
        # Host id 1 is a virtual host with 3 guests.
        virt = {1: {'id': 1, 'num_virtual_guests': 3}}
        logging_options = {
            'job_id': self.scan_job.id,
            'task_sequence_number': self.scan_task.sequence_number,
            'scan_type': self.scan_task.scan_type,
            'source_type': self.scan_task.source.source_type,
            'source_name': self.scan_task.source.name
        }
        raw_result = request_host_details(host_id=1,
                                          host_name='sys1',
                                          last_checkin='',
                                          scan_task=self.scan_task,
                                          request_options={},
                                          logging_options=logging_options)
        self.api.process_results([raw_result], virt, {1: 2}, [])
        inspect_results = \
            self.scan_task.inspection_result.systems.all()
        sys_1_result = inspect_results.filter(name='sys1_1').first()
        self.assertEqual(sys_1_result.name, 'sys1_1')
        self.assertEqual(sys_1_result.status, 'success')
        result = {}
        # Facts are persisted as JSON strings; decode before comparing.
        for fact in sys_1_result.facts.all():
            result[fact.name] = json.loads(fact.value)
        self.assertEqual(result, expected)

    @patch('xmlrpc.client.ServerProxy')
    def test_host_details_virt_guest(self, mock_serverproxy):
        """Test host_details method with mock data for virt guest.

        A host mapped to a virtual host id is reported as virtualized
        with its hypervisor's id and name.
        """
        expected = {
            'uuid': 1,
            'name': 'sys1',
            'hostname': 'sys1_hostname',
            'last_checkin_time': '',
            'registration_time': 'datetime',
            'architecture': 'x86',
            'kernel_version': 'kernel',
            'cores': 2,
            'num_sockets': 2,
            'os_release': '7server',
            'entitlements': [{
                'name': 'ent1'
            }],
            'ip_addresses': ['1.2.3.4'],
            'mac_addresses': ['1:a:2:b:3:c'],
            'is_virtualized': True,
            'virtual_host': 2,
            'virtual_host_name': 'sys2'
        }
        client = mock_serverproxy.return_value
        client.auth.login.return_value = 'key'
        client.auth.logout.return_value = 'key'
        client.system.get_uuid.return_value = ''
        cpu = {'arch': 'x86', 'count': 2, 'socket_count': 2}
        client.system.get_cpu.return_value = cpu
        system_details = {'hostname': 'sys1_hostname', 'release': '7server'}
        client.system.get_details.return_value = system_details
        client.system.get_running_kernel.return_value = 'kernel'
        client.system.get_entitlements.return_value = ['ent1']
        net_devices = [{
            'interface': 'eth0',
            'ip': '1.2.3.4',
            'hardware_address': '1:a:2:b:3:c'
        }]
        client.system.get_network_devices.return_value = net_devices
        client.system.get_registration_date.return_value = 'datetime'
        virt = {2: {'uuid': 2, 'name': 'sys2', 'num_virtual_guests': 3}}
        raw_result = request_host_details(host_id=1,
                                          host_name='sys1',
                                          last_checkin='',
                                          scan_task=self.scan_task,
                                          request_options={},
                                          logging_options=None)
        self.api.process_results([raw_result], virt, {1: 2}, [])
        inspect_results = \
            self.scan_task.inspection_result.systems.all()
        sys_1_result = inspect_results.filter(name='sys1_1').first()
        self.assertEqual(sys_1_result.name, 'sys1_1')
        self.assertEqual(sys_1_result.status, 'success')
        result = {}
        for fact in sys_1_result.facts.all():
            result[fact.name] = json.loads(fact.value)
        self.assertEqual(result, expected)

    def test_prepare_host_s5(self):
        """Test the prepare host method for satellite 5."""
        # prepare_host returns one tuple per host:
        # (host id, name, last_checkin, task, request options,
        #  logging options)
        expected = [(1, 'sys', '', self.scan_task, {
            'host': {
                'id': 1,
                'name': 'sys',
                'last_checkin': ''
            },
            'port': '443',
            'user': self.cred.username,
            'password': self.cred.password,
            'ssl_cert_verify': True
        }, {
            'job_id': self.scan_job.id,
            'task_sequence_number': self.scan_task.sequence_number,
            'scan_type': self.scan_task.scan_type,
            'source_type': self.scan_task.source.source_type,
            'source_name': self.scan_task.source.name
        })]
        host = {'id': 1, 'name': 'sys', 'last_checkin': ''}
        chunk = [host]
        port = '443'
        user = self.cred.username
        password = self.cred.password
        connect_data_return_value = host, port, user, password
        with patch('scanner.satellite.utils.get_connect_data',
                   return_value=connect_data_return_value) as mock_connect:
            host_params = self.api.prepare_host(chunk)
            self.assertEqual(expected, host_params)
            mock_connect.assert_called_once_with(ANY)

    @patch('xmlrpc.client.ServerProxy')
    def test_host_details_with_err(self, mock_serverproxy):
        """Test the host details with an error.

        A failed detail request still produces an inspection system
        record, marked failed with no facts.
        """
        client = mock_serverproxy.return_value
        client.auth.login.side_effect = mock_xml_fault
        virt = {2: {'uuid': 2, 'name': 'sys2', 'num_virtual_guests': 3}}
        raw_result = request_host_details(host_id=2,
                                          host_name='sys2',
                                          last_checkin='',
                                          scan_task=self.scan_task,
                                          request_options={},
                                          logging_options=None)
        self.api.process_results([raw_result], virt, {1: 2}, [])
        inspect_results = \
            self.scan_task.inspection_result.systems.all()
        sys_1_result = inspect_results.filter(name='sys2_2').first()
        self.assertEqual(sys_1_result.name, 'sys2_2')
        self.assertEqual(sys_1_result.status, 'failed')
        result = {}
        for fact in sys_1_result.facts.all():
            result[fact.name] = json.loads(fact.value)
        self.assertEqual(result, {})

    @patch('xmlrpc.client.ServerProxy')
    def test_virtual_guests_with_err(self, mock_serverproxy):
        """Test the virtual_guests method with an error."""
        client = mock_serverproxy.return_value
        client.auth.login.side_effect = mock_xml_fault
        with self.assertRaises(SatelliteException):
            self.api.virtual_guests(1)

    @patch('xmlrpc.client.ServerProxy')
    def test_virtual_guests(self, mock_serverproxy):
        """Test the virtual_guests method."""
        client = mock_serverproxy.return_value
        client.auth.login.return_value = 'key'
        client.auth.logout.return_value = 'key'
        guests = [{'id': 2}]
        client.system.list_virtual_guests.return_value = guests
        virt_guests = self.api.virtual_guests(1)
        # Returns (guest id -> host id mapping, guest count).
        self.assertEqual(virt_guests, ({2: 1}, 1))

    @patch('xmlrpc.client.ServerProxy')
    def test_virtual_hosts_with_err(self, mock_serverproxy):
        """Test the virtual_hosts method with an error."""
        client = mock_serverproxy.return_value
        client.auth.login.side_effect = mock_xml_fault
        with self.assertRaises(SatelliteException):
            self.api.virtual_hosts()

    @patch('xmlrpc.client.ServerProxy')
    def test_virtual_hosts(self, mock_serverproxy):
        """Test the virtual_hosts method."""
        client = mock_serverproxy.return_value
        client.auth.login.return_value = 'key'
        client.auth.logout.return_value = 'key'
        guests = [{'id': 2}]
        client.system.list_virtual_guests.return_value = guests
        hosts = [{'id': 1, 'name': 'host1'}]
        client.system.list_virtual_hosts.return_value = hosts
        client.system.get_uuid.return_value = ''
        virtual_hosts, virtual_guests = self.api.virtual_hosts()
        virt_host = {
            1: {
                'id': 1,
                'name': 'host1',
                'uuid': 1,
                'num_virtual_guests': 1
            }
        }
        virt_guest = {2: 1}
        self.assertEqual(virtual_hosts, virt_host)
        self.assertEqual(virtual_guests, virt_guest)

    @patch('xmlrpc.client.ServerProxy')
    def test_physical_hosts_with_err(self, mock_serverproxy):
        """Test the physical_hosts method with an error."""
        client = mock_serverproxy.return_value
        client.auth.login.side_effect = mock_xml_fault
        with self.assertRaises(SatelliteException):
            self.api.physical_hosts()

    @patch('xmlrpc.client.ServerProxy')
    def test_physical_hosts(self, mock_serverproxy):
        """Test the physical_hosts method."""
        client = mock_serverproxy.return_value
        client.auth.login.return_value = 'key'
        client.auth.logout.return_value = 'key'
        hosts = [{'id': 1, 'name': 'host1'}]
        client.system.list_physical_systems.return_value = hosts
        phyiscal_hosts = self.api.physical_hosts()
        self.assertEqual(phyiscal_hosts, [1])

    @patch('xmlrpc.client.ServerProxy')
    def test_hosts_facts_with_err(self, mock_serverproxy):
        """Test the hosts_facts method with an error."""
        client = mock_serverproxy.return_value
        client.auth.login.side_effect = mock_xml_fault
        with self.assertRaises(SatelliteException):
            self.api.hosts_facts(Value('i', ScanJob.JOB_RUN))

    @patch('multiprocessing.pool.Pool.starmap',
           return_value=[{
               'host_name': 'sys10',
               'last_checkin': '',
               'host_id': 1,
               'cpu': {},
               'uuid': 1,
               'system_details': {},
               'kernel': '',
               'subs': [],
               'network_devices': [],
               'registration_date': '',
               'system_inspection_result': SystemInspectionResult.SUCCESS
           }])
    @patch('xmlrpc.client.ServerProxy')
    def test_hosts_facts(self, mock_serverproxy, mock_pool):
        """Test the hosts_facts method.

        The worker pool is mocked to return one raw host result, so
        exactly one inspection system record must be persisted.
        """
        # pylint: disable=unused-argument
        systems = [{'id': 1, 'name': 'sys1'}]
        client = mock_serverproxy.return_value
        client.auth.login.return_value = 'key'
        client.auth.logout.return_value = 'key'
        client.system.list_user_systems.return_value = systems
        hosts_return_value = ({}, {})
        with patch.object(SatelliteFive, 'virtual_hosts',
                          return_value=hosts_return_value) as mock_vhosts:
            with patch.object(SatelliteFive, 'physical_hosts',
                              return_value=[]) as mock_physical:
                self.api.hosts_facts(Value('i', ScanJob.JOB_RUN))
                inspect_result = self.scan_task.inspection_result
                self.assertEqual(len(inspect_result.systems.all()), 1)
                mock_vhosts.assert_called_once_with()
                mock_physical.assert_called_once_with()
class ScanJobTest(TestCase):
    """Test the basic ScanJob infrastructure."""

    def setUp(self):
        """Create test setup."""
        self.cred = Credential.objects.create(name='cred1',
                                              username='******',
                                              password='******',
                                              sudo_password=None,
                                              ssh_keyfile=None)
        self.cred_for_upload = self.cred.id

        self.source = Source(name='source1',
                             source_type='network',
                             port=22)
        self.source.save()
        self.source.credentials.add(self.cred)

    def create(self, data):
        """Call the create endpoint."""
        url = reverse('scanjob-list')
        return self.client.post(url, json.dumps(data), 'application/json')

    def create_expect_400(self, data, expected_response):
        """We will do a lot of create tests that expect HTTP 400s."""
        response = self.create(data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        response_json = response.json()
        self.assertEqual(response_json, expected_response)

    def create_expect_201(self, data):
        """Create a source, return the response as a dict."""
        response = self.create(data)
        response_json = response.json()
        if response.status_code != status.HTTP_201_CREATED:
            # Dump the body to make CI failures diagnosable.
            print('Cause of failure: ')
            print(response_json)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        return response_json

    def test_queue_task(self):
        """Test create queue state change."""
        scan_job = ScanJob(scan_type=ScanTask.SCAN_TYPE_INSPECT)
        scan_job.save()
        scan_job.sources.add(self.source)

        # Job starts in the created state with no tasks.
        self.assertEqual(scan_job.status, ScanTask.CREATED)
        tasks = scan_job.tasks.all()
        self.assertEqual(len(tasks), 0)

        # Queue job to run.
        scan_job.queue()

        # Job should now be pending.
        self.assertEqual(scan_job.status, ScanTask.PENDING)

        # Queuing an inspect job creates a connect and an inspect task.
        tasks = scan_job.tasks.all()
        self.assertEqual(len(tasks), 2)

        connect_task = tasks[0]
        self.assertEqual(connect_task.scan_type, ScanTask.SCAN_TYPE_CONNECT)
        self.assertEqual(connect_task.status, ScanTask.PENDING)

        inspect_task = tasks[1]
        self.assertEqual(inspect_task.scan_type, ScanTask.SCAN_TYPE_INSPECT)
        self.assertEqual(inspect_task.status, ScanTask.PENDING)

    def test_queue_invalid_state_changes(self):
        """Test create queue failed."""
        scan_job = ScanJob(status=ScanTask.FAILED,
                           scan_type=ScanTask.SCAN_TYPE_INSPECT)
        scan_job.save()
        scan_job.sources.add(self.source)

        # A failed job must ignore every state transition request.
        scan_job.queue()
        self.assertEqual(scan_job.status, ScanTask.FAILED)

        scan_job.complete()
        self.assertEqual(scan_job.status, ScanTask.FAILED)

        scan_job.pause()
        self.assertEqual(scan_job.status, ScanTask.FAILED)

        scan_job.start()
        self.assertEqual(scan_job.status, ScanTask.FAILED)

        scan_job.cancel()
        self.assertEqual(scan_job.status, ScanTask.FAILED)

        scan_job.restart()
        self.assertEqual(scan_job.status, ScanTask.FAILED)

        scan_job.fail()
        self.assertEqual(scan_job.status, ScanTask.FAILED)

        # fail() is not valid from the created state.
        scan_job.status = ScanTask.CREATED
        scan_job.fail()
        self.assertEqual(scan_job.status, ScanTask.CREATED)

        # complete() is valid from the running state.
        scan_job.status = ScanTask.RUNNING
        scan_job.complete()
        self.assertEqual(scan_job.status, ScanTask.COMPLETED)

    @patch('api.scanjob.view.start_scan', side_effect=dummy_start)
    def test_start_task(self, start_scan):
        """Test start pending task."""
        scan_job = ScanJob(scan_type=ScanTask.SCAN_TYPE_CONNECT)
        scan_job.save()
        scan_job.sources.add(self.source)

        # Job in created state.
        tasks = scan_job.tasks.all()

        # Queue job to run.
        scan_job.queue()
        self.assertEqual(scan_job.status, ScanTask.PENDING)

        # A connect-only job gets a single task.
        tasks = scan_job.tasks.all()
        self.assertEqual(len(tasks), 1)

        # Start job.
        scan_job.start()

    @patch('api.scanjob.view.start_scan', side_effect=dummy_start)
    def test_pause_restart_task(self, start_scan):
        """Test pause and restart task."""
        scan_job = ScanJob(scan_type=ScanTask.SCAN_TYPE_CONNECT)
        scan_job.save()
        scan_job.sources.add(self.source)

        # Job in created state.
        tasks = scan_job.tasks.all()

        # Queue job to run.
        scan_job.queue()
        self.assertEqual(scan_job.status, ScanTask.PENDING)

        # Queue should have created one pending connect task.
        tasks = scan_job.tasks.all()
        self.assertEqual(len(tasks), 1)
        connect_task = scan_job.tasks.first()
        self.assertEqual(connect_task.status, ScanTask.PENDING)

        # Start, then pause: job and task both end up paused.
        scan_job.start()
        self.assertEqual(scan_job.status, ScanTask.RUNNING)

        scan_job.pause()
        connect_task = scan_job.tasks.first()
        self.assertEqual(scan_job.status, ScanTask.PAUSED)
        self.assertEqual(connect_task.status, ScanTask.PAUSED)

        # Restart: job runs again, task goes back to pending.
        scan_job.restart()
        connect_task = scan_job.tasks.first()
        self.assertEqual(scan_job.status, ScanTask.RUNNING)
        self.assertEqual(connect_task.status, ScanTask.PENDING)

        # Cancel propagates to the task as well.
        scan_job.cancel()
        connect_task = scan_job.tasks.first()
        self.assertEqual(scan_job.status, ScanTask.CANCELED)
        self.assertEqual(connect_task.status, ScanTask.CANCELED)

    @patch('api.scanjob.view.start_scan', side_effect=dummy_start)
    def test_successful_create(self, start_scan):
        """A valid create request should succeed."""
        data = {
            'sources': [self.source.id],
            'scan_type': ScanTask.SCAN_TYPE_CONNECT
        }
        response = self.create_expect_201(data)
        self.assertIn('id', response)

    def test_create_no_source(self):
        """A create request must have a source."""
        self.create_expect_400({}, {'sources': ['This field is required.']})

    def test_create_invalid_scan_type(self):
        """A create request must have a valid scan_type."""
        data = {'sources': [self.source.id], 'scan_type': 'foo'}
        self.create_expect_400(
            data, {'scan_type': ['"foo" is not a valid choice.']})

    @patch('api.scanjob.view.start_scan', side_effect=dummy_start)
    def test_create_default_host_type(self, start_scan):
        """A valid create request should succeed with defaulted type."""
        data = {'sources': [self.source.id]}
        response = self.create_expect_201(data)
        self.assertIn('id', response)
        self.assertIn('scan_type', response)
        # Omitted scan_type defaults to inspect.
        self.assertEqual(response['scan_type'], ScanTask.SCAN_TYPE_INSPECT)

    def test_create_invalid_source(self):
        """The Source field must be valid."""
        self.create_expect_400(
            {'sources': -1},
            {'sources': ['Expected a list of items but got type "int".']})

    def test_create_invalid_forks(self):
        """Test valid number of forks."""
        data = {
            'sources': [self.source.id],
            'options': {
                'max_concurrency': -5
            }
        }
        self.create_expect_400(
            data, {
                'options': {
                    'max_concurrency':
                    ['Ensure this value is greater than or equal '
                     'to 1.']
                }
            })

    @patch('api.scanjob.view.start_scan', side_effect=dummy_start)
    def test_list(self, start_scan):
        """List all ScanJob objects."""
        data_default = {'sources': [self.source.id]}
        data_discovery = {
            'sources': [self.source.id],
            'scan_type': ScanTask.SCAN_TYPE_CONNECT
        }
        self.create_expect_201(data_default)
        self.create_expect_201(data_discovery)

        url = reverse('scanjob-list')
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        content = response.json()
        expected = [{
            'id': 1,
            'options': {
                'max_concurrency': 50
            },
            'sources': [{
                'id': 1,
                'name': 'source1',
                'source_type': 'network'
            }],
            'scan_type': ScanTask.SCAN_TYPE_INSPECT,
            'status': 'created'
        }, {
            'id': 2,
            'options': {
                'max_concurrency': 50
            },
            'sources': [{
                'id': 1,
                'name': 'source1',
                'source_type': 'network'
            }],
            'scan_type': ScanTask.SCAN_TYPE_CONNECT,
            'status': 'created'
        }]
        self.assertEqual(content, expected)

    @patch('api.scanjob.view.start_scan', side_effect=dummy_start)
    def test_filtered_list(self, start_scan):
        """List filtered ScanJob objects."""
        data_default = {'sources': [self.source.id]}
        data_discovery = {
            'sources': [self.source.id],
            'scan_type': ScanTask.SCAN_TYPE_CONNECT
        }
        self.create_expect_201(data_default)
        self.create_expect_201(data_discovery)

        url = reverse('scanjob-list')

        # Filtering by scan_type returns only the connect job.
        response = self.client.get(
            url, {'scan_type': ScanTask.SCAN_TYPE_CONNECT})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        content = response.json()
        expected = [{
            'id': 2,
            'options': {
                'max_concurrency': 50
            },
            'sources': [{
                'id': 1,
                'name': 'source1',
                'source_type': 'network'
            }],
            'scan_type': ScanTask.SCAN_TYPE_CONNECT,
            'status': 'created'
        }]
        self.assertEqual(content, expected)

        # No job is in the pending state.
        response = self.client.get(url, {'status': ScanTask.PENDING})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        content = response.json()
        expected = []
        self.assertEqual(content, expected)

        # Both jobs are in the created state.
        response = self.client.get(url, {'status': ScanTask.CREATED})
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        content = response.json()
        expected = [{
            'id': 1,
            'options': {
                'max_concurrency': 50
            },
            'sources': [{
                'id': 1,
                'name': 'source1',
                'source_type': 'network'
            }],
            'scan_type': ScanTask.SCAN_TYPE_INSPECT,
            'status': 'created'
        }, {
            'id': 2,
            'options': {
                'max_concurrency': 50
            },
            'sources': [{
                'id': 1,
                'name': 'source1',
                'source_type': 'network'
            }],
            'scan_type': ScanTask.SCAN_TYPE_CONNECT,
            'status': 'created'
        }]
        self.assertEqual(content, expected)

    @patch('api.scanjob.view.start_scan', side_effect=dummy_start)
    def test_retrieve(self, start_scan):
        """Get ScanJob details by primary key."""
        data_discovery = {
            'sources': [self.source.id],
            'scan_type': ScanTask.SCAN_TYPE_CONNECT
        }
        initial = self.create_expect_201(data_discovery)

        url = reverse('scanjob-detail', args=(initial['id'],))
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertIn('sources', response.json())
        sources = response.json()['sources']
        self.assertEqual(sources, [{
            'id': 1,
            'name': 'source1',
            'source_type': 'network'
        }])

    @patch('api.scanjob.view.start_scan', side_effect=dummy_start)
    def test_details(self, start_scan):
        """Get ScanJob result details by primary key."""
        scan_job = ScanJob(scan_type=ScanTask.SCAN_TYPE_INSPECT)
        scan_job.save()
        scan_job.sources.add(self.source)

        # Job in created state with no tasks yet.
        self.assertEqual(scan_job.status, ScanTask.CREATED)
        tasks = scan_job.tasks.all()
        self.assertEqual(len(tasks), 0)

        # Queue job to run.
        scan_job.queue()
        conn_task = scan_job.tasks.first()

        # Build a connection result with one successful system.
        conn_results = ConnectionResults(scan_job=scan_job)
        conn_results.save()
        conn_result = ConnectionResult(source=conn_task.source,
                                       scan_task=conn_task)
        conn_result.save()
        conn_results.results.add(conn_result)
        conn_results.save()
        sys_result = SystemConnectionResult(
            name='Foo',
            credential=self.cred,
            status=SystemConnectionResult.SUCCESS)
        sys_result.save()
        conn_result.systems.add(sys_result)
        conn_result.save()

        # Build an inspection result with one system and one raw fact.
        inspect_task = scan_job.tasks.all()[1]
        inspect_results = InspectionResults(scan_job=scan_job)
        inspect_results.save()
        inspect_result = InspectionResult(source=inspect_task.source,
                                          scan_task=inspect_task)
        inspect_result.save()
        inspect_results.results.add(inspect_result)
        inspect_results.save()
        sys_result = SystemInspectionResult(
            name='Foo', status=SystemConnectionResult.SUCCESS)
        sys_result.save()
        fact = RawFact(name='foo', value='value')
        fact.save()
        sys_result.facts.add(fact)
        sys_result.save()
        inspect_result.systems.add(sys_result)
        inspect_result.save()

        url = reverse('scanjob-detail', args=(scan_job.id,)) + 'results/'
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        json_response = response.json()
        self.assertIn('connection_results', json_response)
        self.assertIn('inspection_results', json_response)
        self.assertEqual(
            json_response, {
                'connection_results': {
                    'scan_job': 1,
                    'results': []
                },
                'inspection_results': {
                    'scan_job': 1,
                    'results': []
                }
            })

    @patch('api.scanjob.view.start_scan', side_effect=dummy_start)
    def test_update_not_allowed(self, start_scan):
        """Completely update a Source."""
        data_discovery = {
            'sources': [self.source.id],
            'scan_type': ScanTask.SCAN_TYPE_CONNECT
        }
        initial = self.create_expect_201(data_discovery)

        data = {
            'sources': [self.source.id],
            'scan_type': ScanTask.SCAN_TYPE_INSPECT
        }
        url = reverse('scanjob-detail', args=(initial['id'],))
        response = self.client.put(url,
                                   json.dumps(data),
                                   content_type='application/json',
                                   format='json')
        self.assertEqual(response.status_code,
                         status.HTTP_405_METHOD_NOT_ALLOWED)

    @patch('api.scanjob.view.start_scan', side_effect=dummy_start)
    def test_partial_update(self, start_scan):
        """Partially update a ScanJob is not supported."""
        data_discovery = {
            'sources': [self.source.id],
            'scan_type': ScanTask.SCAN_TYPE_CONNECT
        }
        initial = self.create_expect_201(data_discovery)

        data = {'scan_type': ScanTask.SCAN_TYPE_INSPECT}
        url = reverse('scanjob-detail', args=(initial['id'],))
        response = self.client.patch(url,
                                     json.dumps(data),
                                     content_type='application/json',
                                     format='json')
        self.assertEqual(response.status_code,
                         status.HTTP_405_METHOD_NOT_ALLOWED)

    @patch('api.scanjob.view.start_scan', side_effect=dummy_start)
    def test_delete(self, start_scan):
        """Delete a ScanJob is not supported."""
        data_discovery = {
            'sources': [self.source.id],
            'scan_type': ScanTask.SCAN_TYPE_CONNECT
        }
        response = self.create_expect_201(data_discovery)

        url = reverse('scanjob-detail', args=(response['id'],))
        response = self.client.delete(url, format='json')
        self.assertEqual(response.status_code,
                         status.HTTP_405_METHOD_NOT_ALLOWED)

    @patch('api.scanjob.view.start_scan', side_effect=dummy_start)
    def test_pause_bad_state(self, start_scan):
        """Pause a scanjob."""
        data_host = {
            'sources': [self.source.id],
            'scan_type': ScanTask.SCAN_TYPE_INSPECT
        }
        response = self.create_expect_201(data_host)

        # Pausing a job that is not running is a 400.
        url = reverse('scanjob-detail', args=(response['id'],))
        pause_url = '{}pause/'.format(url)
        response = self.client.put(pause_url, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    @patch('api.scanjob.view.start_scan', side_effect=dummy_start)
    def test_cancel(self, start_scan):
        """Cancel a scanjob."""
        data_host = {
            'sources': [self.source.id],
            'scan_type': ScanTask.SCAN_TYPE_INSPECT
        }
        response = self.create_expect_201(data_host)

        url = reverse('scanjob-detail', args=(response['id'],))
        pause_url = '{}cancel/'.format(url)
        response = self.client.put(pause_url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    @patch('api.scanjob.view.start_scan', side_effect=dummy_start)
    def test_restart_bad_state(self, start_scan):
        """Restart a scanjob."""
        data_host = {
            'sources': [self.source.id],
            'scan_type': ScanTask.SCAN_TYPE_INSPECT
        }
        response = self.create_expect_201(data_host)

        # Restarting a job that was never paused is a 400.
        url = reverse('scanjob-detail', args=(response['id'],))
        pause_url = '{}restart/'.format(url)
        response = self.client.put(pause_url, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_expand_scanjob(self):
        """Test view expand_scanjob."""
        scan_job = ScanJob(scan_type=ScanTask.SCAN_TYPE_INSPECT)
        scan_job.save()
        scan_job.sources.add(self.source)

        # Job in created state with no tasks yet.
        self.assertEqual(scan_job.status, ScanTask.CREATED)
        tasks = scan_job.tasks.all()
        self.assertEqual(len(tasks), 0)

        # Queue job, then record stats on the inspect task.
        scan_job.queue()
        task = scan_job.tasks.all()[1]
        task.systems_count = 2
        task.systems_failed = 1
        task.systems_scanned = 1
        task.save()

        # Re-fetch so expand_scanjob sees the saved task stats.
        scan_job = ScanJob.objects.filter(pk=scan_job.id).first()
        json_scan = {'tasks': [{}]}
        expand_scanjob(scan_job, json_scan)

        self.assertEqual(json_scan.get('systems_count'), 2)
        self.assertEqual(json_scan.get('systems_failed'), 1)
        self.assertEqual(json_scan.get('systems_scanned'), 1)

    def test_expand_sys_conn_result(self):
        """Test view expand_sys_conn_result."""
        scan_job = ScanJob(scan_type=ScanTask.SCAN_TYPE_CONNECT)
        scan_job.save()
        scan_job.sources.add(self.source)

        # Job in created state with no tasks yet.
        self.assertEqual(scan_job.status, ScanTask.CREATED)
        tasks = scan_job.tasks.all()
        self.assertEqual(len(tasks), 0)

        # Queue job to run.
        scan_job.queue()
        conn_task = scan_job.tasks.first()

        conn_result = ConnectionResult(source=conn_task.source,
                                       scan_task=conn_task)
        conn_result.save()

        sys_result = SystemConnectionResult(
            name='Foo',
            credential=self.cred,
            status=SystemConnectionResult.SUCCESS)
        sys_result.save()
        conn_result.systems.add(sys_result)
        conn_result.save()

        # The credential foreign key gets expanded inline.
        result = expand_sys_conn_result(conn_result)
        self.assertEqual(result[0]['credential']['name'], 'cred1')

    def test_expand_conn_results(self):
        """Test view expand_conn_results."""
        scan_job = ScanJob(scan_type=ScanTask.SCAN_TYPE_CONNECT)
        scan_job.save()
        scan_job.sources.add(self.source)

        # Job in created state with no tasks yet.
        self.assertEqual(scan_job.status, ScanTask.CREATED)
        tasks = scan_job.tasks.all()
        self.assertEqual(len(tasks), 0)

        # Queue job to run.
        scan_job.queue()
        conn_task = scan_job.tasks.first()

        conn_results = ConnectionResults(scan_job=scan_job)
        conn_results.save()
        conn_result = ConnectionResult(source=conn_task.source,
                                       scan_task=conn_task)
        conn_result.save()
        conn_results.results.add(conn_result)
        conn_results.save()

        sys_result = SystemConnectionResult(
            name='Foo',
            credential=self.cred,
            status=SystemConnectionResult.SUCCESS)
        sys_result.save()
        conn_result.systems.add(sys_result)
        conn_result.save()

        conn_results_json = {'results': [{}]}
        expand_conn_results(conn_results, conn_results_json)
        self.assertEqual(
            conn_results_json['results'][0]['systems'][0]['name'], 'Foo')

    def test_expand_inspect_results(self):
        """Test view expand_inspect_results."""
        scan_job = ScanJob(scan_type=ScanTask.SCAN_TYPE_INSPECT)
        scan_job.save()
        scan_job.sources.add(self.source)

        # Job in created state with no tasks yet.
        self.assertEqual(scan_job.status, ScanTask.CREATED)
        tasks = scan_job.tasks.all()
        self.assertEqual(len(tasks), 0)

        # Queue job to run.
        scan_job.queue()
        inspect_task = scan_job.tasks.all()[1]

        inspect_results = InspectionResults(scan_job=scan_job)
        inspect_results.save()
        inspect_result = InspectionResult(source=inspect_task.source,
                                          scan_task=inspect_task)
        inspect_result.save()
        inspect_results.results.add(inspect_result)
        inspect_results.save()

        sys_result = SystemInspectionResult(
            name='Foo', status=SystemConnectionResult.SUCCESS)
        sys_result.save()
        fact = RawFact(name='foo', value='value')
        fact.save()
        sys_result.facts.add(fact)
        sys_result.save()
        inspect_result.systems.add(sys_result)
        inspect_result.save()

        inspect_results_json = {'results': [{}]}
        expand_inspect_results(inspect_results, inspect_results_json)
        self.assertEqual(
            inspect_results_json['results'][0]['systems'][0]['facts'][0]
            ['name'], 'foo')