def wait_for_tasks(search_query, search_rate=1, max_tries=10, poll_rate=None, poll_timeout=None):
    """Search for tasks by specified search query and poll them to ensure that
    task has finished.

    :param search_query: Search query that will be passed to API call.
    :param search_rate: Delay between searches.
    :param max_tries: How many times search should be executed.
    :param poll_rate: Delay between the end of one task check-up and the start
        of the next check-up. Parameter for
        ``nailgun.entities.ForemanTask.poll()`` method.
    :param poll_timeout: Maximum number of seconds to wait until timing out.
        Parameter for ``nailgun.entities.ForemanTask.poll()`` method.
    :return: List of ``nailgun.entities.ForemanTasks`` entities.
    :raises: ``AssertionError``. If no tasks were found until timeout.
    """
    for _ in range(max_tries):
        tasks = entities.ForemanTask().search(query={'search': search_query})
        # Idiomatic truthiness check instead of ``len(tasks) > 0``.
        if tasks:
            for task in tasks:
                task.poll(poll_rate=poll_rate, timeout=poll_timeout)
            break
        # Nothing found yet: wait before the next search attempt.
        time.sleep(search_rate)
    else:
        # for/else: reached only when the loop was never broken out of,
        # i.e. all ``max_tries`` searches came back empty.
        raise AssertionError(
            "No task was found using query '{}'".format(search_query))
    return tasks
def test_negative_fetch_non_existent_task(self):
    """Fetch a non-existent task.

    @id: a2a81ca2-63c4-47f5-9314-5852f5e2617f

    @Assert: An HTTP 4XX or 5XX message is returned.
    """
    # Reading a ForemanTask with a bogus ID must make the server reply
    # with an HTTP error, surfaced client-side as ``HTTPError``.
    with self.assertRaises(HTTPError):
        entities.ForemanTask(id='abc123').read()
def test_positive_get_summary(self):
    """Get a summary of foreman tasks.

    @id: bdcab413-a25d-4fe1-9db4-b50b5c31ebce

    @Assert: A list of dicts is returned.
    """
    summary = entities.ForemanTask().summary()
    # The summary endpoint must return a list whose every element is a dict.
    self.assertIsInstance(summary, list)
    for item in summary:
        self.assertIsInstance(item, dict)
def test_negative_fetch_non_existent_task(self):
    """Fetch a non-existent task.

    :id: a2a81ca2-63c4-47f5-9314-5852f5e2617f

    :expectedresults: An HTTP 4XX or 5XX message is returned.

    :CaseImportance: Critical
    """
    # Reading a ForemanTask with a bogus ID must make the server reply
    # with an HTTP error, surfaced client-side as ``HTTPError``.
    with self.assertRaises(HTTPError):
        entities.ForemanTask(id='abc123').read()
def test_positive_get_summary(self):
    """Get a summary of foreman tasks.

    @Assert: A list of dicts is returned.

    @Feature: ForemanTask
    """
    summary = entities.ForemanTask().summary()
    # The summary endpoint must return a list whose every element is a dict.
    self.assertIsInstance(summary, list)
    for item in summary:
        self.assertIsInstance(item, dict)
def test_negative_fetch_non_existent_task(self):
    """Fetch a non-existent task.

    @Assert: An HTTP 4XX or 5XX message is returned.

    @Feature: ForemanTask

    @bz: 1131702
    """
    # Reading a ForemanTask with a bogus ID must make the server reply
    # with an HTTP error, surfaced client-side as ``HTTPError``.
    with self.assertRaises(HTTPError):
        entities.ForemanTask(id='abc123').read()
def test_positive_get_summary():
    """Get a summary of foreman tasks.

    :id: bdcab413-a25d-4fe1-9db4-b50b5c31ebce

    :expectedresults: A list of dicts is returned.

    :CaseImportance: Critical
    """
    summary = entities.ForemanTask().summary()
    # ``isinstance`` is the idiomatic type check; the original exact-type
    # comparison ``type(x) is list`` rejects subclasses for no reason.
    assert isinstance(summary, list)
    for item in summary:
        assert isinstance(item, dict)
def test_positive_get_summary(self):
    """Get a summary of foreman tasks.

    :id: bdcab413-a25d-4fe1-9db4-b50b5c31ebce

    :expectedresults: A list of dicts is returned.

    :CaseImportance: Critical
    """
    summary = entities.ForemanTask().summary()
    # The summary endpoint must return a list whose every element is a dict.
    self.assertIsInstance(summary, list)
    for item in summary:
        self.assertIsInstance(item, dict)
def wait_untill_capsule_sync(capsule):
    """The polling function that waits for capsule sync task to finish

    :param capsule: A capsule hostname

    .. note:: the misspelling "untill" is kept — callers reference the
        function by this name.
    """
    # Resolve the hostname to a Capsule entity; search returns a list,
    # the first hit is the capsule we want.
    matches = entities.Capsule().search(
        query={'search': 'name={}'.format(capsule)})
    cap = matches[0]
    sync_tasks = cap.content_get_sync()['active_sync_tasks']
    if not sync_tasks:
        return
    logger.info('Wait for background capsule sync to finish on '
                'capsule: {}'.format(cap.name))
    # Block until every in-flight sync task completes (45 minutes max each).
    for sync_task in sync_tasks:
        entities.ForemanTask(id=sync_task['id']).poll(timeout=2700)
def wait_for_errata_applicability_task(
    host_id, from_when, search_rate=1, max_tries=10, poll_rate=None, poll_timeout=15
):
    """Search the generate applicability task for given host and make sure it
    finishes.

    :param int host_id: Content host ID of the host where we are regenerating
        applicability.
    :param int from_when: Timestamp (in UTC) to limit number of returned tasks
        to investigate.
    :param int search_rate: Delay between searches.
    :param int max_tries: How many times search should be executed.
    :param int poll_rate: Delay between the end of one task check-up and the
        start of the next check-up. Parameter for
        ``nailgun.entities.ForemanTask.poll()`` method.
    :param int poll_timeout: Maximum number of seconds to wait until timing
        out. Parameter for ``nailgun.entities.ForemanTask.poll()`` method.
    :return: None — the function only polls the matching task(s) to
        completion; there is no ``return`` statement in the body.
    :raises: ``AssertionError``. If no tasks were found for given host until
        timeout.
    """
    # NOTE(review): ``assert`` statements are stripped under ``python -O``,
    # so these argument checks disappear in optimized runs.
    assert isinstance(host_id, int), 'Param host_id have to be int'
    assert isinstance(from_when, int), 'Param from_when have to be int'
    now = int(time.time())
    assert from_when <= now, 'Param from_when have to be timestamp in the past'
    for _ in range(max_tries):
        now = int(time.time())
        # +1 second of slack so a task started exactly at ``from_when``
        # still falls inside the "started_at" window.
        max_age = now - from_when + 1
        search_query = (
            '( label = Actions::Katello::Host::GenerateApplicability OR label = '
            'Actions::Katello::Host::UploadPackageProfile ) AND started_at > "%s seconds ago"'
            % max_age
        )
        tasks = entities.ForemanTask().search(query={'search': search_query})
        tasks_finished = 0
        for task in tasks:
            # Only poll tasks that belong to the host we care about; the two
            # task labels store the host reference in different input shapes.
            if (
                task.label == 'Actions::Katello::Host::GenerateApplicability'
                and host_id in task.input['host_ids']
            ):
                task.poll(poll_rate=poll_rate, timeout=poll_timeout)
                tasks_finished += 1
            elif (
                task.label == 'Actions::Katello::Host::UploadPackageProfile'
                and host_id == task.input['host']['id']
            ):
                task.poll(poll_rate=poll_rate, timeout=poll_timeout)
                tasks_finished += 1
        if tasks_finished > 0:
            break
        time.sleep(search_rate)
    else:
        # for/else: reached only when no relevant task completed within
        # ``max_tries`` search attempts.
        raise AssertionError(
            f"No task was found using query '{search_query}' for host '{host_id}'"
        )
def wait_for_sync(self, capsule, timeout=600, start_time=None):
    """Wait for a capsule sync to finish and assert the sync succeeded.

    :param capsule: Capsule host object whose ``nailgun_capsule`` is polled.
    :param timeout: Maximum number of seconds to wait for each sync task.
    :param start_time: UTC ``datetime`` used as the lower bound for
        ``last_sync_time``; defaults to the moment this method is called.
    :raises AssertionError: if no sync was started/completed, or any sync
        task failed.
    """
    # BUG FIX: the original default ``start_time=datetime.utcnow()`` was
    # evaluated once at function-definition (import) time, so every later
    # call compared ``last_sync_time`` against module import time instead
    # of "now". Use a None sentinel and compute the default per call.
    if start_time is None:
        start_time = datetime.utcnow()
    # Assert that a task to sync lifecycle environment to the capsule
    # is started (or finished already)
    sync_status = capsule.nailgun_capsule.content_get_sync()
    assert (
        len(sync_status['active_sync_tasks'])
        or datetime.strptime(sync_status['last_sync_time'], '%Y-%m-%d %H:%M:%S UTC') > start_time
    )
    # Wait till capsule sync finishes and assert the sync task succeeded
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll(timeout=timeout)
    sync_status = capsule.nailgun_capsule.content_get_sync()
    assert len(sync_status['last_failed_sync_tasks']) == 0
def test_positive_capsule_sync(self):
    """Create repository, add it to lifecycle environment, assign lifecycle
    environment with a capsule, sync repository, sync it once again, update
    repository (add 1 new package), sync repository once again.

    :id: 35513099-c918-4a8e-90d0-fd4c87ad2f82

    :customerscenario: true

    :BZ: 1394354, 1439691

    :expectedresults:

        1. Repository sync triggers capsule sync
        2. After syncing capsule contains same repo content as satellite
        3. Syncing repository which has no changes for a second time does
           not trigger any new publish task
        4. Repository revision on capsule remains exactly the same after
           second repo sync with no changes
        5. Syncing repository which was updated will update the content on
           capsule

    :CaseLevel: System
    """
    repo_name = gen_string('alphanumeric')
    # Create and publish custom repository with 2 packages in it
    repo_url = create_repo(
        repo_name,
        FAKE_1_YUM_REPO,
        FAKE_1_YUM_REPO_RPMS[0:2],
    )
    # Create organization, product, repository in satellite, and lifecycle
    # environment
    org = entities.Organization(smart_proxy=[self.capsule_id]).create()
    product = entities.Product(organization=org).create()
    repo = entities.Repository(
        product=product,
        url=repo_url,
    ).create()
    lce = entities.LifecycleEnvironment(organization=org).create()
    # Associate the lifecycle environment with the capsule
    capsule = entities.Capsule(id=self.capsule_id).read()
    capsule.content_add_lifecycle_environment(data={
        'environment_id': lce.id,
    })
    result = capsule.content_lifecycle_environments()
    self.assertGreaterEqual(len(result['results']), 1)
    self.assertIn(lce.id,
                  [capsule_lce['id'] for capsule_lce in result['results']])
    # Create a content view with the repository
    cv = entities.ContentView(
        organization=org,
        repository=[repo],
    ).create()
    # Sync repository
    repo.sync()
    repo = repo.read()
    # Publish new version of the content view
    cv.publish()
    cv = cv.read()
    self.assertEqual(len(cv.version), 1)
    cvv = cv.version[-1].read()
    # Promote content view to lifecycle environment
    promote(cvv, lce.id)
    cvv = cvv.read()
    # Environments: Library + the promoted LCE.
    self.assertEqual(len(cvv.environment), 2)
    # Assert that a task to sync lifecycle environment to the capsule
    # is started (or finished already)
    sync_status = capsule.content_get_sync()
    self.assertTrue(
        len(sync_status['active_sync_tasks']) >= 1
        or sync_status['last_sync_time'])
    # Assert that the content of the published content view in
    # lifecycle environment is exactly the same as content of
    # repository
    lce_repo_path = form_repo_path(
        org=org.label,
        lce=lce.label,
        cv=cv.label,
        prod=product.label,
        repo=repo.label,
    )
    cvv_repo_path = form_repo_path(
        org=org.label,
        cv=cv.label,
        cvv=cvv.version,
        prod=product.label,
        repo=repo.label,
    )
    # Wait till capsule sync finishes
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    sync_status = capsule.content_get_sync()
    last_sync_time = sync_status['last_sync_time']
    # If BZ1439691 is open, need to sync repo once more, as repodata
    # will change on second attempt even with no changes in repo
    if is_open('BZ:1439691'):
        repo.sync()
        repo = repo.read()
        cv.publish()
        cv = cv.read()
        self.assertEqual(len(cv.version), 2)
        cv.version.sort(key=lambda version: version.id)
        cvv = cv.version[-1].read()
        promote(cvv, lce.id)
        cvv = cvv.read()
        self.assertEqual(len(cvv.environment), 2)
        sync_status = capsule.content_get_sync()
        self.assertTrue(
            len(sync_status['active_sync_tasks']) >= 1
            or sync_status['last_sync_time'] != last_sync_time)
        for task in sync_status['active_sync_tasks']:
            entities.ForemanTask(id=task['id']).poll()
        sync_status = capsule.content_get_sync()
        last_sync_time = sync_status['last_sync_time']
    # Assert that the content published on the capsule is exactly the
    # same as in repository on satellite
    lce_revision_capsule = get_repomd_revision(lce_repo_path,
                                               hostname=self.capsule_ip)
    self.assertEqual(
        get_repo_files(lce_repo_path, hostname=self.capsule_ip),
        get_repo_files(cvv_repo_path))
    # Sync repository for a second time
    result = repo.sync()
    # Assert that the task summary contains a message that says the
    # publish was skipped because content had not changed
    self.assertEqual(result['result'], 'success')
    self.assertTrue(result['output']['post_sync_skipped'])
    self.assertEqual(result['humanized']['output'], 'No new packages.')
    # Publish a new version of content view
    cv.publish()
    cv = cv.read()
    cv.version.sort(key=lambda version: version.id)
    cvv = cv.version[-1].read()
    # Promote new content view version to lifecycle environment
    promote(cvv, lce.id)
    cvv = cvv.read()
    self.assertEqual(len(cvv.environment), 2)
    # Wait till capsule sync finishes
    sync_status = capsule.content_get_sync()
    # NOTE(review): ``tasks`` is collected but never used after the loop.
    tasks = []
    if not sync_status['active_sync_tasks']:
        self.assertNotEqual(sync_status['last_sync_time'], last_sync_time)
    else:
        for task in sync_status['active_sync_tasks']:
            tasks.append(entities.ForemanTask(id=task['id']))
            tasks[-1].poll()
    # Assert that the value of repomd revision of repository in
    # lifecycle environment on the capsule has not changed
    new_lce_revision_capsule = get_repomd_revision(
        lce_repo_path, hostname=self.capsule_ip)
    self.assertEqual(lce_revision_capsule, new_lce_revision_capsule)
    # Update a repository with 1 new rpm
    create_repo(
        repo_name,
        FAKE_1_YUM_REPO,
        FAKE_1_YUM_REPO_RPMS[-1:],
    )
    # Sync, publish and promote the repository
    repo.sync()
    repo = repo.read()
    cv.publish()
    cv = cv.read()
    cv.version.sort(key=lambda version: version.id)
    cvv = cv.version[-1].read()
    promote(cvv, lce.id)
    cvv = cvv.read()
    self.assertEqual(len(cvv.environment), 2)
    # Assert that a task to sync lifecycle environment to the capsule
    # is started (or finished already)
    sync_status = capsule.content_get_sync()
    self.assertTrue(
        len(sync_status['active_sync_tasks']) >= 1
        or sync_status['last_sync_time'] != last_sync_time)
    # Assert that packages count in the repository is updated
    self.assertEqual(repo.content_counts['package'], 3)
    # Assert that the content of the published content view in
    # lifecycle environment is exactly the same as content of the
    # repository
    cvv_repo_path = form_repo_path(
        org=org.label,
        cv=cv.label,
        cvv=cvv.version,
        prod=product.label,
        repo=repo.label,
    )
    self.assertEqual(
        repo.content_counts['package'],
        cvv.package_count,
    )
    self.assertEqual(get_repo_files(lce_repo_path),
                     get_repo_files(cvv_repo_path))
    # Wait till capsule sync finishes
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    # Assert that the content published on the capsule is exactly the
    # same as in the repository
    self.assertEqual(
        get_repo_files(lce_repo_path, hostname=self.capsule_ip),
        get_repo_files(cvv_repo_path))
def test_positive_checksum_sync(self):
    """Synchronize repository to capsule, update repository's checksum
    type, trigger capsule sync and make sure checksum type was updated on
    capsule

    :id: eb07bdf3-6cd8-4a2f-919b-8dfc84e16115

    :customerscenario: true

    :BZ: 1288656, 1664288, 1732066

    :expectedresults: checksum type is updated in repodata of
        corresponding repository on capsule

    :CaseLevel: System

    :CaseImportance: Critical
    """
    repomd_path = 'repodata/repomd.xml'
    # Create organization, product, lce and repository with sha256 checksum
    # type
    org = entities.Organization(smart_proxy=[self.capsule_id]).create()
    product = entities.Product(organization=org).create()
    repo = entities.Repository(product=product,
                               checksum_type='sha256',
                               download_policy='immediate').create()
    lce = entities.LifecycleEnvironment(organization=org).create()
    # Associate the lifecycle environment with the capsule
    capsule = entities.Capsule(id=self.capsule_id).read()
    capsule.content_add_lifecycle_environment(data={
        'environment_id': lce.id,
    })
    result = capsule.content_lifecycle_environments()
    self.assertGreaterEqual(len(result['results']), 1)
    self.assertIn(lce.id,
                  [capsule_lce['id'] for capsule_lce in result['results']])
    # Sync, publish and promote a repo
    cv = entities.ContentView(
        organization=org,
        repository=[repo],
    ).create()
    repo.sync()
    repo = repo.read()
    cv.publish()
    cv = cv.read()
    self.assertEqual(len(cv.version), 1)
    cvv = cv.version[-1].read()
    promote(cvv, lce.id)
    cvv = cvv.read()
    # Environments: Library + the promoted LCE.
    self.assertEqual(len(cvv.environment), 2)
    # Wait till capsule sync finishes
    sync_status = capsule.content_get_sync()
    self.assertTrue(
        len(sync_status['active_sync_tasks']) >= 1
        or sync_status['last_sync_time'])
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    sync_status = capsule.content_get_sync()
    last_sync_time = sync_status['last_sync_time']
    # Verify repodata's checksum type is sha256, not sha1 on capsule
    lce_repo_path = form_repo_path(
        org=org.label,
        lce=lce.label,
        cv=cv.label,
        prod=product.label,
        repo=repo.label,
    )
    # grep exits non-zero with no output when the pattern is absent —
    # used below as "checksum type not present in repomd.xml".
    result = ssh.command('grep -o \'checksum type="sha1"\' {}/{}'.format(
        lce_repo_path, repomd_path), hostname=self.capsule_ip)
    self.assertNotEqual(result.return_code, 0)
    self.assertEqual(len(result.stdout), 0)
    result = ssh.command('grep -o \'checksum type="sha256"\' {}/{}'.format(
        lce_repo_path, repomd_path), hostname=self.capsule_ip)
    self.assertEqual(result.return_code, 0)
    self.assertGreater(len(result.stdout), 0)
    # Update repo's checksum type to sha1
    repo.checksum_type = 'sha1'
    repo = repo.update(['checksum_type'])
    # Sync, publish and promote repo
    repo.sync()
    cv.publish()
    cv = cv.read()
    self.assertEqual(len(cv.version), 2)
    cv.version.sort(key=lambda version: version.id)
    cvv = cv.version[-1].read()
    promote(cvv, lce.id)
    cvv = cvv.read()
    self.assertEqual(len(cvv.environment), 2)
    # Wait till capsule sync finishes
    sync_status = capsule.content_get_sync()
    self.assertTrue(
        len(sync_status['active_sync_tasks']) >= 1
        or sync_status['last_sync_time'] != last_sync_time)
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    # Verify repodata's checksum type has updated to sha1 on capsule
    result = ssh.command('grep -o \'checksum type="sha256"\' {}/{}'.format(
        lce_repo_path, repomd_path), hostname=self.capsule_ip)
    self.assertNotEqual(result.return_code, 0)
    self.assertEqual(len(result.stdout), 0)
    result = ssh.command('grep -o \'checksum type="sha1"\' {}/{}'.format(
        lce_repo_path, repomd_path), hostname=self.capsule_ip)
    self.assertEqual(result.return_code, 0)
    self.assertGreater(len(result.stdout), 0)
def test_positive_uploaded_content_library_sync(self): """Ensure custom repo with no upstream url and manually uploaded content after publishing to Library is synchronized to capsule automatically :id: f5406312-dd31-4551-9f03-84eb9c3415f5 :customerscenario: true :BZ: 1340686 :expectedresults: custom content is present on external capsule :CaseLevel: System """ # Create organization, product, repository with no upstream url org = entities.Organization(smart_proxy=[self.capsule_id]).create() product = entities.Product(organization=org).create() repo = entities.Repository( product=product, url=None, ).create() capsule = entities.Capsule(id=self.capsule_id).search( query={'search': 'name={0}'.format(self.capsule_hostname)})[0] # Find "Library" lifecycle env for specific organization lce = entities.LifecycleEnvironment(organization=org).search( query={'search': 'name={}'.format(ENVIRONMENT)})[0] # Associate the lifecycle environment with the capsule capsule.content_add_lifecycle_environment(data={ 'environment_id': lce.id, }) result = capsule.content_lifecycle_environments() self.assertGreaterEqual(len(result['results']), 1) self.assertIn(lce.id, [capsule_lce['id'] for capsule_lce in result['results']]) # Create a content view with the repository cv = entities.ContentView( organization=org, repository=[repo], ).create() # Upload custom content into the repo with open(get_data_file(RPM_TO_UPLOAD), 'rb') as handle: repo.upload_content(files={'content': handle}) self.assertEqual(repo.read().content_counts['package'], 1) # Publish new version of the content view cv.publish() cv = cv.read() self.assertEqual(len(cv.version), 1) # Assert that a task to sync lifecycle environment to the capsule # is started (or finished already) sync_status = capsule.content_get_sync() self.assertTrue( len(sync_status['active_sync_tasks']) >= 1 or sync_status['last_sync_time']) # Wait till capsule sync finishes for task in sync_status['active_sync_tasks']: 
entities.ForemanTask(id=task['id']).poll() # Verify previously uploaded content is present on capsule lce_repo_path = form_repo_path( org=org.label, lce=lce.label, cv=cv.label, prod=product.label, repo=repo.label, ) for i in range(5): capsule_rpms = get_repo_files(lce_repo_path, hostname=self.capsule_ip) if (len(capsule_rpms) != 0): break else: sleep(5) self.assertEqual(len(capsule_rpms), 1) self.assertEqual(capsule_rpms[0], RPM_TO_UPLOAD)
def test_positive_sync_puppet_module_with_versions(self):
    """Ensure it's possible to sync multiple versions of the same puppet
    module to the capsule

    :id: 83a0ddd6-8a6a-43a0-b169-094a2556dd28

    :customerscenario: true

    :BZ: 1365952, 1655243

    :Steps:

        1. Register a capsule
        2. Associate LCE with the capsule
        3. Sync a puppet module with multiple versions
        4. Publish a CV with one version of puppet module and promote
           it to capsule's LCE
        5. Wait for capsule synchronization to finish
        6. Publish another CV with different version of puppet module
           and promote it to capsule's LCE
        7. Wait for capsule synchronization to finish once more

    :expectedresults: Capsule was successfully synchronized, new version
        of puppet module is present on capsule

    :CaseLevel: System

    :CaseImportance: Medium
    """
    module_name = 'versioned'
    module_versions = ['2.2.2', '3.3.3']
    org = entities.Organization().create()
    lce = entities.LifecycleEnvironment(organization=org).create()
    content_view = entities.ContentView(organization=org).create()
    prod = entities.Product(organization=org).create()
    puppet_repository = entities.Repository(
        content_type=REPO_TYPE['puppet'],
        product=prod,
        url=CUSTOM_PUPPET_REPO,
    ).create()
    capsule = entities.Capsule(id=self.capsule_id).read()
    capsule.content_add_lifecycle_environment(data={
        'environment_id': lce.id,
    })
    result = capsule.content_lifecycle_environments()
    self.assertGreaterEqual(len(result['results']), 1)
    self.assertIn(lce.id,
                  [capsule_lce['id'] for capsule_lce in result['results']])
    puppet_repository.sync()
    puppet_module_old = entities.PuppetModule().search(
        query={
            'search': 'name={} and version={}'.format(module_name,
                                                      module_versions[0])
        })[0]
    # Add puppet module to the CV
    entities.ContentViewPuppetModule(
        content_view=content_view,
        id=puppet_module_old.id,
    ).create()
    content_view = content_view.read()
    self.assertGreater(len(content_view.puppet_module), 0)
    # Publish and promote CVV
    content_view.publish()
    content_view = content_view.read()
    self.assertEqual(len(content_view.version), 1)
    cvv = content_view.version[-1].read()
    promote(cvv, lce.id)
    cvv = cvv.read()
    # Environments: Library + the promoted LCE.
    self.assertEqual(len(cvv.environment), 2)
    # Wait till capsule sync finishes
    sync_status = capsule.content_get_sync()
    self.assertTrue(
        len(sync_status['active_sync_tasks']) >= 1
        or sync_status['last_sync_time'])
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    sync_status = capsule.content_get_sync()
    last_sync_time = sync_status['last_sync_time']
    # Unassign old puppet module version from CV
    entities.ContentViewPuppetModule(
        content_view=content_view,
        id=content_view.puppet_module[0].id,
    ).delete()
    # Assign new puppet module version
    puppet_module_new = entities.PuppetModule().search(
        query={
            'search': 'name={} and version={}'.format(module_name,
                                                      module_versions[1])
        })[0]
    entities.ContentViewPuppetModule(
        content_view=content_view,
        id=puppet_module_new.id,
    ).create()
    # NOTE(review): ``content_view`` has not been re-read since the module
    # swap, so this assertion checks the stale, pre-swap object — confirm
    # whether a ``content_view = content_view.read()`` is intended here.
    self.assertGreater(len(content_view.puppet_module), 0)
    # Publish and promote CVV
    content_view.publish()
    content_view = content_view.read()
    self.assertEqual(len(content_view.version), 2)
    cvv = content_view.version[-1].read()
    promote(cvv, lce.id)
    cvv = cvv.read()
    self.assertEqual(len(cvv.environment), 2)
    # Wait till capsule sync finishes
    sync_status = capsule.content_get_sync()
    if sync_status['active_sync_tasks']:
        for task in sync_status['active_sync_tasks']:
            entities.ForemanTask(id=task['id']).poll()
    else:
        self.assertNotEqual(sync_status['last_sync_time'], last_sync_time)
    stored_modules = get_repo_files(PULP_PUBLISHED_PUPPET_REPOS_PATH, 'gz',
                                    self.capsule_ip)
    # Ensure the new module version's archive is among the published files.
    with self.assertNotRaises(StopIteration):
        next(
            filename for filename in stored_modules
            if '{}-{}'.format(module_name, module_versions[1]) in filename)
def test_positive_sync_puppet_module_with_versions(self, capsule_vm):
    """Ensure it's possible to sync multiple versions of the same puppet
    module to the capsule

    :id: 83a0ddd6-8a6a-43a0-b169-094a2556dd28

    :customerscenario: true

    :BZ: 1365952, 1655243

    :Steps:

        1. Register a capsule
        2. Associate LCE with the capsule
        3. Sync a puppet module with multiple versions
        4. Publish a CV with one version of puppet module and promote
           it to capsule's LCE
        5. Wait for capsule synchronization to finish
        6. Publish another CV with different version of puppet module
           and promote it to capsule's LCE
        7. Wait for capsule synchronization to finish once more

    :expectedresults: Capsule was successfully synchronized, new version
        of puppet module is present on capsule

    :CaseLevel: System

    :CaseImportance: Medium
    """
    module_name = 'versioned'
    module_versions = ['2.2.2', '3.3.3']
    org = entities.Organization().create()
    lce = entities.LifecycleEnvironment(organization=org).create()
    content_view = entities.ContentView(organization=org).create()
    prod = entities.Product(organization=org).create()
    puppet_repository = entities.Repository(
        content_type=REPO_TYPE['puppet'], product=prod, url=CUSTOM_PUPPET_REPO
    ).create()
    capsule = entities.Capsule(id=capsule_vm._capsule.id).read()
    capsule.content_add_lifecycle_environment(data={'environment_id': lce.id})
    result = capsule.content_lifecycle_environments()
    assert len(result['results']) >= 1
    assert lce.id in [capsule_lce['id'] for capsule_lce in result['results']]
    puppet_repository.sync()
    puppet_module_old = entities.PuppetModule().search(
        query={'search': f'name={module_name} and version={module_versions[0]}'}
    )[0]
    # Add puppet module to the CV
    entities.ContentViewPuppetModule(
        content_view=content_view, id=puppet_module_old.id
    ).create()
    content_view = content_view.read()
    assert len(content_view.puppet_module) > 0
    # Publish and promote CVV
    content_view.publish()
    content_view = content_view.read()
    assert len(content_view.version) == 1
    cvv = content_view.version[-1].read()
    promote(cvv, lce.id)
    cvv = cvv.read()
    # Environments: Library + the promoted LCE.
    assert len(cvv.environment) == 2
    # Wait till capsule sync finishes
    sync_status = capsule.content_get_sync()
    assert len(sync_status['active_sync_tasks']) >= 1 or sync_status['last_sync_time']
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    sync_status = capsule.content_get_sync()
    last_sync_time = sync_status['last_sync_time']
    # Unassign old puppet module version from CV
    entities.ContentViewPuppetModule(
        content_view=content_view, id=content_view.puppet_module[0].id
    ).delete()
    # Assign new puppet module version
    puppet_module_new = entities.PuppetModule().search(
        query={'search': f'name={module_name} and version={module_versions[1]}'}
    )[0]
    entities.ContentViewPuppetModule(
        content_view=content_view, id=puppet_module_new.id
    ).create()
    # NOTE(review): ``content_view`` has not been re-read since the module
    # swap, so this assertion checks the stale, pre-swap object — confirm
    # whether a ``content_view = content_view.read()`` is intended here.
    assert len(content_view.puppet_module) > 0
    # Publish and promote CVV
    content_view.publish()
    content_view = content_view.read()
    assert len(content_view.version) == 2
    cvv = content_view.version[-1].read()
    promote(cvv, lce.id)
    cvv = cvv.read()
    assert len(cvv.environment) == 2
    # Wait till capsule sync finishes
    sync_status = capsule.content_get_sync()
    if sync_status['active_sync_tasks']:
        for task in sync_status['active_sync_tasks']:
            entities.ForemanTask(id=task['id']).poll()
    else:
        assert sync_status['last_sync_time'] != last_sync_time
    stored_modules = get_repo_files(PULP_PUBLISHED_PUPPET_REPOS_PATH, 'gz', capsule_vm.ip_addr)
    # Ensure the new module version's archive is among the published files.
    matching_filenames = filter(
        lambda filename: f'{module_name}-{module_versions[1]}' in filename, stored_modules
    )
    assert next(matching_filenames, None)
def test_positive_iso_library_sync(self, module_manifest_org, capsule_configured): """Ensure RH repo with ISOs after publishing to Library is synchronized to capsule automatically :id: 221a2d41-0fef-46dd-a804-fdedd7187163 :customerscenario: true :BZ: 1303102, 1480358, 1303103, 1734312 :expectedresults: ISOs are present on external capsule :CaseLevel: System """ # Enable & sync RH repository with ISOs rh_repo_id = enable_rhrepo_and_fetchid( basearch='x86_64', org_id=module_manifest_org.id, product=constants.PRDS['rhsc'], repo=constants.REPOS['rhsc7_iso']['name'], reposet=constants.REPOSET['rhsc7_iso'], releasever=None, ) rh_repo = entities.Repository(id=rh_repo_id).read() call_entity_method_with_timeout(rh_repo.sync, timeout=2500) # Find "Library" lifecycle env for specific organization lce = entities.LifecycleEnvironment( organization=module_manifest_org).search( query={'search': f'name={constants.ENVIRONMENT}'})[0] # Associate the lifecycle environment with the capsule capsule_configured.nailgun_capsule.content_add_lifecycle_environment( data={'environment_id': lce.id}) result = capsule_configured.nailgun_capsule.content_lifecycle_environments( ) assert len(result['results']) >= 1 assert lce.id in [ capsule_lce['id'] for capsule_lce in result['results'] ] # Create a content view with the repository cv = entities.ContentView(organization=module_manifest_org, repository=[rh_repo]).create() # Publish new version of the content view cv.publish() cv = cv.read() assert len(cv.version) == 1 # Verify ISOs are present on satellite sat_isos = get_repo_files_by_url(rh_repo.full_path, extension='iso') assert len(sat_isos) == 4 # Assert that a task to sync lifecycle environment to the capsule # is started (or finished already) sync_status = capsule_configured.nailgun_capsule.content_get_sync() assert len(sync_status['active_sync_tasks'] ) >= 1 or sync_status['last_sync_time'] # Wait till capsule sync finishes for task in sync_status['active_sync_tasks']: 
entities.ForemanTask(id=task['id']).poll(timeout=600) # Verify all the ISOs are present on capsule caps_path = ( f'{capsule_configured.url}/pulp/content/{module_manifest_org.label}/{lce.label}' f'/{cv.label}/content/dist/rhel/server/7/7Server/x86_64/sat-capsule/6.4/iso/' ) caps_isos = get_repo_files_by_url(caps_path, extension='iso') assert len(caps_isos) == 4 assert set(sat_isos) == set(caps_isos)
def test_positive_update_with_immediate_sync(self, capsule_configured, default_sat):
    """Create a repository with on_demand download policy, associate it
    with capsule, sync repo, update download policy to immediate, sync once
    more.

    :id: 511b531d-1fbe-4d64-ae31-0f9eb6625e7f

    :customerscenario: true

    :BZ: 1315752

    :expectedresults: content was successfully synchronized - capsule
        filesystem contains valid links to packages

    :CaseLevel: System
    """
    repo_url = settings.repos.yum_1.url
    packages_count = constants.FAKE_1_YUM_REPOS_COUNT
    # Create organization, product, repository in satellite, and lifecycle
    # environment
    org = entities.Organization().create()
    prod = entities.Product(organization=org).create()
    repo = entities.Repository(download_policy='on_demand',
                               mirror_on_sync=True,
                               product=prod,
                               url=repo_url).create()
    lce = entities.LifecycleEnvironment(organization=org).create()
    # Update capsule's download policy to on_demand to match repository's
    # policy
    self.update_capsule_download_policy(capsule_configured, 'on_demand')
    # Associate the lifecycle environment with the capsule
    capsule_configured.nailgun_capsule.content_add_lifecycle_environment(
        data={'environment_id': lce.id})
    result = capsule_configured.nailgun_capsule.content_lifecycle_environments()
    assert len(result['results'])
    assert lce.id in [
        capsule_lce['id'] for capsule_lce in result['results']
    ]
    # Create a content view with the repository
    cv = entities.ContentView(organization=org, repository=[repo]).create()
    # Sync repository
    repo.sync()
    repo = repo.read()
    # Publish new version of the content view
    cv.publish()
    cv = cv.read()
    assert len(cv.version) == 1
    cvv = cv.version[-1].read()
    # Promote content view to lifecycle environment
    promote(cvv, lce.id)
    cvv = cvv.read()
    # Environments: Library + the promoted LCE.
    assert len(cvv.environment) == 2
    # Assert that a task to sync lifecycle environment to the capsule
    # is started (or finished already)
    sync_status = capsule_configured.nailgun_capsule.content_get_sync()
    assert len(
        sync_status['active_sync_tasks']) or sync_status['last_sync_time']
    # Wait till capsule sync finishes
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    # Update download policy to 'immediate'
    repo.download_policy = 'immediate'
    repo = repo.update(['download_policy'])
    assert repo.download_policy == 'immediate'
    # Update capsule's download policy as well
    self.update_capsule_download_policy(capsule_configured, 'immediate')
    # Sync repository once again
    repo.sync()
    repo = repo.read()
    # Publish new version of the content view
    cv.publish()
    cv = cv.read()
    assert len(cv.version) == 2
    cv.version.sort(key=lambda version: version.id)
    cvv = cv.version[-1].read()
    # Promote content view to lifecycle environment
    promote(cvv, lce.id)
    cvv = cvv.read()
    assert len(cvv.environment) == 2
    # Assert that a task to sync lifecycle environment to the capsule
    # is started (or finished already)
    sync_status = capsule_configured.nailgun_capsule.content_get_sync()
    assert len(sync_status['active_sync_tasks']
               ) >= 1 or sync_status['last_sync_time']
    # Wait till capsule sync finishes
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    # Verify that new artifacts were created on Capsule — with the
    # 'immediate' policy the actual package files must now exist.
    result = capsule_configured.run(
        'find /var/lib/pulp/media/artifact -type f | wc -l')
    assert int(result.stdout) > packages_count
    # Verify the count of RPMs published on Capsule
    caps_repo_url = form_repo_url(
        capsule_configured,
        org=org.label,
        lce=lce.label,
        cv=cv.label,
        prod=prod.label,
        repo=repo.label,
    )
    caps_files = get_repo_files_by_url(caps_repo_url)
    assert len(caps_files) == packages_count
def test_positive_on_demand_sync(self, capsule_configured):
    """Create a repository with 'on_demand' sync, add it to lifecycle
    environment with a capsule, sync repository, examine existing packages
    on capsule, download any package, examine packages once more

    :id: ba470269-a7ad-4181-bc7c-8e17a177ca20

    :expectedresults:
        1. After initial syncing only symlinks are present on both
           satellite and capsule, no real packages were fetched.
        2. All the symlinks are pointing to non-existent files.
        3. Attempt to download package is successful
        4. Downloaded package checksum matches checksum of the source
           package

    :CaseLevel: System
    """
    # NOTE(review): sibling test uses settings.repos.yum_3.url and
    # constants.FAKE_3_YUM_REPO_RPMS — confirm these three constants
    # actually resolve in the current constants layout.
    repo_url = constants.repos.FAKE_3_YUM_REPO
    packages_count = constants.FAKE_3_YUM_REPOS_COUNT
    package = constants.YUM_REPO_RPMS[0]
    # Create organization, product, repository in satellite, and lifecycle
    # environment
    org = entities.Organization().create()
    prod = entities.Product(organization=org).create()
    repo = entities.Repository(download_policy='on_demand',
                               mirror_on_sync=True,
                               product=prod,
                               url=repo_url).create()
    lce = entities.LifecycleEnvironment(organization=org).create()
    # Associate the lifecycle environment with the capsule
    capsule_configured.nailgun_capsule.content_add_lifecycle_environment(
        data={'environment_id': lce.id})
    result = capsule_configured.nailgun_capsule.content_lifecycle_environments()
    assert len(result['results'])
    assert lce.id in [capsule_lce['id'] for capsule_lce in result['results']]
    # Create a content view with the repository
    cv = entities.ContentView(organization=org, repository=[repo]).create()
    # Sync repository
    repo.sync()
    repo = repo.read()
    # Publish new version of the content view
    cv.publish()
    cv = cv.read()
    assert len(cv.version) == 1
    cvv = cv.version[-1].read()
    # Promote content view to lifecycle environment
    promote(cvv, lce.id)
    cvv = cvv.read()
    # Library + the newly promoted LCE
    assert len(cvv.environment) == 2
    # Assert that a task to sync lifecycle environment to the capsule
    # is started (or finished already)
    sync_status = capsule_configured.nailgun_capsule.content_get_sync()
    assert len(sync_status['active_sync_tasks']) or sync_status['last_sync_time']
    # Check whether the symlinks for all the packages were created on
    # satellite
    cvv_repo_path = form_repo_path(org=org.label,
                                   cv=cv.label,
                                   cvv=cvv.version,
                                   prod=prod.label,
                                   repo=repo.label)
    result = ssh.command(f'find {cvv_repo_path}/ -type l')
    assert result.return_code == 0
    links = {link for link in result.stdout if link}
    assert len(links) == packages_count
    # Ensure all the symlinks on satellite are broken (pointing to
    # nonexistent files) — on_demand means nothing was downloaded yet
    result = ssh.command(
        f'find {cvv_repo_path}/ -type l ! -exec test -e {{}} \\; -print')
    assert result.return_code == 0
    broken_links = {link for link in result.stdout if link}
    assert len(broken_links) == packages_count
    assert broken_links == links
    # Wait till capsule sync finishes
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    lce_repo_path = form_repo_path(org=org.label,
                                   lce=lce.label,
                                   cv=cv.label,
                                   prod=prod.label,
                                   repo=repo.label)
    # Check whether the symlinks for all the packages were created on
    # capsule
    result = capsule_configured.run(f'find {lce_repo_path}/ -type l')
    assert result.status == 0
    links = {link for link in result.stdout if link}
    assert len(links) == packages_count
    # Ensure all the symlinks on capsule are broken (pointing to
    # nonexistent files)
    result = capsule_configured.run(
        f'find {lce_repo_path}/ -type l ! -exec test -e {{}} \\; -print',
    )
    assert result.status == 0
    broken_links = {link for link in result.stdout if link}
    assert len(broken_links) == packages_count
    assert broken_links == links
    # Build the URL the repo is published under on the satellite
    published_repo_url = 'http://{}{}/pulp/{}/'.format(
        settings.server.hostname,
        f':{settings.server.port}' if settings.server.port else '',
        lce_repo_path.split('http/')[1],
    )
    # Get md5 checksum of the source (upstream) package
    package_md5 = md5_by_url(f'{repo_url}{package}')
    # Download the package from satellite and get its md5 checksum
    published_package_md5 = md5_by_url(f'{published_repo_url}{package}')
    # Assert checksums are matching
    assert package_md5 == published_package_md5
def test_positive_sync_kickstart_repo(self, module_manifest_org, default_sat, capsule_configured):
    """Sync kickstart repository to the capsule.

    :id: bc97b53f-f79b-42f7-8014-b0641435bcfc

    :steps:
        1. Sync a kickstart repository to Satellite.
        2. Publish it in a CV, promote to Capsule's LCE.
        3. Check it is synced to Capsule without errors.
        4. Check for kickstart content on Satellite and Capsule.

    :expectedresults: 1. The kickstart repo is successfully synced to the
        Capsule.

    :CaseLevel: Integration

    :BZ: 1992329
    """
    # Enable the RHEL8 BaseOS kickstart repo from the manifest
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=module_manifest_org.id,
        product=constants.PRDS['rhel8'],
        repo=constants.REPOS['rhel8_bos_ks']['name'],
        reposet=constants.REPOSET['rhel8_bos_ks'],
        releasever='8.4',
    )
    repo = entities.Repository(id=repo_id).read()
    lce = entities.LifecycleEnvironment(organization=module_manifest_org).create()
    # Associate the lifecycle environment with the capsule
    capsule_configured.nailgun_capsule.content_add_lifecycle_environment(
        data={'environment_id': lce.id})
    result = capsule_configured.nailgun_capsule.content_lifecycle_environments()
    assert len(result['results'])
    assert lce.id in [capsule_lce['id'] for capsule_lce in result['results']]
    # Create a content view with the repository
    cv = entities.ContentView(organization=module_manifest_org, repository=[repo]).create()
    # Sync repository
    repo.sync()
    repo = repo.read()
    # Publish new version of the content view
    cv.publish()
    cv = cv.read()
    assert len(cv.version) == 1
    cvv = cv.version[-1].read()
    # Promote content view to lifecycle environment
    promote(cvv, lce.id)
    cvv = cvv.read()
    # Library + the newly promoted LCE
    assert len(cvv.environment) == 2
    # Assert that a task to sync lifecycle environment to the capsule
    # is started (or finished already)
    sync_status = capsule_configured.nailgun_capsule.content_get_sync()
    assert len(sync_status['active_sync_tasks']) or sync_status['last_sync_time']
    # Wait till capsule sync finishes
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    # Assert the sync task succeeded
    sync_status = capsule_configured.nailgun_capsule.content_get_sync()
    assert len(sync_status['last_failed_sync_tasks']) == 0
    # Check for kickstart content on SAT and CAPS
    url_base = (
        f'pulp/content/{module_manifest_org.label}/{lce.label}/'
        f'{cv.label}/content/dist/rhel8/8.4/x86_64/baseos/kickstart')
    # Check kickstart specific files — md5 must match between both hosts
    for file in constants.KICKSTART_CONTENT:
        sat_file = md5_by_url(f'{default_sat.url}/{url_base}/{file}')
        caps_file = md5_by_url(f'{capsule_configured.url}/{url_base}/{file}')
        assert sat_file == caps_file
    # Check packages
    sat_pkg_url = f'{default_sat.url}/{url_base}/Packages/'
    caps_pkg_url = f'{capsule_configured.url}/{url_base}/Packages/'
    sat_pkgs = get_repo_files_by_url(sat_pkg_url)
    caps_pkgs = get_repo_files_by_url(caps_pkg_url)
    assert len(caps_pkgs)
    assert sat_pkgs == caps_pkgs
def test_positive_uploaded_content_library_sync(self, capsule_configured):
    """Ensure custom repo with no upstream url and manually uploaded
    content after publishing to Library is synchronized to capsule
    automatically

    :id: f5406312-dd31-4551-9f03-84eb9c3415f5

    :customerscenario: true

    :BZ: 1340686

    :expectedresults: custom content is present on external capsule

    :CaseLevel: System
    """
    org = entities.Organization(
        smart_proxy=[capsule_configured.nailgun_capsule.id]).create()
    product = entities.Product(organization=org).create()
    # Repository with no upstream URL: content will only come from upload
    repo = entities.Repository(product=product, url=None).create()
    capsule = entities.Capsule(
        id=capsule_configured.nailgun_capsule.id).search(
            query={'search': f'name={capsule_configured.hostname}'})[0]
    # Find "Library" lifecycle env for specific organization
    lce = entities.LifecycleEnvironment(organization=org).search(
        query={'search': f'name={constants.ENVIRONMENT}'})[0]
    # Associate the lifecycle environment with the capsule
    capsule.content_add_lifecycle_environment(data={'environment_id': lce.id})
    result = capsule.content_lifecycle_environments()
    assert len(result['results']) >= 1
    assert lce.id in [capsule_lce['id'] for capsule_lce in result['results']]
    # Create a content view with the repository
    cv = entities.ContentView(organization=org, repository=[repo]).create()
    # Upload custom content into the repo
    with open(get_data_file(constants.RPM_TO_UPLOAD), 'rb') as handle:
        repo.upload_content(files={'content': handle})
    assert repo.read().content_counts['package'] == 1
    # Publish new version of the content view; publishing to Library is
    # expected to trigger the capsule sync automatically (BZ 1340686)
    cv.publish()
    cv = cv.read()
    assert len(cv.version) == 1
    # Assert that a task to sync lifecycle environment to the capsule
    # is started (or finished already)
    sync_status = capsule.content_get_sync()
    assert len(sync_status['active_sync_tasks']) >= 1 or sync_status['last_sync_time']
    # Wait till capsule sync finishes
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    # Verify that new artifacts were created on Capsule
    result = capsule_configured.run(
        'find /var/lib/pulp/media/artifact -type f | wc -l')
    assert int(result.stdout) > 0
    # Verify the RPM published on Capsule
    caps_repo_url = form_repo_url(
        capsule_configured,
        org=org.label,
        lce=lce.label,
        cv=cv.label,
        prod=product.label,
        repo=repo.label,
    )
    caps_files = get_repo_files_by_url(caps_repo_url)
    assert len(caps_files) == 1
    assert caps_files[0] == constants.RPM_TO_UPLOAD
def test_positive_mirror_on_sync(self):
    """Create 2 repositories with 'on_demand' download policy and mirror
    on sync option, associate them with capsule, sync first repo, move
    package from first repo to second one, sync it, attempt to install
    package on some host.

    :id: 39149642-1e7e-4ef8-8762-bec295913014

    :BZ: 1426408

    :expectedresults: host, subscribed to second repo only, can
        successfully install package

    :CaseLevel: System
    """
    repo1_name = gen_string('alphanumeric')
    repo2_name = gen_string('alphanumeric')
    # Create and publish first custom repository with 2 packages in it
    repo1_url = create_repo(
        repo1_name,
        FAKE_1_YUM_REPO,
        FAKE_1_YUM_REPO_RPMS[1:3],
    )
    # Create and publish second repo with no packages in it
    repo2_url = create_repo(repo2_name)
    # Create organization, product, repository in satellite, and lifecycle
    # environment
    org = entities.Organization().create()
    prod1 = entities.Product(organization=org).create()
    repo1 = entities.Repository(
        download_policy='on_demand',
        mirror_on_sync=True,
        product=prod1,
        url=repo1_url,
    ).create()
    prod2 = entities.Product(organization=org).create()
    repo2 = entities.Repository(
        download_policy='on_demand',
        mirror_on_sync=True,
        product=prod2,
        url=repo2_url,
    ).create()
    lce1 = entities.LifecycleEnvironment(organization=org).create()
    lce2 = entities.LifecycleEnvironment(organization=org).create()
    # Associate the lifecycle environments with the capsule
    capsule = entities.Capsule(id=self.capsule_id).read()
    for lce_id in (lce1.id, lce2.id):
        capsule.content_add_lifecycle_environment(data={
            'environment_id': lce_id,
        })
    result = capsule.content_lifecycle_environments()
    self.assertGreaterEqual(len(result['results']), 2)
    self.assertTrue({lce1.id, lce2.id}.issubset(
        [capsule_lce['id'] for capsule_lce in result['results']]),
    )
    # Create content views with the repositories
    cv1 = entities.ContentView(
        organization=org,
        repository=[repo1],
    ).create()
    cv2 = entities.ContentView(
        organization=org,
        repository=[repo2],
    ).create()
    # Sync first repository
    repo1.sync()
    repo1 = repo1.read()
    # Publish new version of the content view
    cv1.publish()
    cv1 = cv1.read()
    self.assertEqual(len(cv1.version), 1)
    cvv1 = cv1.version[-1].read()
    # Promote content view to lifecycle environment
    promote(cvv1, lce1.id)
    cvv1 = cvv1.read()
    self.assertEqual(len(cvv1.environment), 2)
    # Assert that a task to sync lifecycle environment to the capsule
    # is started (or finished already)
    sync_status = capsule.content_get_sync()
    self.assertTrue(
        len(sync_status['active_sync_tasks']) >= 1 or
        sync_status['last_sync_time'])
    # Wait till capsule sync finishes
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    # Move one package from the first repo to second one
    ssh.command('mv {} {}'.format(
        os.path.join(
            PULP_PUBLISHED_YUM_REPOS_PATH,
            repo1_name,
            FAKE_1_YUM_REPO_RPMS[2],
        ),
        os.path.join(
            PULP_PUBLISHED_YUM_REPOS_PATH,
            repo2_name,
            FAKE_1_YUM_REPO_RPMS[2],
        ),
    ))
    # Update repositories (re-trigger 'createrepo' command)
    create_repo(repo1_name)
    create_repo(repo2_name)
    # Synchronize first repository
    repo1.sync()
    cv1.publish()
    cv1 = cv1.read()
    self.assertEqual(len(cv1.version), 2)
    cv1.version.sort(key=lambda version: version.id)
    cvv1 = cv1.version[-1].read()
    # Promote content view to lifecycle environment
    promote(cvv1, lce1.id)
    cvv1 = cvv1.read()
    self.assertEqual(len(cvv1.environment), 2)
    # Synchronize second repository
    repo2.sync()
    repo2 = repo2.read()
    self.assertEqual(repo2.content_counts['package'], 1)
    cv2.publish()
    cv2 = cv2.read()
    self.assertEqual(len(cv2.version), 1)
    cvv2 = cv2.version[-1].read()
    # Promote content view to lifecycle environment
    promote(cvv2, lce2.id)
    cvv2 = cvv2.read()
    self.assertEqual(len(cvv2.environment), 2)
    # Create activation key, add subscription to second repo only
    activation_key = entities.ActivationKey(
        content_view=cv2,
        environment=lce2,
        organization=org,
    ).create()
    subscription = entities.Subscription(organization=org).search(
        query={'search': 'name={}'.format(prod2.name)})[0]
    activation_key.add_subscriptions(
        data={'subscription_id': subscription.id})
    # Subscribe a host with activation key
    with VirtualMachine(distro=DISTRO_RHEL7) as client:
        client.install_katello_ca()
        client.register_contenthost(
            org.label,
            activation_key.name,
        )
        # Install the package. FIX: the original used
        # ``.rstrip('.rpm')``, which strips the trailing *character set*
        # {'.', 'r', 'p', 'm'} rather than the '.rpm' suffix and can eat
        # legitimate trailing characters of the package name. Slice the
        # suffix off explicitly instead.
        package_name = FAKE_1_YUM_REPO_RPMS[2][:-len('.rpm')]
        result = client.run('yum install -y {}'.format(package_name))
        self.assertEqual(result.return_code, 0)
        # Ensure package installed
        result = client.run('rpm -qa | grep {}'.format(package_name))
        self.assertEqual(result.return_code, 0)
        self.assertIn(package_name, result.stdout[0])
def test_positive_capsule_sync(self, capsule_configured, default_sat):
    """Create repository, add it to lifecycle environment, assign lifecycle
    environment with a capsule, sync repository, sync it once again, update
    repository (add 1 new package), sync repository once again.

    :id: 35513099-c918-4a8e-90d0-fd4c87ad2f82

    :customerscenario: true

    :BZ: 1394354, 1439691

    :expectedresults:
        1. Repository sync triggers capsule sync
        2. After syncing capsule contains same repo content as satellite
        3. Syncing repository which has no changes for a second time does
           not trigger any new publish task
        4. Repository revision on capsule remains exactly the same after
           second repo sync with no changes
        5. Syncing repository which was updated will update the content on
           capsule

    :CaseLevel: System
    """
    # Create organization, product, repository in satellite, and lifecycle
    # environment
    repo_url = settings.repos.yum_1.url
    org = entities.Organization(
        smart_proxy=[capsule_configured.nailgun_capsule.id]).create()
    product = entities.Product(organization=org).create()
    repo = entities.Repository(product=product, url=repo_url).create()
    lce = entities.LifecycleEnvironment(organization=org).create()
    # Associate the lifecycle environment with the capsule
    capsule_configured.nailgun_capsule.content_add_lifecycle_environment(
        data={'environment_id': lce.id})
    result = capsule_configured.nailgun_capsule.content_lifecycle_environments()
    assert len(result['results'])
    assert lce.id in [capsule_lce['id'] for capsule_lce in result['results']]
    # Create a content view with the repository
    cv = entities.ContentView(organization=org, repository=[repo]).create()
    # Sync repository
    repo.sync()
    repo = repo.read()
    # Publish new version of the content view
    cv.publish()
    cv = cv.read()
    assert len(cv.version) == 1
    cvv = cv.version[-1].read()
    # Promote content view to lifecycle environment
    promote(cvv, lce.id)
    cvv = cvv.read()
    # Library + the newly promoted LCE
    assert len(cvv.environment) == 2
    # Assert that a task to sync lifecycle environment to the capsule
    # is started (or finished already)
    sync_status = capsule_configured.nailgun_capsule.content_get_sync()
    assert len(sync_status['active_sync_tasks']) or sync_status['last_sync_time']
    # Content of the published content view in
    # lifecycle environment should equal content of the
    # repository
    assert repo.content_counts['package'] == cvv.package_count
    # Wait till capsule sync finishes
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    sync_status = capsule_configured.nailgun_capsule.content_get_sync()
    # remember the sync time to detect later whether a new sync happened
    last_sync_time = sync_status['last_sync_time']
    # Assert that the content published on the capsule is exactly the
    # same as in repository on satellite
    sat_repo_url = form_repo_url(
        default_sat,
        org=org.label,
        lce=lce.label,
        cv=cv.label,
        prod=product.label,
        repo=repo.label,
    )
    caps_repo_url = form_repo_url(
        capsule_configured,
        org=org.label,
        lce=lce.label,
        cv=cv.label,
        prod=product.label,
        repo=repo.label,
    )
    sat_files = get_repo_files_by_url(sat_repo_url)
    caps_files = get_repo_files_by_url(caps_repo_url)
    assert sat_files == caps_files
    lce_revision_capsule = get_repomd_revision(caps_repo_url)
    # Sync repository for a second time
    result = repo.sync()
    # Assert that the task summary contains a message that says the
    # publish was skipped because content had not changed
    assert result['result'] == 'success'
    assert result['output']['post_action_skipped']
    assert 'Associating Content: 0/0' in result['humanized']['output']
    # Publish a new version of content view
    cv.publish()
    cv = cv.read()
    cv.version.sort(key=lambda version: version.id)
    cvv = cv.version[-1].read()
    # Promote new content view version to lifecycle environment
    promote(cvv, lce.id)
    cvv = cvv.read()
    assert len(cvv.environment) == 2
    # Wait till capsule sync finishes; if no task is active, the sync
    # must already have completed (new last_sync_time)
    sync_status = capsule_configured.nailgun_capsule.content_get_sync()
    tasks = []
    if not sync_status['active_sync_tasks']:
        assert sync_status['last_sync_time'] != last_sync_time
    else:
        for task in sync_status['active_sync_tasks']:
            tasks.append(entities.ForemanTask(id=task['id']))
            tasks[-1].poll()
    # Assert that the value of repomd revision of repository in
    # lifecycle environment on the capsule has not changed
    new_lce_revision_capsule = get_repomd_revision(caps_repo_url)
    assert lce_revision_capsule == new_lce_revision_capsule
    # Update a repository with 1 new rpm
    with open(get_data_file(constants.RPM_TO_UPLOAD), 'rb') as handle:
        repo.upload_content(files={'content': handle})
    # Publish and promote the repository
    repo = repo.read()
    cv.publish()
    cv = cv.read()
    cv.version.sort(key=lambda version: version.id)
    cvv = cv.version[-1].read()
    promote(cvv, lce.id)
    cvv = cvv.read()
    assert len(cvv.environment) == 2
    # Assert that a task to sync lifecycle environment to the capsule
    # is started (or finished already)
    sync_status = capsule_configured.nailgun_capsule.content_get_sync()
    assert (len(sync_status['active_sync_tasks'])
            or sync_status['last_sync_time'] != last_sync_time)
    # Assert that packages count in the repository is updated
    assert repo.content_counts['package'] == (
        constants.FAKE_1_YUM_REPOS_COUNT + 1)
    # Assert that the content of the published content view in
    # lifecycle environment is exactly the same as content of the
    # repository
    assert repo.content_counts['package'] == cvv.package_count
    # Wait till capsule sync finishes
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    # Assert that the content published on the capsule is exactly the
    # same as in the repository
    sat_files = get_repo_files_by_url(sat_repo_url)
    caps_files = get_repo_files_by_url(caps_repo_url)
    assert sat_files == caps_files
def test_positive_iso_library_sync(self):
    """Ensure RH repo with ISOs after publishing to Library is synchronized
    to capsule automatically

    :id: 221a2d41-0fef-46dd-a804-fdedd7187163

    :customerscenario: true

    :BZ: 1303102, 1480358, 1303103, 1734312

    :expectedresults: ISOs are present on external capsule

    :CaseLevel: System
    """
    # Create organization, product, enable & sync RH repository with ISOs
    org = entities.Organization(smart_proxy=[self.capsule_id]).create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    rh_repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhsc'],
        repo=REPOS['rhsc7_iso']['name'],
        reposet=REPOSET['rhsc7_iso'],
        releasever=None,
    )
    rh_repo = entities.Repository(id=rh_repo_id).read()
    call_entity_method_with_timeout(rh_repo.sync, timeout=2500)
    capsule = entities.Capsule(id=self.capsule_id).read()
    # Find "Library" lifecycle env for specific organization
    lce = entities.LifecycleEnvironment(organization=org).search(
        query={'search': 'name={}'.format(ENVIRONMENT)})[0]
    # Associate the lifecycle environment with the capsule
    capsule.content_add_lifecycle_environment(data={
        'environment_id': lce.id,
    })
    result = capsule.content_lifecycle_environments()
    self.assertGreaterEqual(len(result['results']), 1)
    self.assertIn(lce.id,
                  [capsule_lce['id'] for capsule_lce in result['results']])
    # Create a content view with the repository
    cv = entities.ContentView(
        organization=org,
        repository=[rh_repo],
    ).create()
    # Publish new version of the content view
    cv.publish()
    cv = cv.read()
    self.assertEqual(len(cv.version), 1)
    # Verify ISOs are present on satellite.
    # FIX: originally asserted ``len(result)`` (the stale LCE-listing
    # dict), so the check passed regardless of whether any ISO was
    # published; assert on the actual file list instead.
    repo_path = os.path.join(PULP_PUBLISHED_ISO_REPOS_PATH,
                             rh_repo.backend_identifier)
    sat_isos = get_repo_files(repo_path, extension='iso')
    self.assertGreater(len(sat_isos), 0)
    # Assert that a task to sync lifecycle environment to the capsule
    # is started (or finished already)
    sync_status = capsule.content_get_sync()
    self.assertTrue(
        len(sync_status['active_sync_tasks']) >= 1 or
        sync_status['last_sync_time'])
    # Wait till capsule sync finishes
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll(timeout=600)
    # Verify all the ISOs are present on capsule.
    # FIX: same stale-variable bug as above — assert on capsule_isos.
    capsule_isos = get_repo_files(repo_path,
                                  extension='iso',
                                  hostname=self.capsule_ip)
    self.assertGreater(len(capsule_isos), 0)
    self.assertEqual(set(sat_isos), set(capsule_isos))
def test_rhcloud_insights_e2e(
    session,
    rhel8_insights_vm,
    fixable_rhel8_vm,
    organization_ak_setup,
    unset_rh_cloud_token,
):
    """Synchronize hits data from cloud, verify it is displayed in Satellite and run remediation.

    :id: d952e83c-3faf-4299-a048-2eb6ccb8c9c2

    :Steps:
        1. Prepare misconfigured machine and upload its data to Insights.
        2. Add Cloud API key in Satellite.
        3. In Satellite UI, Configure -> Insights -> Add RH Cloud token and syns recommendations.
        4. Run remediation for dnf.conf recommendation against rhel8 host.
        5. Assert that job completed successfully.
        6. Sync Insights recommendations.
        7. Search for previously remediated issue.

    :expectedresults:
        1. Insights recommendation related to dnf.conf issue is listed for misconfigured machine.
        2. Remediation job finished successfully.
        3. Insights recommendation related to dnf.conf issue is not listed.

    :CaseAutomation: Automated
    """

    def _now_stamp():
        # Minute-resolution UTC timestamp used in task search queries below.
        return datetime.utcnow().strftime('%Y-%m-%d %H:%M')

    def _await_insights_sync(since):
        # Block until the 'Insights full sync' task started after `since`
        # shows up and finishes.
        wait_for_tasks(
            search_query=f'Insights full sync and started_at >= "{since}"',
            search_rate=15,
            max_tries=10,
        )

    org, ak = organization_ak_setup
    query = 'dnf.conf'
    job_query = (
        f'Remote action: Insights remediations for selected issues on {rhel8_insights_vm.hostname}'
    )
    with session:
        session.organization.select(org_name=org.name)
        session.location.select(loc_name=DEFAULT_LOC)
        # Store the RH Cloud token and trigger the first hits sync
        session.cloudinsights.save_token_sync_hits(settings.rh_cloud.token)
        _await_insights_sync(_now_stamp())
        # Workaround for alert message causing search to fail. See airgun issue 584.
        session.browser.refresh()
        # The dnf.conf recommendation must be listed for the misconfigured host
        hit = session.cloudinsights.search(query)[0]
        assert hit['Hostname'] == rhel8_insights_vm.hostname
        assert (
            hit['Recommendation'] == 'The dnf installs lower versions of packages when the '
            '"best" option is not present in the /etc/dnf/dnf.conf'
        )
        # Remediate and wait for the remediation job task to complete
        remediation_started_at = _now_stamp()
        session.cloudinsights.remediate(query)
        result = wait_for_tasks(
            search_query=f'{job_query} and started_at >= "{remediation_started_at}"',
            search_rate=15,
            max_tries=10,
        )
        task_output = entities.ForemanTask().search(query={'search': result[0].id})
        assert (
            task_output[0].result == 'success'
        ), f'result: {result}\n task_output: {task_output}'
        # Re-sync hits and confirm the remediated issue is gone
        resync_started_at = _now_stamp()
        session.cloudinsights.sync_hits()
        _await_insights_sync(resync_started_at)
        # Workaround for alert message causing search to fail. See airgun issue 584.
        session.browser.refresh()
        assert not session.cloudinsights.search(query)
def test_positive_on_demand_sync(self):
    """Create a repository with 'on_demand' sync, add it to lifecycle
    environment with a capsule, sync repository, examine existing packages
    on capsule, download any package, examine packages once more

    :id: ba470269-a7ad-4181-bc7c-8e17a177ca20

    :expectedresults:
        1. After initial syncing only symlinks are present on both
           satellite and capsule, no real packages were fetched.
        2. All the symlinks are pointing to non-existent files.
        3. Attempt to download package is successful
        4. Downloaded package checksum matches checksum of the source
           package

    :CaseLevel: System
    """
    repo_url = FAKE_3_YUM_REPO
    packages_count = FAKE_3_YUM_REPOS_COUNT
    # NOTE(review): package is taken from the FAKE_1 RPM list while the
    # repo under test is FAKE_3 — confirm this RPM exists in FAKE_3 too.
    package = FAKE_1_YUM_REPO_RPMS[0]
    # Create organization, product, repository in satellite, and lifecycle
    # environment
    org = entities.Organization().create()
    prod = entities.Product(organization=org).create()
    repo = entities.Repository(
        download_policy='on_demand',
        mirror_on_sync=True,
        product=prod,
        url=repo_url,
    ).create()
    lce = entities.LifecycleEnvironment(organization=org).create()
    # Associate the lifecycle environment with the capsule
    capsule = entities.Capsule(id=self.capsule_id).read()
    capsule.content_add_lifecycle_environment(data={
        'environment_id': lce.id,
    })
    result = capsule.content_lifecycle_environments()
    self.assertGreaterEqual(len(result['results']), 1)
    self.assertIn(lce.id,
                  [capsule_lce['id'] for capsule_lce in result['results']])
    # Create a content view with the repository
    cv = entities.ContentView(
        organization=org,
        repository=[repo],
    ).create()
    # Sync repository
    repo.sync()
    repo = repo.read()
    # Publish new version of the content view
    cv.publish()
    cv = cv.read()
    self.assertEqual(len(cv.version), 1)
    cvv = cv.version[-1].read()
    # Promote content view to lifecycle environment
    promote(cvv, lce.id)
    cvv = cvv.read()
    # Library + the newly promoted LCE
    self.assertEqual(len(cvv.environment), 2)
    # Assert that a task to sync lifecycle environment to the capsule
    # is started (or finished already)
    sync_status = capsule.content_get_sync()
    self.assertTrue(
        len(sync_status['active_sync_tasks']) >= 1 or
        sync_status['last_sync_time'])
    # Check whether the symlinks for all the packages were created on
    # satellite
    cvv_repo_path = form_repo_path(
        org=org.label,
        cv=cv.label,
        cvv=cvv.version,
        prod=prod.label,
        repo=repo.label,
    )
    result = ssh.command('find {}/ -type l'.format(cvv_repo_path))
    self.assertEqual(result.return_code, 0)
    links = set(link for link in result.stdout if link)
    self.assertEqual(len(links), packages_count)
    # Ensure all the symlinks on satellite are broken (pointing to
    # nonexistent files) — on_demand means nothing was downloaded yet
    result = ssh.command(
        'find {}/ -type l ! -exec test -e {{}} \\; -print'.format(
            cvv_repo_path))
    self.assertEqual(result.return_code, 0)
    broken_links = set(link for link in result.stdout if link)
    self.assertEqual(len(broken_links), packages_count)
    self.assertEqual(broken_links, links)
    # Wait till capsule sync finishes
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    lce_repo_path = form_repo_path(
        org=org.label,
        lce=lce.label,
        cv=cv.label,
        prod=prod.label,
        repo=repo.label,
    )
    # Check whether the symlinks for all the packages were created on
    # capsule
    result = ssh.command(
        'find {}/ -type l'.format(lce_repo_path),
        hostname=self.capsule_ip,
    )
    self.assertEqual(result.return_code, 0)
    links = set(link for link in result.stdout if link)
    self.assertEqual(len(links), packages_count)
    # Ensure all the symlinks on capsule are broken (pointing to
    # nonexistent files)
    result = ssh.command(
        'find {}/ -type l ! -exec test -e {{}} \\; -print'.format(
            lce_repo_path),
        hostname=self.capsule_ip,
    )
    self.assertEqual(result.return_code, 0)
    broken_links = set(link for link in result.stdout if link)
    self.assertEqual(len(broken_links), packages_count)
    self.assertEqual(broken_links, links)
    # Build the URL the repo is published under on the satellite
    published_repo_url = 'http://{}{}/pulp/{}/'.format(
        settings.server.hostname,
        ':{}'.format(settings.server.port) if settings.server.port else '',
        lce_repo_path.split('http/')[1])
    # Get md5 checksum of the source (upstream) package
    package_md5 = md5_by_url('{}{}'.format(repo_url, package))
    # Download the package from satellite and get its md5 checksum
    published_package_md5 = md5_by_url('{}{}'.format(
        published_repo_url, package))
    # Assert checksums are matching
    self.assertEqual(package_md5, published_package_md5)
def test_positive_on_demand_sync(self, capsule_configured, default_sat):
    """Create a repository with 'on_demand' policy, add it to a CV,
    promote to an 'on_demand' Capsule's LCE, check artifacts were created,
    download a published package, assert it matches the source.

    :id: ba470269-a7ad-4181-bc7c-8e17a177ca20

    :expectedresults:
        1. A custom yum repository is successfully synced and ContentView
           published
        2. The ContentView is successfully promoted to the Capsule's LCE
           and the content is automatically synced to the Capsule
        3. Artifacts are created on the Capsule in
           /var/lib/pulp/media/artifacts/
        4. Package is successfully downloaded from the Capsule, its
           checksum matches the original package from the upstream repo

    :CaseLevel: System
    """
    repo_url = settings.repos.yum_3.url
    packages_count = constants.FAKE_3_YUM_REPOS_COUNT
    package = constants.FAKE_3_YUM_REPO_RPMS[0]
    # Create organization, product, repository in satellite, and lifecycle
    # environment
    org = entities.Organization().create()
    prod = entities.Product(organization=org).create()
    repo = entities.Repository(download_policy='on_demand',
                               mirror_on_sync=True,
                               product=prod,
                               url=repo_url).create()
    lce = entities.LifecycleEnvironment(organization=org).create()
    # Associate the lifecycle environment with the capsule
    capsule_configured.nailgun_capsule.content_add_lifecycle_environment(
        data={'environment_id': lce.id})
    result = capsule_configured.nailgun_capsule.content_lifecycle_environments()
    assert len(result['results'])
    assert lce.id in [capsule_lce['id'] for capsule_lce in result['results']]
    # Create a content view with the repository
    cv = entities.ContentView(organization=org, repository=[repo]).create()
    # Sync repository
    repo.sync()
    repo = repo.read()
    # Publish new version of the content view
    cv.publish()
    cv = cv.read()
    assert len(cv.version) == 1
    cvv = cv.version[-1].read()
    # Promote content view to lifecycle environment
    promote(cvv, lce.id)
    cvv = cvv.read()
    # Library + the newly promoted LCE
    assert len(cvv.environment) == 2
    # Assert that a task to sync lifecycle environment to the capsule
    # is started (or finished already)
    sync_status = capsule_configured.nailgun_capsule.content_get_sync()
    assert len(sync_status['active_sync_tasks']) or sync_status['last_sync_time']
    # Wait till capsule sync finishes
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    # Verify that new artifacts were created on Capsule but rpms were not
    # downloaded (on_demand: metadata only, fewer artifacts than packages)
    result = capsule_configured.run(
        'find /var/lib/pulp/media/artifact -type f | wc -l')
    assert 0 < int(result.stdout) < packages_count
    # Verify packages on Capsule match the source
    caps_repo_url = form_repo_url(
        capsule_configured,
        org=org.label,
        lce=lce.label,
        cv=cv.label,
        prod=prod.label,
        repo=repo.label,
    )
    source_files = get_repo_files_by_url(repo_url)
    caps_files = get_repo_files_by_url(caps_repo_url)
    assert source_files == caps_files
    assert len(caps_files) == packages_count
    # Download a package from the Capsule and get its md5 checksum
    published_package_md5 = md5_by_url(f'{caps_repo_url}/{package}')
    # Get md5 checksum of source package
    package_md5 = md5_by_url(f'{repo_url}/{package}')
    # Assert checksums are matching
    assert package_md5 == published_package_md5
def test_positive_update_with_immediate_sync(self):
    """Create a repository with on_demand download policy, associate it
    with capsule, sync repo, update download policy to immediate, sync once
    more.

    :id: 511b531d-1fbe-4d64-ae31-0f9eb6625e7f

    :customerscenario: true

    :BZ: 1315752

    :expectedresults: content was successfully synchronized - capsule
        filesystem contains valid links to packages

    :CaseLevel: System
    """
    repo_url = FAKE_1_YUM_REPO
    packages_count = FAKE_1_YUM_REPOS_COUNT
    # Create organization, product, repository in satellite, and lifecycle
    # environment
    org = entities.Organization().create()
    prod = entities.Product(organization=org).create()
    repo = entities.Repository(
        download_policy='on_demand',
        mirror_on_sync=True,
        product=prod,
        url=repo_url,
    ).create()
    lce = entities.LifecycleEnvironment(organization=org).create()
    # Update capsule's download policy to on_demand to match repository's
    # policy
    self.update_capsule_download_policy(self.capsule_id, 'on_demand')
    # Associate the lifecycle environment with the capsule
    capsule = entities.Capsule(id=self.capsule_id).read()
    capsule.content_add_lifecycle_environment(data={
        'environment_id': lce.id,
    })
    result = capsule.content_lifecycle_environments()
    self.assertGreaterEqual(len(result['results']), 1)
    self.assertIn(lce.id,
                  [capsule_lce['id'] for capsule_lce in result['results']])
    # Create a content view with the repository
    cv = entities.ContentView(
        organization=org,
        repository=[repo],
    ).create()
    # Sync repository
    repo.sync()
    repo = repo.read()
    # Publish new version of the content view
    cv.publish()
    cv = cv.read()
    self.assertEqual(len(cv.version), 1)
    cvv = cv.version[-1].read()
    # Promote content view to lifecycle environment
    promote(cvv, lce.id)
    cvv = cvv.read()
    # Library + the promoted LCE
    self.assertEqual(len(cvv.environment), 2)
    # Assert that a task to sync lifecycle environment to the capsule
    # is started (or finished already)
    sync_status = capsule.content_get_sync()
    self.assertTrue(
        len(sync_status['active_sync_tasks']) >= 1 or
        sync_status['last_sync_time'])
    # Wait till capsule sync finishes
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    # Update download policy to 'immediate'
    repo.download_policy = 'immediate'
    repo = repo.update(['download_policy'])
    self.assertEqual(repo.download_policy, 'immediate')
    # Update capsule's download policy as well
    self.update_capsule_download_policy(self.capsule_id, 'immediate')
    # Make sure to revert capsule's download policy after the test as the
    # capsule is shared among other tests
    self.addCleanup(self.update_capsule_download_policy, self.capsule_id,
                    'on_demand')
    # Sync repository once again
    repo.sync()
    repo = repo.read()
    # Publish new version of the content view
    cv.publish()
    cv = cv.read()
    self.assertEqual(len(cv.version), 2)
    cv.version.sort(key=lambda version: version.id)
    cvv = cv.version[-1].read()
    # Promote content view to lifecycle environment
    promote(cvv, lce.id)
    cvv = cvv.read()
    self.assertEqual(len(cvv.environment), 2)
    # Assert that a task to sync lifecycle environment to the capsule
    # is started (or finished already)
    sync_status = capsule.content_get_sync()
    self.assertTrue(
        len(sync_status['active_sync_tasks']) >= 1 or
        sync_status['last_sync_time'])
    # Check whether the symlinks for all the packages were created on
    # satellite
    cvv_repo_path = form_repo_path(
        org=org.label,
        cv=cv.label,
        cvv=cvv.version,
        prod=prod.label,
        repo=repo.label,
    )
    result = ssh.command('find {}/ -type l'.format(cvv_repo_path))
    self.assertEqual(result.return_code, 0)
    links = set(link for link in result.stdout if link)
    self.assertEqual(len(links), packages_count)
    # Ensure there're no broken symlinks (pointing to nonexistent files) on
    # satellite; `find ... ! -exec test -e {} \; -print` prints only links
    # whose target does not exist
    result = ssh.command(
        'find {}/ -type l ! -exec test -e {{}} \\; -print'.format(
            cvv_repo_path))
    self.assertEqual(result.return_code, 0)
    broken_links = set(link for link in result.stdout if link)
    self.assertEqual(len(broken_links), 0)
    # Wait till capsule sync finishes
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    lce_repo_path = form_repo_path(
        org=org.label,
        lce=lce.label,
        cv=cv.label,
        prod=prod.label,
        repo=repo.label,
    )
    # Check whether the symlinks for all the packages were created on
    # capsule
    result = ssh.command('find {}/ -type l'.format(lce_repo_path),
                         hostname=self.capsule_ip)
    self.assertEqual(result.return_code, 0)
    links = set(link for link in result.stdout if link)
    self.assertEqual(len(links), packages_count)
    # Ensure there're no broken symlinks (pointing to nonexistent files) on
    # capsule
    result = ssh.command(
        'find {}/ -type l ! -exec test -e {{}} \\; -print'.format(
            lce_repo_path),
        hostname=self.capsule_ip)
    self.assertEqual(result.return_code, 0)
    broken_links = set(link for link in result.stdout if link)
    self.assertEqual(len(broken_links), 0)
def test_positive_checksum_sync(self, capsule_configured):
    """Synchronize repository to capsule, update repository's checksum
    type, trigger capsule sync and make sure checksum type was updated on
    capsule

    :id: eb07bdf3-6cd8-4a2f-919b-8dfc84e16115

    :customerscenario: true

    :BZ: 1288656, 1664288, 1732066

    :expectedresults: checksum type is updated in repodata of
        corresponding repository on capsule

    :CaseLevel: System

    :CaseImportance: Critical
    """
    REPOMD_PATH = 'repodata/repomd.xml'
    # Create organization, product, lce and repository with sha256 checksum
    # type
    org = entities.Organization(
        smart_proxy=[capsule_configured.nailgun_capsule.id]).create()
    product = entities.Product(organization=org).create()
    repo = entities.Repository(
        product=product, checksum_type='sha256', download_policy='immediate'
    ).create()
    lce = entities.LifecycleEnvironment(organization=org).create()
    # Associate the lifecycle environment with the capsule
    capsule_configured.nailgun_capsule.content_add_lifecycle_environment(
        data={'environment_id': lce.id}
    )
    result = capsule_configured.nailgun_capsule.content_lifecycle_environments()
    assert len(result['results']) >= 1
    assert lce.id in [capsule_lce['id'] for capsule_lce in result['results']]
    # Sync, publish and promote a repo
    cv = entities.ContentView(organization=org, repository=[repo]).create()
    repo.sync()
    repo = repo.read()
    cv.publish()
    cv = cv.read()
    assert len(cv.version) == 1
    cvv = cv.version[-1].read()
    promote(cvv, lce.id)
    cvv = cvv.read()
    # Library + the promoted LCE
    assert len(cvv.environment) == 2
    # Wait till capsule sync finishes
    sync_status = capsule_configured.nailgun_capsule.content_get_sync()
    assert len(sync_status['active_sync_tasks']) >= 1 or sync_status['last_sync_time']
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    # Remember the sync timestamp so the second sync can be detected even
    # if its task already completed by the time we query
    sync_status = capsule_configured.nailgun_capsule.content_get_sync()
    last_sync_time = sync_status['last_sync_time']
    # Verify repodata's checksum type is sha256, not sha1 on capsule
    lce_repo_path = form_repo_path(
        org=org.label, lce=lce.label, cv=cv.label, prod=product.label, repo=repo.label
    )
    # grep exits non-zero and prints nothing when the pattern is absent
    result = capsule_configured.run(
        f'grep -o \'checksum type="sha1"\' {lce_repo_path}/{REPOMD_PATH}',
    )
    assert result.status != 0
    assert len(result.stdout) == 0
    result = capsule_configured.run(
        f'grep -o \'checksum type="sha256"\' {lce_repo_path}/{REPOMD_PATH}'
    )
    assert result.status == 0
    assert len(result.stdout)
    # Update repo's checksum type to sha1
    repo.checksum_type = 'sha1'
    repo = repo.update(['checksum_type'])
    # Sync, publish, and promote repo
    repo.sync()
    cv.publish()
    cv = cv.read()
    assert len(cv.version) == 2
    cv.version.sort(key=lambda version: version.id)
    cvv = cv.version[-1].read()
    promote(cvv, lce.id)
    cvv = cvv.read()
    assert len(cvv.environment) == 2
    # Wait till capsule sync finishes
    sync_status = capsule_configured.nailgun_capsule.content_get_sync()
    assert (
        len(sync_status['active_sync_tasks'])
        or sync_status['last_sync_time'] != last_sync_time
    )
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    # Verify repodata's checksum type has updated to sha1 on capsule
    result = capsule_configured.run(
        f'grep -o \'checksum type="sha256"\' {lce_repo_path}/{REPOMD_PATH}',
    )
    assert result.status != 0
    assert not len(result.stdout)
    result = capsule_configured.run(
        f'grep -o \'checksum type="sha1"\' {lce_repo_path}/{REPOMD_PATH}',
    )
    # Fixed: use .status, consistent with every other check on the result
    # of capsule_configured.run() in this test (.return_code is not the
    # attribute this result object exposes)
    assert result.status == 0
    assert len(result.stdout) > 0