def test_update_gpgkey(self):
    """@Test: Create a repository and update its GPGKey

    @Assert: The updated repository points to a new GPG key.

    @Feature: Repository

    """
    # Make a repository that points at an initial GPG key.
    first_key_id = entities.GPGKey(
        content=read_data_file(VALID_GPG_KEY_FILE),
        organization=self.org_id,
    ).create()['id']
    repo_id = entities.Repository(
        gpg_key=first_key_id,
        product=self.prod_id,
    ).create()['id']

    # Create a second GPG key and point the repository at it via PUT.
    second_key_id = entities.GPGKey(
        content=read_data_file(VALID_GPG_KEY_BETA_FILE),
        organization=self.org_id,
    ).create()['id']
    response = client.put(
        entities.Repository(id=repo_id).path(),
        {u'gpg_key_id': second_key_id},
        auth=get_server_credentials(),
        verify=False,
    )
    response.raise_for_status()

    # The server should now report the second key.
    attrs = entities.Repository(id=repo_id).read_json()
    self.assertEqual(attrs['gpg_key_id'], second_key_id)
def test_create_gpgkey(self):
    """@Test: Create a repository and provide a GPG key ID.

    @Assert: A repository is created with the given GPG key ID.

    @Feature: Repository

    """
    # Dependency tree being built:
    #
    # repository -> product -.
    #           `-> gpg key --`-> organization
    key_id = entities.GPGKey(
        content=read_data_file(VALID_GPG_KEY_FILE),
        organization=self.org_id,
    ).create()['id']
    repository = entities.Repository(
        gpg_key=key_id,
        product=self.prod_id,
    ).create()

    # The server should report the GPG key we supplied.
    attrs = entities.Repository(id=repository['id']).read_json()
    self.assertEqual(attrs['gpg_key_id'], key_id)
def test_update_docker_repo_upstream_name(self, name):
    """@Test: Create a Docker-type repository and update its upstream name.

    @Assert: A repository is created with a Docker image and that its
    upstream name can be updated.

    @Feature: Docker

    @BZ: 1193669

    """
    initial_upstream = u'busybox'
    updated_upstream = u'fedora/ssh'
    prod_id = entities.Product(
        organization=self.org_id).create_json()['id']
    repo_id = _create_repository(prod_id, name, initial_upstream)['id']

    # Confirm the attributes the repository was created with.
    attrs = entities.Repository(id=repo_id).read_json()
    self.assertEqual(attrs['name'], name)
    self.assertEqual(attrs['docker_upstream_name'], initial_upstream)
    self.assertEqual(attrs['content_type'], u'docker')

    # PUT back the full attribute dict, carrying a new upstream name.
    attrs['docker_upstream_name'] = updated_upstream
    client.put(
        entities.Repository(id=repo_id).path(),
        attrs,
        auth=get_server_credentials(),
        verify=False,
    ).raise_for_status()
    refreshed = entities.Repository(id=repo_id).read_json()
    self.assertEqual(refreshed['docker_upstream_name'], updated_upstream)
    self.assertNotEqual(refreshed['name'], initial_upstream)
def test_update_docker_repo_url(self, name):
    """@Test: Create a Docker-type repository and update its URL.

    @Assert: A repository is created with a Docker image and that its
    URL can be updated.

    @Feature: Docker

    """
    target_url = gen_url(scheme='https')
    prod_id = entities.Product(
        organization=self.org_id).create_json()['id']
    repo_id = _create_repository(prod_id, name)['id']

    # The repository starts out pointing at the default registry hub.
    attrs = entities.Repository(id=repo_id).read_json()
    self.assertEqual(attrs['url'], DOCKER_REGISTRY_HUB)

    # PUT back the full attribute dict, carrying the new URL.
    attrs['url'] = target_url
    client.put(
        entities.Repository(id=repo_id).path(),
        attrs,
        auth=get_server_credentials(),
        verify=False,
    ).raise_for_status()
    refreshed = entities.Repository(id=repo_id).read_json()
    self.assertEqual(refreshed['url'], target_url)
    self.assertNotEqual(refreshed['url'], DOCKER_REGISTRY_HUB)
def test_update_name(self, content_type):
    """@Test: Update a repository's name.

    @Assert: The repository's name is updated.

    @Feature: Repository

    The only data provided with the PUT request is a name. No other
    information about the repository (such as its URL) is provided.

    """
    if content_type == 'docker' and bz_bug_is_open(1194476):
        self.skipTest(1194476)
    repository = entities.Repository(
        id=entities.Repository(content_type=content_type).create_json()['id']
    )
    new_name = entities.Repository.name.gen_value()
    # Send a name-only update.
    client.put(
        repository.path(),
        {'name': new_name},
        auth=get_server_credentials(),
        verify=False,
    ).raise_for_status()
    self.assertEqual(new_name, repository.read_json()['name'])
def test_delete(self, attrs):
    """@Test: Create a repository with attributes ``attrs`` and delete it.

    @Assert: The repository cannot be fetched after deletion.

    @Feature: Repository

    """
    # (star-args) pylint:disable=W0142
    repository = entities.Repository(product=self.prod_id, **attrs).create()
    entities.Repository(id=repository['id']).delete()
    # Fetching a deleted repository should fail.
    with self.assertRaises(HTTPError):
        entities.Repository(id=repository['id']).read_json()
def test_sync(self):
    """@Test: Create a repo and sync it.

    @Assert: The repo has more than one RPM.

    @Feature: Repository

    """
    repo_id = entities.Repository(product=self.prod_id).create()['id']
    sync_result = entities.Repository(id=repo_id).sync()['result']
    self.assertEqual(u'success', sync_result)
    # A successful sync should have pulled in at least one RPM.
    counts = entities.Repository(id=repo_id).read_json()[u'content_counts']
    self.assertGreaterEqual(counts[u'rpm'], 1)
def test_create_attrs(self, attrs):
    """@Test: Create a repository and provide valid attributes.

    @Assert: A repository is created with the given attributes.

    @Feature: Repository

    """
    # (star-args) pylint:disable=W0142
    repo_id = entities.Repository(
        product=self.prod_id, **attrs).create()['id']
    server_attrs = entities.Repository(id=repo_id).read_json()
    # Every attribute we sent must come back unchanged.
    for key, value in attrs.items():
        self.assertIn(key, server_attrs.keys())
        self.assertEqual(server_attrs[key], value)
def enable_rhrepo_and_fetchid(basearch, org_id, product, repo, reposet,
                              releasever):
    """Enable a RedHat Repository and fetch its Id.

    :param str basearch: The architecture of the repository.
    :param str org_id: The organization Id.
    :param str product: The product name in which repository exists.
    :param str repo: The repository name whose Id is to be fetched.
    :param str reposet: The reposet name in which repository exists.
    :param str releasever: The releasever of the repository.
    :return: Returns the repository Id.
    :rtype: str
    :raises entities.APIResponseError: If enabling the repository fails.

    """
    product_id = entities.Product().fetch_rhproduct_id(
        name=product, org_id=org_id)
    reposet_id = entities.Product(id=product_id).fetch_reposet_id(
        name=reposet)
    task = entities.Product(id=product_id).enable_rhrepo(
        base_arch=basearch,
        release_ver=releasever,
        reposet_id=reposet_id,
    )
    if task['result'] != "success":
        raise entities.APIResponseError(
            'Enabling the RedHat Repository {0} failed. Error: {1}'.format(
                repo, task['humanized']['errors']))
    return entities.Repository().fetch_repoid(name=repo, org_id=org_id)
def test_redhat_sync_1(self):
    """@Test: Sync RedHat Repository.

    @Feature: Repositories

    @Assert: Repository synced should fetch the data successfully.

    """
    repo = "Red Hat Enterprise Linux 6 Server - RH Common RPMs x86_64 6.3"
    org_id = entities.Organization().create()['id']
    # A manifest must be uploaded before RH repos can be enabled.
    task = entities.Organization(id=org_id).upload_manifest(
        path=manifests.clone())
    self.assertEqual(
        u'success', task['result'], task['humanized']['errors'])
    repo_id = utils.enable_rhrepo_and_fetchid(
        "x86_64",
        org_id,
        "Red Hat Enterprise Linux Server",
        repo,
        "Red Hat Enterprise Linux 6 Server - RH Common (RPMs)",
        "6.3",
    )
    sync_result = entities.Repository(id=repo_id).sync()['result']
    self.assertEqual(
        sync_result,
        u'success',
        u"Sync for repository '{0}' failed.".format(repo))
def test_add_docker_repo_to_content_view(self, name):
    """@Test: Add one Docker-type repository to a non-composite content view

    @Assert: A repository is created with a Docker repository and the
    product is added to a non-composite content view

    @Feature: Docker

    """
    upstream = u'busybox'
    prod_id = entities.Product(
        organization=self.org_id).create_json()['id']
    repo_id = _create_repository(prod_id, name, upstream)['id']
    repo_attrs = entities.Repository(id=repo_id).read_json()
    self.assertEqual(repo_attrs['name'], name)
    self.assertEqual(repo_attrs['docker_upstream_name'], upstream)
    self.assertEqual(repo_attrs['content_type'], u'docker')

    # Attach the repository to a brand new non-composite content view.
    cv_id = entities.ContentView(
        organization=self.org_id, composite=False).create_json()['id']
    _add_repo_to_content_view(repo_id, cv_id)
    cv_attrs = entities.ContentView(id=cv_id).read_json()
    self.assertIn(repo_id, cv_attrs['repository_ids'])
def test_create_same_name(self):
    """@Test: Create two repos with the same name in two organizations.

    @Assert: The two repositories are successfully created and use the
    given name.

    @Feature: Repository

    """
    name = entities.Repository.name.gen_value()
    # One repo under self.prod_id, one under a freshly generated product.
    repo_1 = entities.Repository(name=name, product=self.prod_id).create()
    repo_2 = entities.Repository(name=name).create()
    # Both the create responses and fresh reads must carry the name.
    for attrs in (
            repo_1,
            repo_2,
            entities.Repository(id=repo_1['id']).read_json(),
            entities.Repository(id=repo_2['id']).read_json()):
        self.assertEqual(attrs['name'], name)
def test_sync_docker_repo(self):
    """@Test: Create and sync a Docker-type repository

    @Assert: A repository is created with a Docker repository
    and it is synchronized.

    @Feature: Docker

    """
    prod_id = entities.Product(
        organization=self.org_id).create_json()['id']
    repo_id = _create_repository(prod_id)['id']
    sync_task = entities.Repository(id=repo_id).sync()
    self.assertEqual(u'success', sync_task['result'], sync_task)
    # The sync should have pulled in at least one Docker image.
    counts = entities.Repository(id=repo_id).read_json()[u'content_counts']
    self.assertGreaterEqual(counts[u'docker_image'], 1)
def setup_to_create_cv(self, repo_name=None, repo_url=None, repo_type=None,
                       rh_repo=None, org_id=None):
    """Create product/repo and sync it"""
    if rh_repo:
        # Clone the manifest and upload it; RH repos need a manifest.
        task = entities.Organization(id=org_id).upload_manifest(
            path=manifests.clone())
        self.assertEqual(
            u'success', task['result'], task['humanized']['errors'])
        # Enable the RedHat repo and fetch its Id.
        repo_id = utils.enable_rhrepo_and_fetchid(
            rh_repo['basearch'],
            str(org_id),  # Org Id is passed as data in API hence str
            rh_repo['product'],
            rh_repo['name'],
            rh_repo['reposet'],
            rh_repo['releasever'])
        repo_name = rh_repo['name']
    else:
        repo_name = repo_name or gen_string("alpha", 8)
        # Create a new custom product via the API.
        product_attrs = entities.Product(
            organization=org_id or self.org_id).create()
        # Create a new custom repository via the API.
        repo_id = entities.Repository(
            name=repo_name,
            url=repo_url or FAKE_1_YUM_REPO,
            content_type=repo_type or REPO_TYPE['yum'],
            product=product_attrs['id'],
        ).create()['id']
    # Sync repository
    sync_result = entities.Repository(id=repo_id).sync()['result']
    self.assertEqual(
        sync_result,
        u'success',
        u"Sync for repository {0} failed.".format(repo_name))
def setUpClass(cls):  # noqa
    """Set up organization, product and repositories for tests."""
    super(CVPublishPromoteTestCase, cls).setUpClass()
    # Shared organization and product for every test in this case.
    cls.org = entities.Organization()
    cls.org.id = cls.org.create_json()['id']
    cls.product = entities.Product(organization=cls.org.id)
    cls.product.id = cls.product.create_json()['id']
    # A yum repository, synchronized so it has content.
    cls.yum_repo = entities.Repository(product=cls.product.id)
    cls.yum_repo.id = cls.yum_repo.create_json()['id']
    cls.yum_repo.sync()
    # A puppet repository with the NTP module uploaded into it.
    cls.puppet_repo = entities.Repository(
        content_type='puppet',
        product=cls.product.id,
    )
    cls.puppet_repo.id = cls.puppet_repo.create_json()['id']
    cls.puppet_repo.upload(PUPPET_MODULE_NTP_PUPPETLABS)
def test_sync_docker_repo(self):
    """@Test: Create and sync a Docker-type repository

    @Assert: A repository is created with a Docker repository
    and it is synchronized.

    @Feature: Repository

    """
    prod_id = entities.Product(organization=self.org_id).create()['id']
    repo_id = entities.Repository(
        product=prod_id,
        content_type=u'docker',
        name=u'busybox',
        docker_upstream_name=u'busybox',
        url=DOCKER_REGISTRY_HUB,
    ).create()['id']
    self.assertEqual(
        u'success', entities.Repository(id=repo_id).sync()['result'])
    # The sync should have pulled in at least one Docker image.
    counts = entities.Repository(id=repo_id).read_json()[u'content_counts']
    self.assertGreaterEqual(counts[u'docker_image'], 1)
def test_create_docker_repo(self, name):
    """@Test: Create a Docker-type repository

    @Assert: A repository is created with a Docker repository.

    @Feature: Repository

    """
    upstream = u'busybox'
    prod_id = entities.Product(organization=self.org_id).create()['id']
    repo_id = entities.Repository(
        product=prod_id,
        content_type=u'docker',
        name=name,
        docker_upstream_name=upstream,
        url=DOCKER_REGISTRY_HUB,
    ).create()['id']
    # Read the repository back and verify its attributes.
    attrs = entities.Repository(id=repo_id).read_json()
    self.assertEqual(attrs['name'], name)
    self.assertEqual(attrs['docker_upstream_name'], upstream)
    self.assertEqual(attrs['content_type'], u'docker')
def test_delete_docker_repo(self, name):
    """@Test: Create and delete a Docker-type repository

    @Assert: A repository is created with a Docker image and then deleted.

    @Feature: Docker

    """
    upstream = u'busybox'
    prod_id = entities.Product(
        organization=self.org_id).create_json()['id']
    repo_id = _create_repository(prod_id, name, upstream)['id']
    attrs = entities.Repository(id=repo_id).read_json()
    self.assertEqual(attrs['name'], name)
    self.assertEqual(attrs['docker_upstream_name'], upstream)
    self.assertEqual(attrs['content_type'], u'docker')

    # Once deleted, reading it back should fail.
    entities.Repository(id=repo_id).delete()
    with self.assertRaises(HTTPError):
        entities.Repository(id=repo_id).read_json()
def test_delete_random_docker_repo(self):
    """@Test: Create Docker-type repositories on multiple products and
    delete a random repository from a random product.

    @Assert: Random repository can be deleted from random product without
    altering the other products.

    @Feature: Docker

    """
    upstream = u'busybox'
    product_ids = [
        entities.Product(organization=self.org_id).create_json()['id']
        for _ in range(randint(1, 5))
    ]
    repository_ids = []
    for product_id in product_ids:
        repo_name = gen_string('utf8', 15)
        repo_id = _create_repository(product_id, repo_name, upstream)['id']
        attrs = entities.Repository(id=repo_id).read_json()
        self.assertEqual(attrs['name'], repo_name)
        self.assertEqual(attrs['docker_upstream_name'], upstream)
        self.assertEqual(attrs['content_type'], u'docker')
        repository_ids.append(repo_id)

    # Delete a random repository.
    shuffle(repository_ids)
    deleted_id = repository_ids.pop()
    entities.Repository(id=deleted_id).delete()
    with self.assertRaises(HTTPError):
        entities.Repository(id=deleted_id).read_json()

    # The remaining repositories must be untouched.
    for remaining_id in repository_ids:
        repository = entities.Repository(id=remaining_id).read_json()
        self.assertIn(repository['product']['id'], product_ids)
def test_update_contents(self):
    """@Test: Create a repository and upload RPM contents.

    @Assert: The repository's contents include one RPM.

    @Feature: Repository

    """
    # Create a repository and upload RPM content.
    repo_id = entities.Repository(product=self.prod_id).create()['id']
    # Use a context manager so the RPM file handle is closed even if the
    # upload fails; the original leaked the handle returned by open().
    with open(get_data_file(RPM_TO_UPLOAD), 'rb') as handle:
        response = client.post(
            entities.Repository(id=repo_id).path(which='upload_content'),
            {},
            auth=get_server_credentials(),
            files={u'content': handle},
            verify=False,
        )
    response.raise_for_status()
    # Verify the repository's contents.
    attrs = entities.Repository(id=repo_id).read_json()
    self.assertEqual(attrs[u'content_counts'][u'rpm'], 1)
def test_add_synced_docker_repo_to_content_view(self):
    """@Test: Create and sync a Docker-type repository

    @Assert: A repository is created with a Docker repository
    and it is synchronized.

    @Feature: Docker

    """
    prod_id = entities.Product(
        organization=self.org_id).create_json()['id']
    repo_id = _create_repository(prod_id)['id']
    sync_task = entities.Repository(id=repo_id).sync()
    self.assertEqual(u'success', sync_task['result'], sync_task)
    counts = entities.Repository(id=repo_id).read_json()[u'content_counts']
    self.assertGreaterEqual(counts[u'docker_image'], 1)

    # Associate the synced repository with a fresh content view.
    cv_id = entities.ContentView(
        organization=self.org_id, composite=False).create_json()['id']
    _add_repo_to_content_view(repo_id, cv_id)
    self.assertIn(
        repo_id,
        entities.ContentView(id=cv_id).read_json()['repository_ids'])
def test_create_one_docker_repo(self, name):
    """@Test: Create one Docker-type repository

    @Assert: A repository is created with a Docker image.

    @Feature: Docker

    """
    prod_id = entities.Product(
        organization=self.org_id).create_json()['id']
    # _create_repository defaults the upstream name to 'busybox'.
    attrs = entities.Repository(
        id=_create_repository(prod_id, name)['id']).read_json()
    self.assertEqual(attrs['name'], name)
    self.assertEqual(attrs['docker_upstream_name'], u'busybox')
    self.assertEqual(attrs['content_type'], u'docker')
def test_syncnow_custom_repos_1(self, repository_name):
    """@Test: Create Custom yum repos and sync it via the repos page.

    @Feature: Custom yum Repos - Sync via repos page

    @Assert: Whether Sync is successful

    """
    # Create a new product with a yum repository inside it.
    product_attrs = entities.Product(organization=self.org_id).create()
    entities.Repository(
        name=repository_name,
        url=FAKE_1_YUM_REPO,
        product=product_attrs['id'],
    ).create()
    with Session(self.browser) as session:
        self.setup_navigate_syncnow(
            session, product_attrs['name'], repository_name)
        # prd_sync_is_ok returns a boolean, not an object.
        self.assertTrue(self.prd_sync_is_ok(repository_name))
def _create_repository(prod_id, name=None, upstream_name=None):
    """Create a Docker-based repository.

    :param str prod_id: ID of the product the repository is created under.
    :param str name: Name for the repository. If ``None`` then a random
        value will be generated.
    :param str upstream_name: A valid name for an existing Docker image.
        If ``None`` then defaults to ``busybox``.
    :return: A dictionary representing the created repository.

    """
    repo_name = gen_string(gen_choice(STRING_TYPES), 15) if name is None else name
    docker_image = u'busybox' if upstream_name is None else upstream_name
    return entities.Repository(
        content_type=u'docker',
        docker_upstream_name=docker_image,
        name=repo_name,
        product=prod_id,
        url=DOCKER_REGISTRY_HUB,
    ).create_json()
def test_sync_custom_repos(self, repository_name):
    """@Test: Create Content Custom Sync with minimal input parameters

    @Feature: Content Custom Sync - Positive Create

    @Assert: Whether Sync is successful

    """
    # Create a new product with a yum repository inside it.
    product_attrs = entities.Product(organization=self.org_id).create()
    entities.Repository(
        name=repository_name,
        url=FAKE_1_YUM_REPO,
        product=product_attrs['id'],
    ).create()
    with Session(self.browser) as session:
        session.nav.go_to_select_org(Sync.org_name)
        session.nav.go_to_sync_status()
        # sync_custom_repos returns a boolean, not an object.
        self.assertTrue(self.sync.sync_custom_repos(
            product_attrs['name'], [repository_name]))
def test_syncnow_custom_repos_3(self, repository_name):
    """@Test: Create Custom docker repos and sync it via the repos page.

    @Feature: Custom docker Repos - Sync via repos page

    @Assert: Whether Sync is successful

    """
    # Create a new product with a docker repository inside it.
    product_attrs = entities.Product(organization=self.org_id).create()
    entities.Repository(
        content_type=REPO_TYPE['docker'],
        name=repository_name,
        product=product_attrs['id'],
        url=DOCKER_REGISTRY_HUB,
    ).create()
    with Session(self.browser) as session:
        self.setup_navigate_syncnow(
            session, product_attrs['name'], repository_name)
        # prd_sync_is_ok returns a boolean, not an object.
        self.assertTrue(self.prd_sync_is_ok(repository_name))
def test_end_to_end(self):
    """@Test: Perform end to end smoke tests using RH repos.

    1. Create new organization and environment
    2. Clone and upload manifest
    3. Sync a RedHat repository
    4. Create content-view
    5. Add repository to content-view
    6. Promote/publish content-view
    7. Create an activation-key
    8. Add product to activation-key
    9. Create new virtualmachine
    10. Pull rpm from Foreman server and install on client
    11. Register client with foreman server using activation-key
    12. Install rpm on client

    @Feature: Smoke test

    @Assert: All tests should succeed and Content should be successfully
    fetched by client

    """
    product = "Red Hat Enterprise Linux Server"
    reposet = ("Red Hat Enterprise Virtualization Agents "
               "for RHEL 6 Server (RPMs)")
    repo = ("Red Hat Enterprise Virtualization Agents for RHEL 6 Server "
            "RPMs x86_64 6Server")
    activation_key_name = gen_string('alpha')

    # step 1.1: Create a new organization
    org = entities.Organization().create()

    # step 1.2: Create new lifecycle environments
    lifecycle_env = entities.LifecycleEnvironment(
        organization=org['id']
    ).create()

    # step 2: Upload manifest (required before any RH repo can be enabled)
    manifest_path = manifests.clone()
    task = entities.Organization(
        id=org['id']
    ).upload_manifest(path=manifest_path)
    self.assertEqual(
        u'success', task['result'], task['humanized']['errors']
    )
    # step 3.1: Enable RH repo and fetch repository_id
    repo_id = utils.enable_rhrepo_and_fetchid(
        basearch="x86_64",
        org_id=org['id'],
        product=product,
        repo=repo,
        reposet=reposet,
        releasever="6Server",
    )
    # step 3.2: sync repository
    task_result = entities.Repository(id=repo_id).sync()['result']
    self.assertEqual(
        task_result,
        u'success',
        u" Error while syncing repository '{0}' and state is {1}."
        .format(repo, task_result))

    # step 4: Create content view
    content_view = entities.ContentView(organization=org['id']).create()
    # step 5: Associate repository to new content view
    response = client.put(
        entities.ContentView(id=content_view['id']).path(),
        {u'repository_ids': [repo_id]},
        auth=get_server_credentials(),
        verify=False,
    )
    response.raise_for_status()

    # step 6.1: Publish content view
    task_status = entities.ContentView(id=content_view['id']).publish()
    self.assertEqual(
        task_status['result'],
        u'success',
        u"Error publishing content-view {0} and state is {1}."
        .format(content_view['name'], task_status['result']))

    # step 6.2: Promote content view to lifecycle_env.  Re-read the
    # content view first so its 'versions' list is populated.
    content_view = entities.ContentView(id=content_view['id']).read_json()
    self.assertEqual(len(content_view['versions']), 1)
    task_status = entities.ContentViewVersion(
        id=content_view['versions'][0]['id']
    ).promote(lifecycle_env['id'])
    self.assertEqual(
        task_status['result'],
        u'success',
        u"Error promoting {0} to {1} and state is {2}."
        .format(content_view['name'],
                lifecycle_env['name'],
                task_status['result']))

    # step 7: Create activation key
    ak_id = entities.ActivationKey(
        name=activation_key_name,
        environment=lifecycle_env['id'],
        organization=org['id'],
        content_view=content_view['id'],
    ).create_json()['id']

    # Walk through the list of subscriptions. Find the "Red Hat Employee
    # Subscription" and attach it to the just-created activation key.
    for subscription in entities.Organization(id=org['id']).subscriptions():
        if subscription['product_name'] == "Red Hat Employee Subscription":
            # 'quantity' must be 1, not subscription['quantity']. Greater
            # values produce this error: "RuntimeError: Error: Only pools
            # with multi-entitlement product subscriptions can be added to
            # the activation key with a quantity greater than one."
            entities.ActivationKey(id=ak_id).add_subscriptions({
                'quantity': 1,
                'subscription_id': subscription['id'],
            })
            break
    # Create VM
    package_name = "python-kitchen"
    with VirtualMachine(distro='rhel66') as vm:
        # Download and Install the katello-ca rpm from the server's /pub
        result = vm.run(
            "wget -nd -r -l1 --no-parent -A '*.noarch.rpm' http://{0}/pub/"
            .format(conf.properties['main.server.hostname'])
        )
        self.assertEqual(
            result.return_code, 0,
            "failed to fetch katello-ca rpm: {0}, return code: {1}"
            .format(result.stderr, result.return_code)
        )
        result = vm.run(
            'rpm -i katello-ca-consumer*.noarch.rpm'
        )
        self.assertEqual(
            result.return_code, 0,
            "failed to install katello-ca rpm: {0} and return code: {1}"
            .format(result.stderr, result.return_code)
        )
        # Register client with foreman server using activation-key
        result = vm.run(
            u'subscription-manager register --activationkey {0} '
            '--org {1} --force'
            .format(activation_key_name, org['label'])
        )
        self.assertEqual(
            result.return_code, 0,
            "failed to register client:: {0} and return code: {1}"
            .format(result.stderr, result.return_code)
        )
        # Enable Red Hat Enterprise Virtualization Agents repo via cli
        # As the below repo is disabled by default under ak's prd-content
        result = vm.run(
            'subscription-manager repos --enable '
            'rhel-6-server-rhev-agent-rpms'
        )
        self.assertEqual(
            result.return_code, 0,
            "Enabling repo failed: {0} and return code: {1}"
            .format(result.stderr, result.return_code)
        )
        # Install contents from sat6 server
        result = vm.run('yum install -y {0}'.format(package_name))
        self.assertEqual(
            result.return_code, 0,
            "Package install failed: {0} and return code: {1}"
            .format(result.stderr, result.return_code)
        )
        # Verify the package is installed by querying it.
        # NOTE(review): assumes result.stdout is a list of output lines
        # and the package name appears on the first line - confirm
        # against vm.run's return type.
        result = vm.run('rpm -q {0}'.format(package_name))
        self.assertIn(package_name, result.stdout[0])
def test_smoke(self):
    """@Test: Check that basic content can be created

    1. Create a new user with admin permissions
    2. Using the new user from above:
        1. Create a new organization
        2. Create two new lifecycle environments
        3. Create a custom product
        4. Create a custom YUM repository
        5. Create a custom PUPPET repository
        6. Synchronize both custom repositories
        7. Create a new content view
        8. Associate both repositories to new content view
        9. Publish content view
        10. Promote content view to both lifecycles
        11. Create a new libvirt compute resource
        12. Create a new subnet
        13. Create a new domain
        14. Create a new hostgroup and associate previous entities to it

    @Feature: Smoke Test

    @Assert: All entities are created and associated.

    """
    # prep work
    #
    # FIXME: Use a larger charset when authenticating users.
    #
    # It is possible to create a user with a wide range of characters. (see
    # the "User" entity). However, Foreman supports only HTTP Basic
    # authentication, and the requests lib enforces the latin1 charset in
    # this auth mode. We then further restrict ourselves to the
    # alphanumeric charset, because Foreman complains about incomplete
    # multi-byte chars when latin1 chars are used.
    login = gen_string('alphanumeric')
    password = gen_string('alphanumeric')

    # step 1: Create a new user with admin permissions
    entities.User(admin=True, login=login, password=password).create()

    # step 2.1: Create a new organization, authenticating as the new user
    org = entities.Organization().create(auth=(login, password))

    # step 2.2: Create 2 new lifecycle environments (le2 follows le1)
    le1 = entities.LifecycleEnvironment(organization=org['id']).create()
    le2 = entities.LifecycleEnvironment(
        organization=org['id'], prior=le1['id']).create()

    # step 2.3: Create a custom product
    prod = entities.Product(organization=org['id']).create()

    # step 2.4: Create custom YUM repository
    repo1 = entities.Repository(
        product=prod['id'],
        content_type=u'yum',
        url=GOOGLE_CHROME_REPO
    ).create()

    # step 2.5: Create custom PUPPET repository
    repo2 = entities.Repository(
        product=prod['id'],
        content_type=u'puppet',
        url=FAKE_0_PUPPET_REPO
    ).create()

    # step 2.6: Synchronize both repositories, polling each sync task
    for repo in [repo1, repo2]:
        response = client.post(
            entities.Repository(id=repo['id']).path('sync'),
            {
                u'ids': [repo['id']],
                u'organization_id': org['id']
            },
            auth=get_server_credentials(),
            verify=False,
        ).json()
        self.assertGreater(
            len(response['id']),
            1,
            u"Was not able to fetch a task ID.")
        task_status = entities.ForemanTask(id=response['id']).poll()
        self.assertEqual(
            task_status['result'],
            u'success',
            u"Sync for repository {0} failed.".format(repo['name']))

    # step 2.7: Create content view
    content_view = entities.ContentView(organization=org['id']).create()

    # step 2.8: Associate YUM repository to new content view
    response = client.put(
        entities.ContentView(id=content_view['id']).path(),
        auth=get_server_credentials(),
        verify=False,
        data={u'repository_ids': [repo1['id']]})

    # Fetch all available puppet modules
    puppet_mods = client.get(
        entities.ContentView(id=content_view['id']).path(
            'available_puppet_module_names'),
        auth=get_server_credentials(),
        verify=False).json()
    # NOTE(review): 'results' looks like a list, so this relies on
    # Python 2's list > int ordering; len(...) would be clearer.
    self.assertGreater(
        puppet_mods['results'],
        0,
        u"No puppet modules were found")

    # Select a random puppet module from the results
    puppet_mod = random.choice(puppet_mods['results'])
    # ... and associate it to the content view
    path = entities.ContentView(id=content_view['id']).path(
        'content_view_puppet_modules')
    response = client.post(
        path,
        auth=get_server_credentials(),
        verify=False,
        data={u'name': puppet_mod['module_name']})
    self.assertEqual(
        response.status_code,
        httplib.OK,
        status_code_error(path, httplib.OK, response)
    )
    self.assertEqual(
        response.json()['name'],
        puppet_mod['module_name'],
    )

    # step 2.9: Publish content view
    task_status = entities.ContentView(id=content_view['id']).publish()
    self.assertEqual(
        task_status['result'],
        u'success',
        u"Publishing {0} failed.".format(content_view['name']))

    # step 2.10: Promote content view to both lifecycles.  Re-read the
    # content view so its 'versions' list is populated.
    content_view = entities.ContentView(id=content_view['id']).read_json()
    self.assertEqual(
        len(content_view['versions']),
        1,
        u'There should only be 1 version published.')
    self.assertEqual(
        len(content_view['versions'][0]['environment_ids']),
        1,
        u"Content view should be present on 1 lifecycle only")
    task_status = entities.ContentViewVersion(
        id=content_view['versions'][0]['id']
    ).promote(le1['id'])
    self.assertEqual(
        task_status['result'],
        u'success',
        u"Promoting {0} to {1} failed.".format(
            content_view['name'], le1['name']))

    # Check that content view exists in 2 lifecycles
    content_view = entities.ContentView(id=content_view['id']).read_json()
    self.assertEqual(
        len(content_view['versions']),
        1,
        u'There should only be 1 version published.')
    self.assertEqual(
        len(content_view['versions'][0]['environment_ids']),
        2,
        u"Content view should be present on 2 lifecycles only")
    task_status = entities.ContentViewVersion(
        id=content_view['versions'][0]['id']
    ).promote(le2['id'])
    self.assertEqual(
        task_status['result'],
        u'success',
        u"Promoting {0} to {1} failed.".format(
            content_view['name'], le2['name']))

    # Check that content view now exists in 3 lifecycles
    content_view = entities.ContentView(id=content_view['id']).read_json()
    self.assertEqual(
        len(content_view['versions']),
        1,
        u'There should only be 1 version published.')
    self.assertEqual(
        len(content_view['versions'][0]['environment_ids']),
        3,
        u"Content view should be present on 3 lifecycle only")

    # BONUS: Create a content host and associate it with promoted
    # content view and last lifecycle where it exists
    content_host = entities.System(
        content_view=content_view['id'],
        environment=le2['id']
    ).create()
    # Check that content view matches what we passed
    self.assertEqual(
        content_host['content_view_id'],
        content_view['id'],
        u"Content views do not match."
    )
    # Check that lifecycle environment matches
    self.assertEqual(
        content_host['environment']['id'],
        le2['id'],
        u"Environments do not match."
    )
def setUpClass(cls):  # noqa
    """Create a repository which can be repeatedly updated."""
    # Create the repository once, then wrap its ID in an entity object
    # that individual tests can reuse.
    repo_id = entities.Repository().create()['id']
    cls.repository = entities.Repository(id=repo_id)