def test_pre_scenario_preclient_package_installation(self):
        """Create product and repo from which the package will be installed
        post upgrade

        :id: preupgrade-eedab638-fdc9-41fa-bc81-75dd2790f7be

        :steps:

            1. Create a content host with existing client ak
            2. Create and sync repo from which the package will be
                installed on content host
            3. Add repo to CV and then in Activation key

        :expectedresults:

            1. The content host is created
            2. The new repo and its product has been added to ak using which
                the content host is created

        """
        # Every org has a built-in 'Library' lifecycle environment; use it
        # as the prior of the new environment created below.
        prior_env = entities.LifecycleEnvironment(
            organization=self.org
        ).search(query={'search': 'name=Library'})[0]
        environment = entities.LifecycleEnvironment(
            organization=self.org,
            prior=prior_env.id,
            label=self.le_lable,
            name=self.le_name
        ).create()
        # Activation key the client container registers with; 'pre' selects
        # the pre-upgrade satellite state.
        ak = create_activation_key_for_client_registration(
            ak_name=self.ak_name,
            client_os='rhel7',
            org=self.org,
            environment=environment,
            sat_state='pre'
        )
        # dockerize() returns a one-entry mapping of client name to docker
        # container id (Python 2: dict.keys()/values() are indexable lists).
        rhel7_client = dockerize(
            ak_name=ak.name, distro='rhel7', org_label=self.org.label)
        client_container_id = rhel7_client.values()[0]
        client_name = rhel7_client.keys()[0]
        product, yum_repo = create_yum_test_repo(
            product_name=self.prod_name, repo_url=FAKE_REPO_ZOO3, org=self.org)
        update_product_subscription_in_ak(
            product=product, yum_repo=yum_repo, ak=ak, org=self.org)
        # Give the AK/subscription update time to settle before attaching.
        time.sleep(30)
        execute(
            attach_subscription_to_host_from_satellite,
            self.org.id,
            product.name,
            client_name,
            host=get_satellite_host()
        )
        # Refresh subscriptions on client
        execute(
            docker_execute_command,
            client_container_id,
            'subscription-manager refresh',
            host=self.docker_vm
        )
        # Run goferd on client as its docker container
        # NOTE: 'async' became a reserved word in Python 3.7; this module
        # targets Python 2 (see the print statements elsewhere in the file).
        execute(
            docker_execute_command,
            client_container_id,
            'goferd -f',
            async=True,
            host=self.docker_vm
        )
        # Persist the client info so the post-upgrade scenario can reuse it.
        create_dict(
            {self.__class__.__name__: rhel7_client}
        )
    def test_post_scenario_postclient_package_installation(self):
        """Post-upgrade scenario that creates and installs the package on
        post-upgrade client remotely and then verifies if the package installed

        :id: postupgrade-1a881c07-595f-425f-aca9-df2337824a8e

        :steps:

            1. Create a content host with existing client ak
            2. Create and sync repo from which the package will be
                installed on content host
            3. Add repo to CV and then in Activation key
            4. Install package on a pre-upgrade client

        :expectedresults:

            1. The content host is created
            2. The new repo and its product has been added to ak using which
                the content host is created
            3. The package is installed on post-upgrade client
        """
        org = entities.Organization(name=self.org_name).create()
        # Every org has a built-in 'Library' lifecycle environment; use it
        # as the prior of the new environment created below.
        prior_env = entities.LifecycleEnvironment(organization=org).search(
            query={'search': 'name=Library'}
        )[0]
        # NOTE(review): the pre-upgrade scenario uses self.le_lable for the
        # label; here le_name is used for both label and name — confirm.
        environment = entities.LifecycleEnvironment(
            organization=org,
            prior=prior_env.id,
            label=self.le_name,
            name=self.le_name
        ).create()
        ak = create_activation_key_for_client_registration(
            ak_name=self.ak_name,
            client_os='rhel7',
            org=org,
            environment=environment,
            sat_state='post'
        )
        # dockerize() returns a one-entry mapping of client name to docker
        # container id (Python 2: dict.keys()/values() are indexable lists).
        rhel7_client = dockerize(
            ak_name=ak.name, distro='rhel7', org_label=org.label)
        client_container_id = rhel7_client.values()[0]
        # Lower-cased for the host search below — presumably host names are
        # stored lower-case in satellite; confirm.
        client_name = rhel7_client.keys()[0].lower()
        product, yum_repo = create_yum_test_repo(
            product_name=self.prod_name, repo_url=FAKE_REPO_ZOO3, org=org)
        update_product_subscription_in_ak(
            product=product, yum_repo=yum_repo, ak=ak, org=org)
        # Give the AK/subscription update time to settle before attaching.
        time.sleep(10)
        execute(
            attach_subscription_to_host_from_satellite,
            org.id,
            product.name,
            client_name,
            host=get_satellite_host()
        )
        # Refresh subscriptions on client
        execute(
            docker_execute_command,
            client_container_id,
            'subscription-manager refresh',
            host=self.docker_vm
        )
        # Run goferd on client as its docker container
        # NOTE: 'async' became a reserved word in Python 3.7; this module
        # targets Python 2.
        execute(
            docker_execute_command,
            client_container_id,
            'goferd -f',
            async=True,
            host=self.docker_vm
        )
        # Holding on for 30 seconds while goferd starts
        time.sleep(30)
        client_id = entities.Host().search(
            query={'search': 'name={}'.format(client_name)}
        )[0].id
        # Trigger a remote package install via the Host bulk-actions API;
        # the goferd agent started above carries it out on the client.
        entities.Host().install_content(data={
            'organization_id': org.id,
            'included': {'ids': [client_id]},
            'content_type': 'package',
            'content': [self.package_name],
        })
        # Allow the install job time to complete before checking.
        time.sleep(20)
        # Validate if that package is really installed
        installed_package = execute(
            docker_execute_command,
            client_container_id,
            'rpm -q {}'.format(self.package_name),
            host=self.docker_vm
        )[self.docker_vm]
        self.assertIn(self.package_name, installed_package)
class Scenario_capsule_sync(TestCase):
    """The test class contains pre-upgrade and post-upgrade scenarios to test if
    package added to satellite preupgrade is synced to capsule post upgrade.

    Test Steps:

    1. Before Satellite upgrade, Sync a repo/rpm in satellite.
    2. Upgrade satellite/capsule.
    3. Run capsule sync post upgrade.
    4. Check if the repo/rpm is been synced to capsule.

    """
    cls_name = 'Scenario_capsule_sync'
    # NOTE: the fabric host is set at class-definition (import) time; run()
    # calls below execute on the satellite unless host= overrides it.
    sat_host = get_satellite_host()
    env.host_string = sat_host
    # Capsule hostname comes from either of two CI environment variables.
    cap_host = os.environ.get('RHEV_CAP_HOST',
                              os.environ.get('CAPSULE_HOSTNAME'))
    repo_name = 'capsulesync_TestRepo_' + cls_name
    repo_path = '/var/www/html/pub/preupgradeCapSync_repo/'
    rpm_name = rpm1.split('/')[-1]
    prod_name = 'Scenario_preUpgradeCapSync_' + cls_name
    # Capsule activation key comes from either of two CI env variables.
    activation_key = os.environ.get('CAPSULE_AK',
                                    os.environ.get('RHEV_CAPSULE_AK'))
    cv_name = 'Scenario_precapSync_' + cls_name
    # Derive the lifecycle environment name from the capsule activation key.
    _, env_name = hammer.hammer_determine_cv_and_env_from_ak(
        activation_key, '1')
    org_id = '1'
    # The repo is served over plain http from the satellite's pub dir.
    repo_url = 'http://' + sat_host + '/pub/preupgradeCapSync_repo/'

    def create_repo(self):
        """ Creates a custom yum repository, that will be synced to satellite
        and later to capsule from satellite
        """
        run('rm -rf {}'.format(self.repo_path))
        run('mkdir {}'.format(self.repo_path))
        run('wget {0} -P {1}'.format(rpm1, self.repo_path))
        # Generate yum metadata for the downloaded rpm
        run('createrepo --database {0}'.format(self.repo_path))

    @pre_upgrade
    def test_pre_user_scenario_capsule_sync(self):
        """Pre-upgrade scenario that creates and sync repository with
        rpm in satellite which will be synced in post upgrade scenario.


        :id: preupgrade-eb8970fa-98cc-4a99-99fb-1c12c4e319c9

        :steps:
            1. Before Satellite upgrade, Sync a repo/rpm in satellite.

        :expectedresults: The repo/rpm should be synced to satellite

         """
        self.create_repo()
        print hammer.hammer_product_create(self.prod_name, self.org_id)
        prod_list = hammer.hammer('product list --organization-id {}'.format(
            self.org_id))
        self.assertEqual(
            self.prod_name,
            hammer.get_attribute_value(prod_list, self.prod_name, 'name'))
        print hammer.hammer_repository_create(self.repo_name, self.org_id,
                                              self.prod_name, self.repo_url)
        repo_list = hammer.hammer(
            'repository list --product {0} --organization-id {1}'.format(
                self.prod_name, self.org_id))
        self.assertEqual(
            self.repo_name,
            hammer.get_attribute_value(repo_list, self.repo_name, 'name'))
        print hammer.hammer_repository_synchronize(self.repo_name, self.org_id,
                                                   self.prod_name)
        # Publish the repo through a content view and promote it to the
        # lifecycle environment tied to the capsule activation key.
        print hammer.hammer_content_view_create(self.cv_name, self.org_id)
        print hammer.hammer_content_view_add_repository(
            self.cv_name, self.org_id, self.prod_name, self.repo_name)
        print hammer.hammer_content_view_publish(self.cv_name, self.org_id)
        cv_ver = hammer.get_latest_cv_version(self.cv_name)
        env_data = hammer.hammer(
            'lifecycle-environment list --organization-id {0} '
            '--name {1}'.format(self.org_id, self.env_name))
        env_id = hammer.get_attribute_value(env_data, self.env_name, 'id')
        print hammer.hammer_content_view_promote_version(
            self.cv_name, cv_ver, env_id, self.org_id)
        # Save the rpm name so the post-upgrade scenario can verify it.
        global_dict = {self.__class__.__name__: {'rpm_name': self.rpm_name}}
        create_dict(global_dict)

    @post_upgrade
    def test_post_user_scenario_capsule_sync(self):
        """Post-upgrade scenario that sync capsule from satellite and then
        verifies if the repo/rpm of pre-upgrade scenario is synced to capsule


        :id: postupgrade-eb8970fa-98cc-4a99-99fb-1c12c4e319c9

        :steps:
            1. Run capsule sync post upgrade.
            2. Check if the repo/rpm is been synced to capsule.

        :expectedresults:
            1. The capsule sync should be successful
            2. The repos/rpms from satellite should be synced to satellite

         """
        cap_data = hammer.hammer('capsule list')
        cap_id = hammer.get_attribute_value(cap_data, self.cap_host, 'id')
        org_data = hammer.hammer('organization list')
        org_name = hammer.get_attribute_value(org_data, int(self.org_id),
                                              'name')
        print hammer.hammer(
            'capsule content synchronize --id {0}'.format(cap_id))
        # '[ -f ... ]; echo $?' prints 0 iff the rpm was published on the
        # capsule's pulp filesystem.
        # NOTE(review): the 'Packages/b/' segment assumes the rpm file name
        # starts with 'b' — confirm against rpm1's basename.
        result = execute(
            lambda: run('[ -f /var/lib/pulp/published/yum/http/repos/'
                        '{0}/{1}/{2}/custom/{3}/{4}/Packages/b/{5} ]; echo $?'.
                        format(org_name, self.env_name, self.cv_name, self.
                               prod_name, self.repo_name, self.rpm_name)),
            host=self.cap_host)[self.cap_host]
        self.assertEqual('0', result)
# Example #4 (scraper artifact: separator between collected code samples)
class ScenarioBug1429201(TestCase):
    """This Class will serve as a whole scenario with pre-upgrade and
    post-upgrade test-case.
    Scenario test to verify if we can create a custom repository and consume it
    via client then we alter the created custom repository and satellite
    will be able to sync back the repo.
    """
    # Random suffixes keep the entity names unique across runs.
    prd_name = 'ScenarioBug1429201' + gen_alpha()
    repo_name = 'ScenarioBug1429201' + gen_alpha()
    lc_name = 'ScenarioBug1429201' + gen_alpha()
    ak_name = 'ScenarioBug1429201' + gen_alpha()
    cv_name = 'ScenarioBug1429201' + gen_alpha()
    docker_vm = os.environ.get('DOCKER_VM')
    # org id 1 — presumably the default organization; confirm.
    org_id = 1
    sat_host = get_satellite_host()
    file_path = '/var/www/html/pub/custom_repo/'
    custom_repo = 'https://' + sat_host + '/pub/custom_repo/'
    _, rpm1_name = os.path.split(rpm1)
    _, rpm2_name = os.path.split(rpm2)

    def setUp(self):
        # Configure hammer and point fabric at the satellite so run()
        # executes there.
        hammer.set_hammer_config()
        env.host_string = self.sat_host
        # NOTE(review): credential value appears masked/scrubbed in this copy.
        env.user = '******'

    @task
    def create_repo(self):
        """ Creates a custom yum repository, that will be synced to satellite
        """
        try:
            run('rm -rf {0}'.format(self.file_path))
            run('mkdir {0}'.format(self.file_path))
        except OSError:
            # NOTE(review): fabric's run() does not normally raise OSError;
            # this fallback likely never triggers — confirm.
            run('mkdir /var/www/html/pub/custom_repo')
        run('wget {0} -P {1}'.format(rpm1, self.file_path))
        # Generate yum metadata for the downloaded rpm.
        run('createrepo --database {0}'.format(self.file_path))

    @pre_upgrade
    def test_pre_user_scenario_bug_1429201(self):
        """This is pre-upgrade scenario test to verify if we can create a
         custom repository and consume it via client

         :id: 8fb8ec87-efa5-43ed-8cb3-960ef9cd6df2

         :steps:
             1. Create repository RepoFoo that you will later add to your
                Satellite. This repository should contain PackageFoo-1.0.rpm
             2. Install satellite 6.1
             3. Create custom product ProductFoo pointing to repository RepoFoo
             4. Sync RepoFoo
             5. Create content view CVFoo
             6. Add RepoFoo to CVFoo
             7. Publish version 1 of CVFoo

         :expectedresults: The client and product is created successfully

         :BZ: 1429201
         """
        execute(self.create_repo, self, host=self.sat_host)
        # End to End product + ak association
        print hammer.hammer_product_create(self.prd_name, self.org_id)
        print hammer.hammer_repository_create(self.repo_name, self.org_id,
                                              self.prd_name, self.custom_repo)

        print hammer.hammer('lifecycle-environment create --name "{0}" '
                            '--organization-id {1} --prior-id "{2}"'.format(
                                self.lc_name, self.org_id, 1))
        print hammer.hammer_repository_synchronize(self.repo_name, self.org_id,
                                                   self.prd_name)
        print hammer.hammer_content_view_create(self.cv_name, self.org_id)
        print hammer.hammer_content_view_add_repository(
            self.cv_name, self.org_id, self.prd_name, self.repo_name)
        print hammer.hammer_content_view_publish(self.cv_name, self.org_id)
        latest_repo_version = hammer.get_latest_cv_version(self.cv_name)
        lc_result = hammer.hammer('"{0}" info --name "{1}" --organization-id '
                                  '{2}'.format('lifecycle-environment',
                                               self.lc_name, self.org_id))
        lifecycle_id = hammer.get_attribute_value(lc_result, self.lc_name,
                                                  'id')
        print hammer.hammer_content_view_promote_version(
            self.cv_name, latest_repo_version, lifecycle_id, self.org_id)
        print hammer.hammer_activation_key_create(self.ak_name, self.org_id,
                                                  self.cv_name, self.lc_name)
        print hammer.hammer_activation_key_add_subscription(
            self.ak_name, self.org_id, self.prd_name)
        time.sleep(5)
        # Creating a rhel7 vm and subscribing to AK
        container_ids = dockerize(self.ak_name, 'rhel7')
        time.sleep(30)  # Subscription manager needs time to register
        # Check the package (name up to the first '-') is visible to yum on
        # the client (Python 2: dict.values() is an indexable list).
        result = execute(docker_execute_command,
                         container_ids.values()[0],
                         'yum list {0} | grep {0}'.format(
                             self.rpm1_name.split('-')[0]),
                         host=self.docker_vm)
        # Info on created entities to assert the test case using hammer info
        prd_info = hammer.hammer('"{0}" info --name "{1}" --organization-id '
                                 '{2}'.format('product', self.prd_name,
                                              self.org_id))
        self.assertEqual(
            self.prd_name,
            hammer.get_attribute_value(prd_info, self.prd_name, 'name'))
        self.assertIsNotNone(container_ids)
        self.assertIn(self.repo_name, result.values()[0])
        # Persist entity names/ids for the post-upgrade half of the scenario.
        global_dict = {
            self.__class__.__name__: {
                'prd_name': self.prd_name,
                'ak_name': self.ak_name,
                'repo_name': self.repo_name,
                'container_ids': container_ids
            }
        }
        create_dict(global_dict)

    @post_upgrade
    def test_post_user_scenario_bug_1429201(self):
        """This is post-upgrade scenario test to verify if we can alter the
        created custom repository and satellite will be able to sync back
        the repo

        :id: 9415c3e5-4699-462f-81bc-4143d8b820f1

        :steps:
            1. Remove PackageFoo-1.0.rpm from RepoFoo
            2. Add PackageFoo-2.0.rpm to RepoFoo
            3. Sync RepoFoo
            4. Publish version 2 of CVFoo
            5. Delete version 1 of CVFoo
            6. run /etc/cron.weekly/katello-remove-orphans
            7. Subscribe ClientA to CVFoo
            8. Try to install PackageFoo-1.0.rpm on ClientA
            9. Notice that yum thinks it's there based on the repo metadata
               but then fails to download it with 404
            10. Try to install PackageFoo-2.0.rpm
 
        :expectedresults: The clients is present after upgrade and deleted
            rpm is unable to be fetched, while new rpm is pulled and installed
            on client

        :BZ: 1429201
        """
        entity_data = get_entity_data(self.__class__.__name__)
        # Swap rpm1 for rpm2 in the published repo and rebuild its metadata.
        run('wget {0} -P {1}'.format(rpm2, self.file_path))
        run('rm -rf {0}'.format(self.file_path + self.rpm1_name))
        run('createrepo --update {0}'.format(self.file_path))
        # get entities from pickle
        pkcl_ak_name = entity_data['ak_name']
        container_ids = entity_data['container_ids']
        repo_name = entity_data['repo_name']
        prd_name = entity_data['prd_name']
        cv_name, lc_name = hammer.hammer_determine_cv_and_env_from_ak(
            pkcl_ak_name, self.org_id)
        # Info on created entities to assert the test case using hammer info
        ak_info = hammer.hammer('"{0}" info --name "{1}" --organization-id '
                                '{2}'.format('activation-key', pkcl_ak_name,
                                             self.org_id))
        print hammer.hammer_repository_synchronize(repo_name, self.org_id,
                                                   prd_name)
        print hammer.hammer_content_view_publish(cv_name, self.org_id)
        latest_repo_version = hammer.get_latest_cv_version(cv_name)

        result = hammer.hammer('"{0}" info --name "{1}" --organization-id '
                               '{2}'.format('lifecycle-environment', lc_name,
                                            self.org_id))
        lifecycle_id = hammer.get_attribute_value(result, lc_name, 'id')
        print hammer.hammer_content_view_promote_version(
            cv_name, latest_repo_version, lifecycle_id, self.org_id)

        # Remove the old CV version and purge orphaned packages so the
        # deleted rpm can no longer be fetched.
        # NOTE(review): latest_repo_version here is the version that was just
        # published/promoted above, while the docstring says version 1 should
        # be deleted — confirm the intended version id.
        hammer.hammer('content-view remove --content-view-version-ids {0}'
                      ' --name "{1}" --organization-id {2}'.format(
                          latest_repo_version, cv_name, self.org_id))
        run('/etc/cron.weekly/katello-remove-orphans')
        execute(refresh_subscriptions_on_docker_clients,
                container_ids.values(),
                host=self.docker_vm)
        time.sleep(30)  # Subscription manager needs time to register
        result_fail = execute(docker_execute_command,
                              container_ids.values()[0],
                              'yum list {0} | grep {0}'.format(
                                  self.rpm1_name.split('-')[0]),
                              quiet=True,
                              host=self.docker_vm)  # should be error
        result_pass = execute(docker_execute_command,
                              container_ids.values()[0],
                              'yum install -y {0}'.format(
                                  self.rpm2_name.split('-')[0]),
                              host=self.docker_vm)  # should be successful
        self.assertEqual(
            pkcl_ak_name,
            hammer.get_attribute_value(ak_info, pkcl_ak_name, 'name'))
        self.assertIsNotNone(container_ids)
        self.assertIn('Error', result_fail.values()[0])
        self.assertIn('Complete', result_pass.values()[0])
class Scenario_capsule_sync_2(TestCase):
    """
    The test class contains pre-upgrade and post-upgrade scenarios to test if
    package added postupgrade in satellite is synced to capsule post upgrade.

    Test Steps:

    1. Upgrade Satellite and Capsule.
    2. Sync a repo/rpm in satellite.
    3. Run capsule sync.
    4. Check if the repo/rpm is been synced to capsule.

    """
    cls_name = 'Scenario_capsule_sync_2'
    # NOTE: fabric/hammer are configured at class-definition (import) time;
    # run() calls below execute on the satellite unless host= overrides it.
    sat_host = get_satellite_host()
    env.host_string = sat_host
    # NOTE(review): user value appears masked/scrubbed in this copy.
    env.user = '******'
    hammer.set_hammer_config()
    repo_name = 'capsulesync_TestRepo_' + cls_name
    repo_path = '/var/www/html/pub/postupgradeCapSync_repo/'
    rpm_name = rpm2.split('/')[-1]
    prod_name = 'Scenario_postUpgradeCapSync_' + cls_name
    # Capsule activation key comes from either of two CI env variables.
    activation_key = os.environ.get('CAPSULE_AK',
                                    os.environ.get('RHEV_CAPSULE_AK'))
    cv_name = 'Scenario_postcapSync_' + cls_name
    # Derive the lifecycle environment name from the capsule activation key.
    _, env_name = hammer.hammer_determine_cv_and_env_from_ak(
        activation_key, '1')
    org_id = '1'
    # The repo is served over plain http from the satellite's pub dir.
    repo_url = 'http://' + sat_host + '/pub/postupgradeCapSync_repo/'

    def create_repo(self):
        """ Creates a custom yum repository, that will be synced to satellite
        and later to capsule from satellite
        """
        run('rm -rf {}'.format(self.repo_path))
        run('mkdir {}'.format(self.repo_path))
        run('wget {0} -P {1}'.format(rpm2, self.repo_path))
        # Generate yum metadata for the downloaded rpm
        run('createrepo --database {0}'.format(self.repo_path))

    @post_upgrade
    def test_post_user_scenario_capsule_sync_2(self):
        """Post-upgrade scenario that creates and sync repository with
        rpm, sync capsule with satellite and verifies if the repo/rpm in
        satellite is synced to capsule.


        :id: postupgrade-7c1d3441-3e8d-4ac2-8102-30e18274658c

        :steps:
            1. Post Upgrade , Sync a repo/rpm in satellite.
            2. Run capsule sync.
            3. Check if the repo/rpm is been synced to capsule.

        :expectedresults:
            1. The repo/rpm should be synced to satellite
            2. Capsule sync should be successful
            3. The repo/rpm from satellite should be synced to capsule

        """
        self.create_repo()
        print hammer.hammer_product_create(self.prod_name, self.org_id)
        prod_list = hammer.hammer('product list --organization-id {}'.format(
            self.org_id))
        self.assertEqual(
            self.prod_name,
            hammer.get_attribute_value(prod_list, self.prod_name, 'name'))
        print hammer.hammer_repository_create(self.repo_name, self.org_id,
                                              self.prod_name, self.repo_url)
        repo_list = hammer.hammer(
            'repository list --product {0} --organization-id {1}'.format(
                self.prod_name, self.org_id))
        self.assertEqual(
            self.repo_name,
            hammer.get_attribute_value(repo_list, self.repo_name, 'name'))
        print hammer.hammer_repository_synchronize(self.repo_name, self.org_id,
                                                   self.prod_name)
        # Publish the repo through a content view and promote it to the
        # lifecycle environment tied to the capsule activation key.
        print hammer.hammer_content_view_create(self.cv_name, self.org_id)
        print hammer.hammer_content_view_add_repository(
            self.cv_name, self.org_id, self.prod_name, self.repo_name)
        print hammer.hammer_content_view_publish(self.cv_name, self.org_id)
        cv_ver = hammer.get_latest_cv_version(self.cv_name)
        env_data = hammer.hammer(
            'lifecycle-environment list --organization-id {0} '
            '--name {1}'.format(self.org_id, self.env_name))
        env_id = hammer.get_attribute_value(env_data, self.env_name, 'id')
        print hammer.hammer_content_view_promote_version(
            self.cv_name, cv_ver, env_id, self.org_id)
        # Capsule hostname comes from either of two CI environment variables.
        cap_host = os.environ.get('RHEV_CAP_HOST',
                                  os.environ.get('CAPSULE_HOSTNAME'))
        cap_data = hammer.hammer('capsule list')
        cap_id = hammer.get_attribute_value(cap_data, cap_host, 'id')
        cap_info = {'id': cap_id, 'name': cap_host}
        org_data = hammer.hammer('organization list')
        org_name = hammer.get_attribute_value(org_data, int(self.org_id),
                                              'name')
        # Synchronous capsule sync. NOTE: 'async' became a reserved word in
        # Python 3.7; this module targets Python 2.
        print hammer.sync_capsule_content(cap_info, async=False)
        # '[ -f ... ]; echo $?' prints 0 iff the rpm was published on the
        # capsule's pulp filesystem.
        result = execute(
            lambda: run('[ -f /var/lib/pulp/published/yum/http/repos/'
                        '{0}/{1}/{2}/custom/{3}/{4}/{5} ]; echo $?'.format(
                            org_name, self.env_name, self.cv_name, self.
                            prod_name, self.repo_name, self.rpm_name)),
            host=cap_host)[cap_host]
        self.assertEqual('0', result)
class Scenario_manifest_refresh(TestCase):
    """The test class contains pre-upgrade and post-upgrade scenarios to test
    manifest refresh before and after upgrade

    Test Steps:

    1. Before Satellite upgrade, upload a manifest.
    2. Refresh the manifest.
    3. Upgrade satellite.
    4. Refresh the manifest.
    5. Delete the manifest.

    """
    # URL of the subscription manifest to upload (supplied by CI env).
    manifest_url = os.environ.get('MANIFEST_URL')
    org_name = 'preupgrade_subscription_org'
    # NOTE: the fabric host is set at class-definition (import) time.
    env.host_string = get_satellite_host()

    @pre_upgrade
    def test_pre_manifest_scenario_refresh(self):
        """Pre-upgrade scenario that upload and refresh manifest in satellite
         which will be refreshed in post upgrade scenario.


        :id: preupgrade-29b246aa-2c7f-49f4-870a-7a0075e184b1

        :steps:
            1. Before Satellite upgrade, upload and refresh manifest.

        :expectedresults: Manifest should upload and refresh successfully.
         """
        org = entities.Organization(name=self.org_name).create()
        upload_manifest(self.manifest_url, org.name)
        # The newest history entry should confirm the import succeeded.
        history = hammer.hammer('subscription manifest-history'
                                ' --organization {0}'.format(org.name))
        self.assertIn("{0} file imported successfully.".format(org.name),
                      history[0]['status message'])
        sub = entities.Subscription(organization=org)
        sub.refresh_manifest(data={'organization_id': org.id})
        # A successful refresh leaves at least one subscription in the org.
        self.assertGreater(len(sub.search()), 0)

    @post_upgrade
    def test_post_manifest_scenario_refresh(self):
        """Post-upgrade scenario that verifies manifest refreshed successfully
        and deleted successfully.

        :id: postupgrade-29b246aa-2c7f-49f4-870a-7a0075e184b1

        :steps:
            1. Refresh manifest
            2. Delete manifest

        :expectedresults:
            1. The manifest should refresh and delete successfully.
         """
        # The org was created by the pre-upgrade scenario; look it up by name.
        org = entities.Organization().search(
            query={'search': 'name={0}'.format(self.org_name)})[0]
        sub = entities.Subscription(organization=org)
        sub.refresh_manifest(data={'organization_id': org.id})
        self.assertGreater(len(sub.search()), 0)
        delete_manifest(self.org_name)
        history = hammer.hammer('subscription manifest-history'
                                ' --organization {0}'.format(self.org_name))
        # Fix: the expected message contains no replacement field, so the
        # original ".format(self.org_name)" call was a no-op and was removed.
        self.assertIn(
            "Subscriptions deleted by foreman_admin",
            history[0]['status message'])
# Example #7 (scraper artifact: separator between collected code samples)
"""Initializes of config to be used in Scenario Tests
"""
from automation_tools.satellite6.hammer import set_hammer_config
from upgrade_tests.helpers.scenarios import get_satellite_host
from nailgun.config import ServerConfig
from fabric.api import env

# Nailgun Config setup
sat_url = 'https://{}'.format(get_satellite_host())
ServerConfig(url=sat_url, auth=['admin', 'changeme'], verify=False).save()

# Fabric Config setup
env.user = '******'

# Hammer Config Setup
set_hammer_config()