Example #1
    def test_positive_reinstall_on_same_node_after_remove(self):
        """Reinstall capsule on the same node after remove

        :id: fac35a44-0bc9-44e9-a2c3-398e1aa9900c

        :customerscenario: true

        :expectedresults: The capsule is successfully reinstalled

        :BZ: 1327442

        :CaseLevel: System
        """
        # Note: capsule-remove has been replaced by katello-remove
        with CapsuleVirtualMachine() as capsule_vm:
            # ensure that capsule refresh-features succeed
            with self.assertNotRaises(CLIReturnCodeError):
                Capsule.refresh_features(
                    {'name': capsule_vm._capsule_hostname})
            # the katello-remove command asks for confirmation: type Y, then type remove
            result = capsule_vm.run("printf 'Y\nremove\n' | katello-remove")
            self.assertEqual(result.return_code, 0)
            # ensure that capsule refresh-features fail
            with self.assertRaises(CLIReturnCodeError):
                Capsule.refresh_features(
                    {'name': capsule_vm._capsule_hostname})
            # reinstall katello certs as they have been removed
            capsule_vm.install_katello_ca()
            # refresh subscription
            capsule_vm.run('subscription-manager refresh')
            # install satellite-capsule package
            result = capsule_vm.run('yum install -y satellite-capsule')
            self.assertEqual(result.return_code, 0)
            # generate capsule certs and installer command
            cert_file_path = '/tmp/{0}-certs.tar'.format(capsule_vm.hostname)
            result = ssh.command('capsule-certs-generate '
                                 '--foreman-proxy-fqdn {0} '
                                 '--certs-tar {1}'.format(
                                     capsule_vm.hostname, cert_file_path))
            self.assertEqual(result.return_code, 0)
            # retrieve the installer command from the result output
            installer_cmd = extract_capsule_satellite_installer_command(
                result.stdout)
            # copy the generated certs to capsule vm
            _, temporary_local_cert_file_path = mkstemp(suffix='-certs.tar')
            ssh.download_file(remote_file=cert_file_path,
                              local_file=temporary_local_cert_file_path,
                              hostname=settings.server.hostname)
            ssh.upload_file(local_file=temporary_local_cert_file_path,
                            remote_file=cert_file_path,
                            hostname=capsule_vm.hostname)
            # delete the temporary file
            os.remove(temporary_local_cert_file_path)
            result = capsule_vm.run(installer_cmd, timeout=1500)
            self.assertEqual(result.return_code, 0)
            # ensure that capsule refresh-features succeed
            with self.assertNotRaises(CLIReturnCodeError):
                Capsule.refresh_features(
                    {'name': capsule_vm._capsule_hostname})
Example #2
File: vm.py Project: Ichimonji10/robottelo
 def get(self, remote_path, local_path=None):
     """Get a remote file from the virtual machine."""
     if not self._created:
         raise VirtualMachineError(
             'The virtual machine should be created before getting any file'
         )
     ssh.download_file(remote_path, local_path, hostname=self.ip_addr)
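
Note: a minimal usage sketch for the get() helper above. The vm object, the assumed create() step (implied by the _created guard), and the paths are illustrative only.

# hypothetical usage; VirtualMachine stands for the helper class defined above
vm = VirtualMachine()
vm.create()                                            # the _created guard implies a create step
vm.get('/var/log/messages', '/tmp/vm-messages.log')    # copy a remote file to the local machine
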
Example #3
def test_positive_logging_from_foreman_proxy():
    """Check PUT to Smart Proxy API to refresh the features is logged and has request ID.

    :id: 0ecd8406-6cf1-4520-b8b6-8a164a1e60c2

    :expectedresults: line of log with PUT has request ID

    :CaseImportance: Medium
    """

    PUT_line_found = False
    request_id = None
    source_log_1 = '/var/log/foreman/production.log'
    test_logfile_1 = '/var/tmp/logfile_1_from_proxy'
    source_log_2 = '/var/log/foreman-proxy/proxy.log'
    test_logfile_2 = '/var/tmp/logfile_2_from_proxy'
    with ssh.get_connection() as connection:
        # get the number of lines in the source logs before the test
        line_count_start_1 = line_count(source_log_1, connection)
        line_count_start_2 = line_count(source_log_2, connection)
        # hammer command for this test
        result = connection.run('hammer proxy refresh-features --id 1')
        assert result.return_code == 0, "BASH command error?"
        # get the number of lines in the source logs after the test
        line_count_end_1 = line_count(source_log_1, connection)
        line_count_end_2 = line_count(source_log_2, connection)
        # get the log lines of interest, put them in test_logfile_1
        cut_lines(line_count_start_1, line_count_end_1, source_log_1,
                  test_logfile_1, connection)
        # get the log lines of interest, put them in test_logfile_2
        cut_lines(line_count_start_2, line_count_end_2, source_log_2,
                  test_logfile_2, connection)
    # use same location on remote and local for log file extract
    ssh.download_file(test_logfile_1)
    # use same location on remote and local for log file extract
    ssh.download_file(test_logfile_2)
    # search the log file extract for the line with PUT to host API
    with open(test_logfile_1) as logfile:
        for line in logfile:
            if re.search(r'Started PUT \"\/api\/smart_proxies\/1\/refresh',
                         line):
                logger.info('Found the line with PUT to foreman proxy API')
                PUT_line_found = True
                # Confirm the request ID was logged in the line with PUT and
                # capture it from the [I|app|<id>] tag
                match = re.search(r'\[I\|app\|(\w{8})\]', line)
                assert match, "Request ID not found"
                logger.info("Request ID found for logging from foreman proxy")
                request_id = match.group(1)
                break
    assert PUT_line_found, "The PUT command to refresh proxies was not found in logs."
    # search the local copy of proxy.log file for the same request ID
    with open(test_logfile_2) as logfile:
        # Confirm the same request ID was also logged in proxy.log
        assert any(request_id in line for line in logfile), \
            "Request ID not found in proxy.log"
        logger.info("Request ID also found in proxy.log")
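
Note: a small standalone sketch of the request-ID extraction used above; the sample production.log line is fabricated for illustration.

import re

sample = '2021-06-04T05:26:48 [I|app|a1b2c3d4] Started PUT "/api/smart_proxies/1/refresh"'
match = re.search(r'\[I\|app\|(\w{8})\]', sample)
assert match, "Request ID not found"
request_id = match.group(1)    # 'a1b2c3d4'
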
Example #4
File: vm.py Project: vikas86/robottelo
 def get(self, remote_path, local_path=None):
     """Get a remote file from the virtual machine."""
     if not self._created:
         raise VirtualMachineError(
             'The virtual machine should be created before getting any file'
         )
     ssh.download_file(remote_path, local_path, hostname=self.ip_addr)
Example #5
 def _read_config_file(self):
     # Read the virt-who-<id> config file
     local_path = tempfile.mkstemp(suffix=self.config_file_name)[1]
     ssh.download_file(self.remote_path, local_path, hostname=self.server)
     parser = ConfigParser()
     with open(local_path) as local_fp:
         parser.read_file(local_fp)
     return parser
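
Note: a short sketch of how the ConfigParser returned by _read_config_file might be consumed; the section and option names are assumptions, since virt-who config layouts vary.

# hypothetical consumption of the parser returned above
parser = config._read_config_file()            # assuming `config` is an instance of the class above
section = parser.sections()[0]                 # e.g. 'virt-who-config-1' (assumed name)
hypervisor_type = parser.get(section, 'type', fallback=None)
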
Example #6
def test_positive_inventory_generate_upload_cli(organization_ak_setup, registered_hosts):
    """Tests Insights inventory generation and upload via foreman-rake commands:
    https://github.com/theforeman/foreman_rh_cloud/blob/master/README.md

    :id: f2da9506-97d4-4d1c-b373-9f71a52b8ab8

    :Steps:

        0. Create a VM and register to insights within org having manifest.

        1. Generate and upload report for all organizations
            # /usr/sbin/foreman-rake rh_cloud_inventory:report:generate_upload

        2. Generate and upload report for specific organization
            # export organization_id=1
            # /usr/sbin/foreman-rake rh_cloud_inventory:report:generate_upload

        3. Generate report for specific organization (don't upload)
            # export organization_id=1
            # export target=/var/lib/foreman/red_hat_inventory/generated_reports/
            # /usr/sbin/foreman-rake rh_cloud_inventory:report:generate

        4. Upload previously generated report
            (needs to be named 'report_for_#{organization_id}.tar.gz')
            # export organization_id=1
            # export target=/var/lib/foreman/red_hat_inventory/generated_reports/
            # /usr/sbin/foreman-rake rh_cloud_inventory:report:upload


    :expectedresults: Inventory is generated and uploaded to cloud.redhat.com.

    :CaseAutomation: Automated

    :CaseLevel: System
    """
    org, ak = organization_ak_setup
    cmd = f'organization_id={org.id} foreman-rake rh_cloud_inventory:report:generate_upload'
    upload_success_msg = f'Generated and uploaded inventory report for organization \'{org.name}\''
    result = ssh.command(cmd)
    assert result.return_code == 0
    assert result.stdout[0] == upload_success_msg

    local_report_path = f'/tmp/report_for_{org.id}.tar.xz'
    remote_report_path = (
        f'/var/lib/foreman/red_hat_inventory/uploads/done/report_for_{org.id}.tar.xz'
    )
    ssh.download_file(remote_report_path, local_report_path)
    local_file_data = get_local_file_data(local_report_path)
    assert local_file_data['checksum'] == get_remote_report_checksum(org.id)
    assert local_file_data['size'] > 0
    assert local_file_data['extractable']
    assert local_file_data['json_files_parsable']

    slices_in_metadata = set(local_file_data['metadata_counts'].keys())
    slices_in_tar = set(local_file_data['slices_counts'].keys())
    assert slices_in_metadata == slices_in_tar
    for slice_name, hosts_count in local_file_data['metadata_counts'].items():
        assert hosts_count == local_file_data['slices_counts'][slice_name]
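
Note: the checksum comparison above presumably reduces to hashing the downloaded archive; a minimal local sketch of that idea (sha256 is an assumption about the helpers' algorithm).

import hashlib

def sha256_of(path, chunk_size=1024 * 1024):
    """Return the sha256 hex digest of a local file, read in chunks."""
    digest = hashlib.sha256()
    with open(path, 'rb') as handle:
        for chunk in iter(lambda: handle.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()

# e.g. sha256_of(local_report_path) could then be compared with get_remote_report_checksum(org.id)
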
Example #7
    def test_positive_validate_capsule_certificate(self, file_setup):
        """Check that Capsules cert handles additional proxy names.

        :id: 8b53fc3d-704f-44f4-899e-74654529bfcf

        :customerscenario: true

        :steps:

            1. Generate a Capsule certificate
            2. Confirm proxy server's FQDN for DNS is present
            3. Confirm that format of alternative names does not include []

        :expectedresults: Capsule certs have valid DNS values

        :BZ: 1747581

        :CaseAutomation: Automated
        """
        DNS_Check = False
        with get_connection(timeout=200) as connection:
            # extract the cert from the tar file
            result = connection.run(
                'tar -xf {0}/capsule_certs.tar --directory {0}/ '.format(file_setup['tmp_dir'])
            )
            assert result.return_code == 0, 'Extraction to working directory failed.'
            # Extract raw data from RPM to a file
            result = connection.run(
                'rpm2cpio {0}/ssl-build/{1}/'
                '{1}-qpid-router-server*.rpm'
                '>> {0}/ssl-build/{1}/cert-raw-data'.format(
                    file_setup['tmp_dir'], file_setup['capsule_hostname']
                )
            )
            # Extract the cert data from file cert-raw-data and write to cert-data
            result = connection.run(
                'openssl x509 -noout -text -in {0}/ssl-build/{1}/cert-raw-data'
                '>> {0}/ssl-build/{1}/cert-data'.format(
                    file_setup['tmp_dir'], file_setup['capsule_hostname']
                )
            )
            # use same location on remote and local for cert_file
            download_file(file_setup['caps_cert_file'])
            # search the file for the line with DNS
            with open(file_setup['caps_cert_file']) as file:
                for line in file:
                    if re.search(r'\bDNS:', line):
                        match = re.search(r'{}'.format(file_setup['capsule_hostname']), line)
                        assert match, "No proxy name found."
                        if is_open('BZ:1747581'):
                            DNS_Check = True
                        else:
                            match = re.search(r'\[]', line)
                            assert not match, "Incorrect parsing of alternative proxy name."
                            DNS_Check = True
                        break
                    # if no match for "DNS:" found, then raise error.
            assert DNS_Check, "Cannot find Subject Alternative Name"
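
Note: to make the SAN check above concrete, a tiny sketch against a fabricated line of `openssl x509 -noout -text` output.

import re

san_line = '    DNS:capsule.example.com, DNS:loadbalancer.example.com'   # fabricated sample
assert re.search(r'\bDNS:', san_line)                  # the SAN line is present
assert re.search(r'capsule.example.com', san_line)     # the proxy FQDN is listed
assert not re.search(r'\[]', san_line)                 # no stray "[]" from bad list rendering
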
Example #8
def test_positive_schedule_generation_and_get_mail(session, module_org,
                                                   module_loc):
    """Schedule generating a report. Request the result be sent via e-mail.

    :id: cd19b90d-836f-4efd-c3bc-d5e09a909a67
    :setup: User with reporting access rights, some Host
    :steps:
        1. Monitor -> Report Templates
        2. Host - Registered Content Hosts -> Generate
        3. Set schedule to current time + 1 minute
        4. Check that the result should be sent via e-mail
        5. Submit
        6. Receive the e-mail
    :expectedresults: After ~1 minute, the same report is generated as if
                      the results were downloaded from WebUI.
                      The result is compressed.
    :CaseImportance: High
    """
    # generate Subscriptions report
    with session:
        session.reporttemplate.schedule(
            'Subscription - General Report',
            values={
                'output_format': 'JSON',
                'generate_at': '1970-01-01 17:10:00',
                'email': True,
                'email_to': 'root@localhost',
            },
        )
    file_path = '/tmp/{}.json'.format(gen_string('alpha'))
    gzip_path = f'{file_path}.gz'
    expect_script = (f'#!/usr/bin/env expect\n'
                     f'spawn mail\n'
                     f'expect "& "\n'
                     f'send "w $ /dev/null\\r"\n'
                     f'expect "Enter filename"\n'
                     f'send "\\r"\n'
                     f'expect "Enter filename"\n'
                     f'send "\\r"\n'
                     f'expect "Enter filename"\n'
                     f'send "\\025{gzip_path}\\r"\n'
                     f'expect "&"\n'
                     f'send "q\\r"\n')
    ssh.command(f'expect -c \'{expect_script}\'',
                hostname=settings.server.hostname)
    ssh.download_file(gzip_path)
    os.system(f'gunzip {gzip_path}')
    with open(file_path) as json_file:
        data = json.load(json_file)
    assert len(data) >= len(
        entities.Subscription(organization=module_org).search()) > 0
    keys_expected = [
        'Available', 'Contract number', 'ID', 'Name', 'Quantity', 'SKU'
    ]
    for subscription in data:
        assert sorted(list(subscription.keys())) == keys_expected
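
Note: the gunzip shell-out above could equally be done in-process with the standard library; a minimal alternative sketch reusing the same gzip_path.

import gzip
import json

# read the downloaded .gz report directly, without shelling out to gunzip
with gzip.open(gzip_path, 'rt') as gz_file:
    data = json.load(gz_file)
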
Example #9
File: log.py Project: BlackSmith/robottelo
    def __init__(self, remote_path, pattern=None):
        self.remote_path = remote_path
        self.pattern = pattern

        if not os.path.isdir(LOGS_DATA_DIR):
            os.makedirs(LOGS_DATA_DIR)
        self.local_path = os.path.join(LOGS_DATA_DIR,
                                       os.path.basename(remote_path))
        ssh.download_file(remote_path, self.local_path)
        with open(self.local_path) as file_:
            self.data = file_.readlines()
Example #10
    def __init__(self, remote_path, pattern=None):
        self.remote_path = remote_path
        self.pattern = pattern

        if not os.path.isdir(LOGS_DATA_DIR):
            os.makedirs(LOGS_DATA_DIR)
        self.local_path = os.path.join(LOGS_DATA_DIR,
                                       os.path.basename(remote_path))
        ssh.download_file(remote_path, self.local_path)
        with open(self.local_path) as file_:
            self.data = file_.readlines()
Example #11
    def test_positive_logging_from_candlepin(self):
        """Check logging after manifest upload.

        :id: 8c06e501-52d7-4baf-903e-7de9caffb066

        :expectedresults: line of logs with POST has request ID

        :CaseImportance: Medium
        """

        POST_line_found = False
        source_log = '/var/log/candlepin/candlepin.log'
        test_logfile = '/var/tmp/logfile_from_candlepin'
        # regex for a version 4 UUID (8-4-4-12 format)
        regex = r"\b[0-9a-f]{8}\b-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-\b[0-9a-f]{12}\b"
        with ssh.get_connection() as connection:
            # get the number of lines in the source log before the test
            line_count_start = line_count(source_log, connection)
            # command for this test
            with manifests.clone() as manifest:
                upload_file(manifest.content, manifest.filename)
                Subscription.upload({
                    'file': manifest.filename,
                    'organization-id': self.org['id']
                })
            # get the number of lines in the source log after the test
            line_count_end = line_count(source_log, connection)
            # get the log lines of interest, put them in test_logfile
            cut_lines(line_count_start, line_count_end, source_log,
                      test_logfile, connection)
        # use same location on remote and local for log file extract
        ssh.download_file(test_logfile)
        # search the log file extract for the line with POST to candlepin API
        with open(test_logfile) as logfile:
            for line in logfile:
                if re.search(r'verb=POST, uri=/candlepin/owners/{0}'.format(
                        self.org['name']), line):
                    self.logger.info(
                        'Found the line with POST to candlepin API')
                    POST_line_found = True
                    # Confirm the request ID was logged in the line with POST
                    match = re.search(regex, line)
                    assert match, "Request ID not found"
                    self.logger.info(
                        "Request ID found for logging from candlepin")
                    break
        assert POST_line_found, "The POST command to candlepin was not found in logs."
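
Note: unlike the 8-character Rails request ID, the candlepin request ID is a full UUID; a quick standalone check of the regex above against a fabricated candlepin.log line.

import re

regex = r"\b[0-9a-f]{8}\b-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-\b[0-9a-f]{12}\b"
sample = ('2021-06-04 05:26:48,123 [req=f1d2a3b4-5c6d-4e7f-8a9b-0c1d2e3f4a5b] '
          'INFO verb=POST, uri=/candlepin/owners/Default_Organization/imports')   # fabricated
match = re.search(regex, sample)
assert match and match.group(0) == 'f1d2a3b4-5c6d-4e7f-8a9b-0c1d2e3f4a5b'
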
Example #12
def test_positive_logging_from_dynflow(module_org):
    """Check POST to repositories API is logged while enabling a repo \
        and it has the request ID.

    :id: 2d1a5f64-0b1c-4f95-ad20-881134717c4c

    :expectedresults: line of log with POST has request ID

    :CaseImportance: Medium
    """

    POST_line_found = False
    source_log = '/var/log/foreman/production.log'
    test_logfile = '/var/tmp/logfile_dynflow'
    product = entities.Product(organization=module_org).create()
    repo_name = gen_string('alpha')
    with ssh.get_connection() as connection:
        # get the number of lines in the source log before the test
        line_count_start = line_count(source_log, connection)
        # command for this test
        new_repo = entities.Repository(name=repo_name,
                                       product=product).create()
        logger.info(f'Created Repo {new_repo.name} for dynflow log test')
        # get the number of lines in the source log after the test
        line_count_end = line_count(source_log, connection)
        # get the log lines of interest, put them in test_logfile
        cut_lines(line_count_start, line_count_end, source_log, test_logfile,
                  connection)
    # use same location on remote and local for log file extract
    ssh.download_file(test_logfile)
    # search the log file extract for the line with POST to the repositories API
    with open(test_logfile) as logfile:
        for line in logfile:
            if re.search(r'Started POST \"/katello\/api\/v2\/repositories',
                         line):
                logger.info('Found the line with POST to repositories API.')
                POST_line_found = True
                # Confirm the request ID was logged in the line with POST
                match = re.search(r'\[I\|app\|\w{8}\]', line)
                assert match, "Request ID not found"
                logger.info("Request ID found for logging from dynflow ")
    assert POST_line_found, "The POST command to enable a repo was not found in logs."
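
Note: the line_count and cut_lines helpers used throughout these log tests are not shown here; the sketch below is one plausible way to implement the same idea over the ssh connection, not the project's actual helpers.

def line_count(file_path, connection):
    """Hypothetical helper: count the lines of a remote file via wc -l."""
    result = connection.run(f'wc -l < {file_path}')
    # assuming stdout is returned as a list of lines, as elsewhere in these examples
    return int(result.stdout[0].strip())

def cut_lines(start, end, source_file, out_file, connection):
    """Hypothetical helper: copy lines start..end of source_file into out_file."""
    return connection.run(f'sed -n "{start},{end}p" {source_file} > {out_file}')
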
Example #13
    def csv_to_dataset(csv_files):
        """Process and return remote CSV files.

        Read the remote CSV files, and return a list of dictionaries for them

        :param csv_files: A list of strings, where each string is a path to
            a CSV file on the remote server.
        :returns: A list of dict, where each dict holds the contents of one CSV
            file.

        """
        result = []
        for file in csv_files:
            temp_file = tempfile.mkstemp()[1]
            ssh.download_file(file, temp_file)
            with open(temp_file, "rb") as file:
                reader = csv.DictReader(file)
                result.extend([{key.decode("utf8"): val.decode("utf8") for key, val in row.items()} for row in reader])
            os.remove(temp_file)
        return result
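
Note: the decode() calls above are Python 2 idioms; on Python 3 the same CSV-to-dict conversion can be written against a text-mode file, for example (a sketch, not the project's code).

import csv

def csv_file_to_dicts(local_csv_path):
    """Read one local CSV file into a list of row dictionaries (Python 3)."""
    with open(local_csv_path, newline='', encoding='utf-8') as csv_file:
        return list(csv.DictReader(csv_file))
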
Example #14
    def test_positive_logging_from_foreman_core(self):
        """Check that GET command to Hosts API is logged and has request ID.

        :id: 0785260d-cb81-4351-a7cb-d7841335e2de

        :expectedresults: line of log with GET has request ID

        :CaseImportance: Medium
        """

        GET_line_found = False
        source_log = '/var/log/foreman/production.log'
        test_logfile = '/var/tmp/logfile_from_foreman_core'
        with ssh.get_connection() as connection:
            # get the number of lines in the source log before the test
            line_count_start = line_count(source_log, connection)
            # hammer command for this test
            result = connection.run('hammer host list')
            self.assertEqual(result.return_code, 0, "BASH command error?")
            # get the number of lines in the source log after the test
            line_count_end = line_count(source_log, connection)
            # get the log lines of interest, put them in test_logfile
            cut_lines(line_count_start, line_count_end, source_log,
                      test_logfile, connection)
        # use same location on remote and local for log file extract
        ssh.download_file(test_logfile)
        # search the log file extract for the line with GET to host API
        with open(test_logfile) as logfile:
            for line in logfile:
                if re.search(r'Started GET \"\/api/hosts\?page=1', line):
                    self.logger.info('Found the line with GET to hosts API')
                    GET_line_found = True
                    # Confirm the request ID was logged in the line with GET
                    match = re.search(r'\[I\|app\|\w{8}\]', line)
                    assert match, "Request ID not found"
                    self.logger.info(
                        "Request ID found for logging from foreman core")
                    break
        assert GET_line_found, "The GET command to list hosts was not found in logs."
Example #15
    def csv_to_dataset(csv_files):
        """Process and return remote CSV files.

        Read the remote CSV files, and return a list of dictionaries for them

        :param csv_files: A list of strings, where each string is a path to
            a CSV file on the remote server.
        :returns: A list of dict, where each dict holds the contents of one CSV
            file.

        """
        result = []
        for file in csv_files:
            temp_file = tempfile.mkstemp()[1]
            ssh.download_file(file, temp_file)
            with open(temp_file, 'rb') as file:
                reader = csv.DictReader(file)
                result.extend([{
                    key.decode('utf8'): val.decode('utf8')
                    for key, val in row.items()
                } for row in reader])
            os.remove(temp_file)
        return result
Example #16
    def test_post_performance_tuning_apply(self):
        """In postupgrade scenario, we verify the set tuning parameters and custom-hiera.yaml
        file's content.

        :id: postupgrade-31e26b08-2157-11ea-9223-001a4a1601d8

        :steps:
            1: Download the custom-hiera.yaml after upgrade from upgraded setup.
            2: Compare it with the medium tune custom-hiera file.
            3. Check the tune settings in scenario.yaml file, it should be set as
            "default" with updated medium tune parameters.
            4. Upload the default custom-hiera.yaml file on the upgrade setup.
            5. Run the satellite installer with "default" tune argument(satellite-installer
            --tuning default -s --disable-system-checks).
            6. If something gets wrong with the default tune parameters then we restore the
            default original tune parameter.

        :expectedresults: medium tune parameter should be unchanged after upgrade.

         """
        cmd = ('grep "mongodb::server::storage_engine: \'wiredTiger\'" '
               '/etc/foreman-installer/custom-hiera.yaml')
        mongodb_type = ssh.command(cmd).return_code
        try:
            self._create_custom_hiera_file(mongodb_type, "medium")
            ssh.download_file(
                local_file="custom-hiera-after-upgrade.yaml",
                remote_file="/etc/foreman-installer/custom-hiera.yaml",
            )
            assert filecmp.cmp("custom-hiera.yaml",
                               "custom-hiera-after-upgrade.yaml")

            cmd = 'grep "tuning: default" /etc/foreman-installer/scenarios.d/satellite.yaml'
            tuning_state_after_upgrade = ssh.command(cmd).return_code
            assert tuning_state_after_upgrade == 0

            expected_tune_size, actual_tune_size = self._data_creation_of_set_tune_params(
                MEDIUM_TUNING_DATA, TUNE_DATA_COLLECTION_REGEX,
                MEDIUM_TUNE_PARAM_GROUPS)

            for key, value in actual_tune_size.items():
                assert expected_tune_size[key] == value

            self._create_custom_hiera_file(mongodb_type)
            ssh.upload_file(
                local_file='custom-hiera.yaml',
                remote_file='/etc/foreman-installer/custom-hiera.yaml',
            )
            command_output = ssh.command(
                'satellite-installer --tuning default -s --disable-system-checks',
                timeout=1000,
            )
            command_status = [
                status.strip() for status in command_output.stdout
            ]
            assert 'Success!' in command_status
        finally:
            self._create_custom_hiera_file(mongodb_type)
            ssh.upload_file(
                local_file='custom-hiera.yaml',
                remote_file='/etc/foreman-installer/custom-hiera.yaml',
            )
            os.remove("custom-hiera.yaml")
            command_output = ssh.command(
                'satellite-installer -s --disable-system-checks', timeout=1000)
            command_status = [
                status.strip() for status in command_output.stdout
            ]
            assert 'Success!' in command_status
Example #17
    def _setup_capsule(self):
        """Prepare the virtual machine to host a capsule node"""
        # setup the name resolution
        self._capsule_setup_name_resolution()
        logger.info('adding repofiles required for capsule installation')
        self.create_custom_repos(
            capsule=settings.capsule_repo,
            rhscl=settings.rhscl_repo,
            ansible=settings.ansible_repo,
            maint=settings.satmaintenance_repo
        )
        self.configure_rhel_repo(settings.__dict__[self.distro[:-1] + '_repo'])
        self.run('yum repolist')
        self.run('yum -y install satellite-capsule', timeout=900)
        result = self.run('rpm -q satellite-capsule')
        if result.return_code != 0:
            raise CapsuleVirtualMachineError(
                u'Failed to install satellite-capsule package\n{}'.format(
                    result.stderr)
            )
        cert_file_path = '/tmp/{0}-certs.tar'.format(self.hostname)
        certs_gen = ssh.command(
            'capsule-certs-generate '
            '--foreman-proxy-fqdn {0} '
            '--certs-tar {1}'
            .format(self.hostname, cert_file_path)
        )
        if certs_gen.return_code != 0:
            raise CapsuleVirtualMachineError(
                u'Unable to generate certificate\n{}'
                .format(certs_gen.stderr)
            )
        # copy the certificate to capsule vm
        _, temporary_local_cert_file_path = mkstemp(suffix='-certs.tar')
        logger.info(
            'downloading the certs file: {0}'.format(cert_file_path)
        )
        download_file(
            remote_file=cert_file_path,
            local_file=temporary_local_cert_file_path,
            hostname=settings.server.hostname
        )
        logger.info(
            'uploading the certs file: {0}'.format(cert_file_path)
        )
        upload_file(
            key_filename=settings.server.ssh_key,
            local_file=temporary_local_cert_file_path,
            remote_file=cert_file_path,
            hostname=self.ip_addr
        )
        # delete the temporary file
        os.remove(temporary_local_cert_file_path)

        installer_cmd = extract_capsule_satellite_installer_command(
            certs_gen.stdout)
        if bz_bug_is_open(1458749):
            if '--scenario foreman-proxy-content' in installer_cmd:
                installer_cmd = installer_cmd.replace(
                     '--scenario foreman-proxy-content', '--scenario capsule')
        result = self.run(installer_cmd, timeout=1500)
        if result.return_code != 0:
            # before exit download the capsule log file
            _, log_path = mkstemp(prefix='capsule_external-', suffix='.log')
            download_file(
                '/var/log/foreman-installer/capsule.log',
                log_path,
                self.ip_addr
            )
            raise CapsuleVirtualMachineError(
                result.return_code, result.stderr,
                u'foreman installer failed at capsule host')

        # manually start pulp_celerybeat service if BZ1446930 is open
        result = self.run('systemctl status pulp_celerybeat.service')
        if 'inactive (dead)' in '\n'.join(result.stdout):
            if bz_bug_is_open(1446930):
                result = self.run('systemctl start pulp_celerybeat.service')
                if result.return_code != 0:
                    raise CapsuleVirtualMachineError(
                        'Failed to start pulp_celerybeat service\n{}'.format(
                            result.stderr)
                    )
            else:
                raise CapsuleVirtualMachineError(
                    'pulp_celerybeat service not running')
Example #18
def oscap_content_path():
    _, file_name = os.path.split(settings.oscap.content_path)
    local_file = f"/tmp/{file_name}"
    ssh.download_file(settings.oscap.content_path, local_file)
    return local_file
Example #19
def oscap_content_path():
    """ Download scap content from satellite and return local path of it."""
    _, file_name = os.path.split(settings.oscap.content_path)
    local_file = f"/tmp/{file_name}"
    ssh.download_file(settings.oscap.content_path, local_file)
    return local_file
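
Note: a variant of the same download that avoids a fixed /tmp name by using tempfile; ssh and settings are assumed to be the same objects used in the examples above.

import os
import tempfile

def oscap_content_path_tmp():
    """Download the SCAP content to a uniquely named local temp file."""
    _, file_name = os.path.split(settings.oscap.content_path)
    handle, local_file = tempfile.mkstemp(suffix=f'-{file_name}')
    os.close(handle)
    ssh.download_file(settings.oscap.content_path, local_file)
    return local_file
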
Example #20
    def _setup_capsule(self):
        """Prepare the virtual machine to host a capsule node"""
        # setup the name resolution
        self._capsule_setup_name_resolution()
        logger.info('adding repofiles required for capsule installation')
        self.create_custom_repos(capsule=settings.capsule_repo,
                                 rhscl=settings.rhscl_repo,
                                 ansible=settings.ansible_repo,
                                 maint=settings.satmaintenance_repo)
        self.configure_rhel_repo(settings.__dict__[self.distro[:-1] + '_repo'])
        self.run('yum repolist')
        self.run('yum -y install satellite-capsule', timeout=1200)
        result = self.run('rpm -q satellite-capsule')
        if result.return_code != 0:
            raise CapsuleVirtualMachineError(
                u'Failed to install satellite-capsule package\n{}'.format(
                    result.stderr))
        cert_file_path = '/root/{0}-certs.tar'.format(self.hostname)
        certs_gen = ssh.command('capsule-certs-generate '
                                '--foreman-proxy-fqdn {0} '
                                '--certs-tar {1}'.format(
                                    self.hostname, cert_file_path))
        if certs_gen.return_code != 0:
            raise CapsuleVirtualMachineError(
                u'Unable to generate certificate\n{}'.format(certs_gen.stderr))
        # copy the certificate to capsule vm
        _, temporary_local_cert_file_path = mkstemp(suffix='-certs.tar')
        logger.info('downloading the certs file: {0}'.format(cert_file_path))
        download_file(remote_file=cert_file_path,
                      local_file=temporary_local_cert_file_path,
                      hostname=settings.server.hostname)
        logger.info('uploading the certs file: {0}'.format(cert_file_path))
        upload_file(key_filename=settings.server.ssh_key,
                    local_file=temporary_local_cert_file_path,
                    remote_file=cert_file_path,
                    hostname=self.ip_addr)
        # delete the temporary file
        os.remove(temporary_local_cert_file_path)

        installer_cmd = extract_capsule_satellite_installer_command(
            certs_gen.stdout)
        result = self.run(installer_cmd, timeout=1800)
        if result.return_code != 0:
            # before exit download the capsule log file
            _, log_path = mkstemp(prefix='capsule_external-', suffix='.log')
            download_file('/var/log/foreman-installer/capsule.log', log_path,
                          self.ip_addr)
            raise CapsuleVirtualMachineError(
                result.return_code, result.stderr,
                u'foreman installer failed at capsule host')

        # manually start pulp_celerybeat service if BZ1446930 is open
        result = self.run('systemctl status pulp_celerybeat.service')
        if 'inactive (dead)' in '\n'.join(result.stdout):
            if is_open('BZ:1446930'):
                result = self.run('systemctl start pulp_celerybeat.service')
                if result.return_code != 0:
                    raise CapsuleVirtualMachineError(
                        'Failed to start pulp_celerybeat service\n{}'.format(
                            result.stderr))
            else:
                raise CapsuleVirtualMachineError(
                    'pulp_celerybeat service not running')
Example #21
    def test_positive_restore_ak_with_modified_subscription(self):
        """Restore activation key subscription from an exported activation key
        csv file with modified subscription.

        :id: 40b86d1c-88f8-451c-bf19-c5bf11223cb6

        :steps:
            1. Upload a manifest
            2. Create an activation key
            3. Attach RH subscription to the created activation key
            4. Export the organization activation keys to a csv file
            5. Create a new csv file and replace the subscription with another
               one (the new subscription must have different data from the
               default one)
            6. Import the new csv file to organization activation keys

        :expectedresults: activation key restored with the new subscription

        :BZ: 1296978

        :CaseImportance: Critical
        """
        # upload the organization default manifest
        self._upload_manifest(self.org['id'])
        # Export and download the organization subscriptions to prepare the new
        # subscription (The replacement of the default subscription)
        org_subs_csv_filename = 'subs_{0}.csv'.format(self.org['name'])
        org_subs_csv_remote_file_path = '/tmp/{0}'.format(
            org_subs_csv_filename)
        # export organization subscription to csv file
        CSV_.subscriptions({
            'export': True,
            'file': org_subs_csv_remote_file_path,
            'organization': self.org['name'],
        })
        # download the organization subscriptions
        org_subs_csv_local_file_path = os.path.join(tempfile.gettempdir(),
                                                    org_subs_csv_filename)
        download_file(org_subs_csv_remote_file_path,
                      org_subs_csv_local_file_path)
        _, org_subscriptions = self._read_csv_file(
            org_subs_csv_local_file_path)
        new_subscription = None
        for sub in org_subscriptions:
            if sub['Subscription Name'] == SATELLITE_SUBSCRIPTION_NAME:
                new_subscription = sub
                break
        self.assertIsNotNone(new_subscription)
        # Create an activation key and add the default subscription
        activation_key = make_activation_key({
            'organization-id': self.org['id'],
        })
        activationkey_add_subscription_to_repo({
            'organization-id': self.org['id'],
            'activationkey-id': activation_key['id'],
            'subscription': DEFAULT_SUBSCRIPTION_NAME,
        })
        org_subs = Subscription.list({u'organization-id': self.org['id']})
        default_subscription_id = None
        for sub in org_subs:
            if sub['name'] == DEFAULT_SUBSCRIPTION_NAME:
                default_subscription_id = sub['id']
                break
        self.assertIsNotNone(default_subscription_id,
                             msg='Default subscription not found')
        ak_subs = ActivationKey.subscriptions(
            {
                'organization-id': self.org['id'],
                'id': activation_key['id'],
            },
            output_format='json')
        self.assertEqual(len(ak_subs), 1)
        self.assertEqual(ak_subs[0]['name'], DEFAULT_SUBSCRIPTION_NAME)
        # export activation key data to csv file
        ak_export_file_name = 'ak_{0}_{1}_export.csv'.format(
            self.org['name'], activation_key['name'])
        ak_remote_export_file_path = '/tmp/{0}'.format(ak_export_file_name)
        CSV_.activation_keys({
            'export': True,
            'file': ak_remote_export_file_path,
            'organization': self.org['name'],
        })
        # download the file to local temp dir
        ak_local_export_file_path = os.path.join(tempfile.gettempdir(),
                                                 ak_export_file_name)
        download_file(ak_remote_export_file_path,
                      local_file=ak_local_export_file_path)
        # modify the file with new subscription data and upload it
        field_names, csv_ak_data = self._read_csv_file(
            ak_local_export_file_path)
        self.assertEqual(len(csv_ak_data), 1)
        csv_ak_data = csv_ak_data[0]
        field_names = csv_ak_data.keys()
        self.assertIn('Subscriptions', field_names)
        self.assertIn('Subscriptions', csv_ak_data)
        # The subscription is saved in the following format:
        # """<quantity>|<sku>|<name>|<contract>|<account>"""
        subscription_data = csv_ak_data['Subscriptions'].strip('"').split('|')
        # change the subscription SKU (looks like RH00001)
        subscription_data[1] = new_subscription['Subscription SKU']
        # change the name
        subscription_data[2] = new_subscription['Subscription Name']
        # change the contract number
        subscription_data[3] = new_subscription['Subscription Contract']
        # change the subscription account
        subscription_data[4] = new_subscription['Subscription Account']
        # modify the subscription data and generate a new csv file
        csv_ak_data['Subscriptions'] = '"{0}"'.format(
            '|'.join(subscription_data))
        ak_import_file_name = 'ak_{0}_{1}_import.csv'.format(
            self.org['name'], activation_key['name'])
        ak_local_import_file_path = os.path.join(tempfile.gettempdir(),
                                                 ak_import_file_name)
        self._write_csv_file(ak_local_import_file_path, field_names,
                             [csv_ak_data])
        # upload the generated file
        ak_remote_import_file_path = '/tmp/{0}'.format(ak_import_file_name)
        upload_file(ak_local_import_file_path, ak_remote_import_file_path)
        # import the generated csv file
        CSV_.activation_keys({
            'file': ak_remote_import_file_path,
            'organization': self.org['name'],
        })
        ak_subs = ActivationKey.subscriptions(
            {
                'organization-id': self.org['id'],
                'id': activation_key['id'],
            },
            output_format='json')
        self.assertEqual(len(ak_subs), 1)
        self.assertEqual(ak_subs[0]['name'], SATELLITE_SUBSCRIPTION_NAME)
        self.assertEqual(ak_subs[0]['contract'],
                         new_subscription['Subscription Contract'])
        self.assertEqual(ak_subs[0]['account'],
                         new_subscription['Subscription Account'])
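
Note: the pipe-delimited Subscriptions column handled above can be illustrated with a tiny round-trip on a fabricated value.

# fabricated CSV cell in the "<quantity>|<sku>|<name>|<contract>|<account>" format
cell = '"1|RH00001|Red Hat Satellite|10999999|5700000"'
fields = cell.strip('"').split('|')
fields[1] = 'RH00003'                                  # swap in a different SKU, as the test does
rebuilt = '"{0}"'.format('|'.join(fields))
assert rebuilt == '"1|RH00003|Red Hat Satellite|10999999|5700000"'
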
Example #22
    def test_positive_restore_content_hosts_with_modified_subscription(self):
        """Restore content hosts subscription from an exported content host csv
        file with modified subscription.

        :id: d8ac08fe-24e0-41e7-b3d8-0ca13a702a64

        :customerscenario: true

        :steps:
            1. Setup activation key , lifecycle environment and content view
               with RH tools repository
            2. Setup hosts (minimum two) and subscribe them to activation key
            3. Attach RH subscription to the created content hosts
            4. Export the organization content hosts to a csv file
            5. Create a new csv file and replace the subscription with another
               one (the new subscription must have different data from the
               default one)
            6. Import the new csv file to organization content hosts

        :expectedresults: content hosts restored with the new subscription

        :BZ: 1296978

        :CaseImportance: Critical
        """
        lce = make_lifecycle_environment({'organization-id': self.org['id']})
        activation_key = make_activation_key({
            'organization-id': self.org['id'],
            'lifecycle-environment-id': lce['id'],
        })
        ActivationKey.update({
            'organization-id': self.org['id'],
            'id': activation_key['id'],
            'auto-attach': 'false',
        })
        # Create RH tools repository and contents, this step should upload
        # the default manifest
        setup_org_for_a_rh_repo(
            {
                'product': PRDS['rhel'],
                'repository-set': REPOSET['rhst7'],
                'repository': REPOS['rhst7']['name'],
                'organization-id': self.org['id'],
                'lifecycle-environment-id': lce['id'],
                'activationkey-id': activation_key['id'],
            },
            force_use_cdn=True)
        # Export and download the organization subscriptions to prepare the new
        # subscription (The replacement of the default subscription)
        org_subs_csv_filename = 'subs_{0}.csv'.format(self.org['name'])
        org_subs_csv_remote_file_path = '/tmp/{0}'.format(
            org_subs_csv_filename)
        # export organization subscription to csv file
        CSV_.subscriptions({
            'export': True,
            'file': org_subs_csv_remote_file_path,
            'organization': self.org['name'],
        })
        # download the organization subscriptions
        org_subs_csv_local_file_path = os.path.join(tempfile.gettempdir(),
                                                    org_subs_csv_filename)
        download_file(org_subs_csv_remote_file_path,
                      org_subs_csv_local_file_path)
        _, org_subscriptions = self._read_csv_file(
            org_subs_csv_local_file_path)
        new_subscription = None
        for sub in org_subscriptions:
            if sub['Subscription Name'] == SATELLITE_SUBSCRIPTION_NAME:
                new_subscription = sub
                break
        self.assertIsNotNone(new_subscription)
        # retrieve the default subscription id
        org_subs = Subscription.list({u'organization-id': self.org['id']})
        default_subscription_id = None
        for sub in org_subs:
            if sub['name'] == DEFAULT_SUBSCRIPTION_NAME:
                default_subscription_id = sub['id']
                break
        self.assertIsNotNone(default_subscription_id,
                             msg='Default subscription not found')
        # create 2 Virtual machines
        with VirtualMachine() as client1, VirtualMachine() as client2:
            hosts = []
            for client in [client1, client2]:
                client.install_katello_ca()
                client.register_contenthost(
                    self.org['label'], activation_key=activation_key['name'])
                self.assertTrue(client.subscribed)
                host = Host.info({'name': client.hostname})
                hosts.append(host)
                Host.subscription_attach({
                    'host-id': host['id'],
                    'subscription-id': default_subscription_id,
                })
                host_subscriptions = ActivationKey.subscriptions(
                    {
                        'organization-id': self.org['id'],
                        'id': activation_key['id'],
                        'host-id': host['id'],
                    },
                    output_format='json')
                self.assertEqual(len(host_subscriptions), 1)
                self.assertEqual(host_subscriptions[0]['name'],
                                 DEFAULT_SUBSCRIPTION_NAME)
            # export the content host data to csv file
            chs_export_file_name = 'chs_export_{0}.csv'.format(
                self.org['label'])
            chs_export_remote_file_path = (
                '/tmp/{0}'.format(chs_export_file_name))
            CSV_.content_hosts({
                'export': True,
                'file': chs_export_remote_file_path,
                'organization': self.org['name'],
            })
            # download the csv file
            chs_export_local_file_path = os.path.join(tempfile.gettempdir(),
                                                      chs_export_file_name)
            download_file(chs_export_remote_file_path,
                          chs_export_local_file_path)
            # modify the content hosts subscription
            field_names, csv_data = self._read_csv_file(
                chs_export_local_file_path)
            # each client is represented by one row of data
            self.assertEqual(len(csv_data), 2)
            for row_data in csv_data:
                # The subscription is saved in the following format:
                # """<quantity>|<sku>|<name>|<contract>|<account>"""
                subscription_data = row_data['Subscriptions'].strip('"').split(
                    '|')
                # change the subscription SKU (looks like RH00001)
                subscription_data[1] = new_subscription['Subscription SKU']
                # change the name
                subscription_data[2] = new_subscription['Subscription Name']
                # change the contract number
                subscription_data[3] = new_subscription[
                    'Subscription Contract']
                # change the subscription account
                subscription_data[4] = new_subscription['Subscription Account']
                # modify the subscription data
                row_data['Subscriptions'] = '"{0}"'.format(
                    '|'.join(subscription_data))
            # generate a new csv file
            chs_import_file_name = 'chs_import_{0}.csv'.format(
                self.org['name'])
            chs_import_local_file_path = os.path.join(tempfile.gettempdir(),
                                                      chs_import_file_name)
            self._write_csv_file(chs_import_local_file_path, field_names,
                                 csv_data)
            # upload the file
            chs_import_remote_file_path = (
                '/tmp/{0}'.format(chs_import_file_name))
            upload_file(chs_import_local_file_path,
                        chs_import_remote_file_path)
            # import content hosts data from csv file
            CSV_.content_hosts({
                'file': chs_import_remote_file_path,
                'organization': self.org['name'],
            })
            for host in hosts:
                host_subscriptions = ActivationKey.subscriptions(
                    {
                        'organization-id': self.org['id'],
                        'id': activation_key['id'],
                        'host-id': host['id'],
                    },
                    output_format='json')
                self.assertEqual(len(host_subscriptions), 1)
                self.assertEqual(host_subscriptions[0]['name'],
                                 SATELLITE_SUBSCRIPTION_NAME)
                self.assertEqual(host_subscriptions[0]['contract'],
                                 new_subscription['Subscription Contract'])
                self.assertEqual(host_subscriptions[0]['account'],
                                 new_subscription['Subscription Account'])
Example #23
    def _setup_capsule(self):
        """Prepare the virtual machine to host a capsule node"""
        # setup the name resolution
        self._capsule_setup_name_resolution()
        logger.info('adding repofiles required for capsule installation')
        self.create_custom_repos(
            capsule=settings.capsule_repo,
            rhscl=settings.rhscl_repo,
            ansible=settings.ansible_repo,
            maint=settings.satmaintenance_repo,
        )
        self.configure_rhel_repo(getattr(settings, f"{self.distro}_repo"))
        self.run('yum repolist')
        self.run('yum -y update')
        self.run('yum -y install satellite-capsule', timeout=1200)
        result = self.run('rpm -q satellite-capsule')
        if result.return_code != 0:
            raise CapsuleVirtualMachineError(
                f'Failed to install satellite-capsule package\n{result.stderr}'
            )
        # update http proxy except list
        result = Settings.list({'search': 'http_proxy_except_list'})[0]
        if result["value"] == "[]":
            except_list = f'[{self.hostname}]'
        else:
            except_list = result["value"][:-1] + f', {self.hostname}]'
        Settings.set({'name': 'http_proxy_except_list', 'value': except_list})
        # generate certificate
        cert_file_path = f'/root/{self.hostname}-certs.tar'
        certs_gen = ssh.command('capsule-certs-generate '
                                '--foreman-proxy-fqdn {} '
                                '--certs-tar {}'.format(
                                    self.hostname, cert_file_path))
        if certs_gen.return_code != 0:
            raise CapsuleVirtualMachineError(
                f'Unable to generate certificate\n{certs_gen.stderr}')
        # copy the certificate to capsule vm
        _, temporary_local_cert_file_path = mkstemp(suffix='-certs.tar')
        logger.info(f'downloading the certs file: {cert_file_path}')
        download_file(
            remote_file=cert_file_path,
            local_file=temporary_local_cert_file_path,
            hostname=settings.server.hostname,
        )
        logger.info(f'uploading the certs file: {cert_file_path}')
        upload_file(
            key_filename=settings.server.ssh_key,
            local_file=temporary_local_cert_file_path,
            remote_file=cert_file_path,
            hostname=self.ip_addr,
        )
        # delete the temporary file
        os.remove(temporary_local_cert_file_path)

        installer_cmd = extract_capsule_satellite_installer_command(
            certs_gen.stdout)
        installer_cmd += " --verbose"
        result = self.run(installer_cmd, timeout=1800)
        if result.return_code != 0:
            # before exit download the capsule log file
            _, log_path = mkstemp(prefix='capsule_external-', suffix='.log')
            download_file('/var/log/foreman-installer/capsule.log', log_path,
                          self.ip_addr)
            raise CapsuleVirtualMachineError(
                result.return_code, result.stderr,
                'foreman installer failed at capsule host')

        # manually start pulp_celerybeat service if BZ1446930 is open
        result = self.run('systemctl status pulp_celerybeat.service')
        if 'inactive (dead)' in '\n'.join(result.stdout):
            if is_open('BZ:1446930'):
                result = self.run('systemctl start pulp_celerybeat.service')
                if result.return_code != 0:
                    raise CapsuleVirtualMachineError(
                        f'Failed to start pulp_celerybeat service\n{result.stderr}'
                    )
            else:
                raise CapsuleVirtualMachineError(
                    'pulp_celerybeat service not running')
Example #24
    def test_positive_reinstall_on_same_node_after_remove(self):
        """Reinstall capsule on the same node after remove

        :id: fac35a44-0bc9-44e9-a2c3-398e1aa9900c

        :customerscenario: true

        :expectedresults: The capsule is successfully reinstalled

        :BZ: 1327442

        :CaseLevel: System
        """
        # Note: capsule-remove has been replaced by katello-remove
        with CapsuleVirtualMachine() as capsule_vm:
            # ensure that capsule refresh-features succeed
            with self.assertNotRaises(CLIReturnCodeError):
                Capsule.refresh_features(
                    {'name': capsule_vm._capsule_hostname})
            # the katello-remove command asks for confirmation: type Y, then type remove
            result = capsule_vm.run("printf 'Y\nremove\n' | katello-remove")
            self.assertEqual(result.return_code, 0)
            # ensure that capsule refresh-features fail
            with self.assertRaises(CLIReturnCodeError):
                Capsule.refresh_features(
                    {'name': capsule_vm._capsule_hostname})
            # reinstall katello certs as they have been removed
            capsule_vm.install_katello_ca()
            # install satellite-capsule package
            result = capsule_vm.run('yum install -y satellite-capsule')
            self.assertEqual(result.return_code, 0)
            # generate capsule certs and installer command
            cert_file_path = '/tmp/{0}-certs.tar'.format(capsule_vm.hostname)
            result = ssh.command(
                'capsule-certs-generate '
                '--foreman-proxy-fqdn {0} '
                '--certs-tar {1}'
                .format(capsule_vm.hostname, cert_file_path)
            )
            self.assertEqual(result.return_code, 0)
            # retrieve the installer command from the result output
            installer_cmd = extract_capsule_satellite_installer_command(
                result.stdout
            )
            # copy the generated certs to capsule vm
            _, temporary_local_cert_file_path = mkstemp(suffix='-certs.tar')
            ssh.download_file(
                remote_file=cert_file_path,
                local_file=temporary_local_cert_file_path,
                hostname=settings.server.hostname
            )
            ssh.upload_file(
                local_file=temporary_local_cert_file_path,
                remote_file=cert_file_path,
                hostname=capsule_vm.ip_addr
            )
            # delete the temporary file
            os.remove(temporary_local_cert_file_path)
            result = capsule_vm.run(installer_cmd, timeout=1500)
            self.assertEqual(result.return_code, 0)
            # ensure that capsule refresh-features succeed
            with self.assertNotRaises(CLIReturnCodeError):
                Capsule.refresh_features(
                    {'name': capsule_vm.hostname})
Example #25
def oscap_content_path():
    # download scap content from satellite
    _, file_name = os.path.split(settings.oscap.content_path)
    local_file = "/tmp/{}".format(file_name)
    ssh.download_file(settings.oscap.content_path, local_file)
    return local_file
Example #26
    def test_positive_restore_ak_with_modified_subscription(self):
        """Restore activation key subscription from an exported activation key
        csv file with modified subscription.

        :id: 40b86d1c-88f8-451c-bf19-c5bf11223cb6

        :steps:
            1. Upload a manifest
            2. Create an activation key
            3. Attach RH subscription to the created activation key
            4. Export the organization activation keys to a csv file
            5. Create a new csv file and replace the subscription with another
               one (the new subscription must have different data from the
               default one)
            6. Import the new csv file to organization activation keys

        :expectedresults: activation key restored with the new subscription

        :BZ: 1296978

        :CaseImportance: Critical
        """
        # upload the organization default manifest
        self._upload_manifest(self.org['id'])
        # Export and download the organization subscriptions to prepare the new
        # subscription (the replacement for the default subscription)
        org_subs_csv_filename = 'subs_{0}.csv'.format(self.org['name'])
        org_subs_csv_remote_file_path = '/tmp/{0}'.format(
            org_subs_csv_filename)
        # export organization subscription to csv file
        CSV_.subscriptions({
            'export': True,
            'file': org_subs_csv_remote_file_path,
            'organization': self.org['name'],
        })
        # download the organization subscriptions
        org_subs_csv_local_file_path = os.path.join(
            tempfile.gettempdir(), org_subs_csv_filename)
        download_file(
            org_subs_csv_remote_file_path, org_subs_csv_local_file_path)
        _, org_subscriptions = self._read_csv_file(
            org_subs_csv_local_file_path)
        new_subscription = None
        for sub in org_subscriptions:
            if sub['Subscription Name'] == SATELLITE_SUBSCRIPTION_NAME:
                new_subscription = sub
                break
        self.assertIsNotNone(new_subscription)
        # Create an activation key and add the default subscription
        activation_key = make_activation_key({
            'organization-id': self.org['id'],
        })
        activationkey_add_subscription_to_repo({
            'organization-id': self.org['id'],
            'activationkey-id': activation_key['id'],
            'subscription': DEFAULT_SUBSCRIPTION_NAME,
        })
        org_subs = Subscription.list({u'organization-id': self.org['id']})
        default_subscription_id = None
        for sub in org_subs:
            if sub['name'] == DEFAULT_SUBSCRIPTION_NAME:
                default_subscription_id = sub['id']
                break
        self.assertIsNotNone(
            default_subscription_id, msg='Default subscription not found')
        ak_subs = ActivationKey.subscriptions({
            'organization-id': self.org['id'],
            'id': activation_key['id'],
        }, output_format='json')
        self.assertEqual(len(ak_subs), 1)
        self.assertEqual(
            ak_subs[0]['name'], DEFAULT_SUBSCRIPTION_NAME)
        # export activation key data to csv file
        ak_export_file_name = 'ak_{0}_{1}_export.csv'.format(
            self.org['name'], activation_key['name'])
        ak_remote_export_file_path = '/tmp/{0}'.format(ak_export_file_name)
        CSV_.activation_keys({
            'export': True,
            'file': ak_remote_export_file_path,
            'organization': self.org['name'],
        })
        # download the file to local temp dir
        ak_local_export_file_path = os.path.join(
            tempfile.gettempdir(), ak_export_file_name)
        download_file(
            ak_remote_export_file_path, local_file=ak_local_export_file_path)
        # modify the file with new subscription data and upload it
        field_names, csv_ak_data = self._read_csv_file(
            ak_local_export_file_path)
        self.assertEqual(len(csv_ak_data), 1)
        csv_ak_data = csv_ak_data[0]
        field_names = csv_ak_data.keys()
        self.assertIn('Subscriptions', field_names)
        self.assertIn('Subscriptions', csv_ak_data)
        # The subscription is saved in the following format:
        # """<quantity>|<sku>|<name>|<contract>|<account>"""
        subscription_data = csv_ak_data['Subscriptions'].strip('"').split('|')
        # change the subscription SKU (looks like RH00001)
        subscription_data[1] = new_subscription['Subscription SKU']
        # change the name
        subscription_data[2] = new_subscription['Subscription Name']
        # change the contract number
        subscription_data[3] = new_subscription['Subscription Contract']
        # change the subscription account
        subscription_data[4] = new_subscription['Subscription Account']
        # modify the subscription data and generate a new csv file
        csv_ak_data['Subscriptions'] = '"{0}"'.format(
            '|'.join(subscription_data))
        ak_import_file_name = 'ak_{0}_{1}_import.csv'.format(
            self.org['name'], activation_key['name'])
        ak_local_import_file_path = os.path.join(
            tempfile.gettempdir(), ak_import_file_name)
        self._write_csv_file(
            ak_local_import_file_path, field_names, [csv_ak_data])
        # upload the generated file
        ak_remote_import_file_path = '/tmp/{0}'.format(ak_import_file_name)
        upload_file(ak_local_import_file_path, ak_remote_import_file_path)
        # import the generated csv file
        CSV_.activation_keys({
            'file': ak_remote_import_file_path,
            'organization': self.org['name'],
        })
        ak_subs = ActivationKey.subscriptions({
            'organization-id': self.org['id'],
            'id': activation_key['id'],
        }, output_format='json')
        self.assertEqual(len(ak_subs), 1)
        self.assertEqual(
            ak_subs[0]['name'], SATELLITE_SUBSCRIPTION_NAME)
        self.assertEqual(
            ak_subs[0]['contract'],
            new_subscription['Subscription Contract'])
        self.assertEqual(
            ak_subs[0]['account'], new_subscription['Subscription Account'])
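The _read_csv_file and _write_csv_file helpers used by this test are defined
elsewhere in the test class. A minimal sketch of what such helpers might look
like, built on Python's csv module and shown here as module-level functions;
the actual robottelo signatures may differ:

import csv

def _read_csv_file(file_path):
    """Return (field_names, rows), where rows is a list of dicts keyed by the
    csv header.
    """
    with open(file_path, 'r') as csv_file:
        reader = csv.DictReader(csv_file)
        rows = [dict(row) for row in reader]
    return reader.fieldnames, rows

def _write_csv_file(file_path, field_names, rows):
    """Write a list of dict rows back out under the given csv header."""
    with open(file_path, 'w') as csv_file:
        writer = csv.DictWriter(csv_file, fieldnames=field_names)
        writer.writeheader()
        writer.writerows(rows)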
Example #27
    def test_positive_restore_content_hosts_with_modified_subscription(self):
        """Restore content hosts subscription from an exported content host csv
        file with modified subscription.

        :id: d8ac08fe-24e0-41e7-b3d8-0ca13a702a64

        :customerscenario: true

        :steps:
            1. Set up an activation key, lifecycle environment and content view
               with the RH tools repository
            2. Set up hosts (minimum two) and subscribe them to the activation
               key
            3. Attach RH subscription to the created content hosts
            4. Export the organization content hosts to a csv file
            5. Create a new csv file and replace the subscription with another
               one (the new subscription must have different data from the
               default one)
            6. Import the new csv file to organization content hosts

        :expectedresults: content hosts restored with the new subscription

        :BZ: 1296978

        :CaseImportance: Critical
        """
        lce = make_lifecycle_environment({'organization-id': self.org['id']})
        activation_key = make_activation_key({
            'organization-id': self.org['id'],
            'lifecycle-environment-id': lce['id'],
        })
        ActivationKey.update({
            'organization-id': self.org['id'],
            'id': activation_key['id'],
            'auto-attach': 'false',
        })
        # Create the RH tools repository and contents; this step should upload
        # the default manifest
        setup_org_for_a_rh_repo({
            'product': PRDS['rhel'],
            'repository-set': REPOSET['rhst7'],
            'repository': REPOS['rhst7']['name'],
            'organization-id': self.org['id'],
            'lifecycle-environment-id': lce['id'],
            'activationkey-id': activation_key['id'],
        }, force_use_cdn=True)
        # Export and download the organization subscriptions to prepare the new
        # subscription (the replacement for the default subscription)
        org_subs_csv_filename = 'subs_{0}.csv'.format(self.org['name'])
        org_subs_csv_remote_file_path = '/tmp/{0}'.format(
            org_subs_csv_filename)
        # export organization subscription to csv file
        CSV_.subscriptions({
            'export': True,
            'file': org_subs_csv_remote_file_path,
            'organization': self.org['name'],
        })
        # download the organization subscriptions
        org_subs_csv_local_file_path = os.path.join(
            tempfile.gettempdir(), org_subs_csv_filename)
        download_file(
            org_subs_csv_remote_file_path, org_subs_csv_local_file_path)
        _, org_subscriptions = self._read_csv_file(
            org_subs_csv_local_file_path)
        new_subscription = None
        for sub in org_subscriptions:
            if sub['Subscription Name'] == SATELLITE_SUBSCRIPTION_NAME:
                new_subscription = sub
                break
        self.assertIsNotNone(new_subscription)
        # retrieve the default subscription id
        org_subs = Subscription.list({u'organization-id': self.org['id']})
        default_subscription_id = None
        for sub in org_subs:
            if sub['name'] == DEFAULT_SUBSCRIPTION_NAME:
                default_subscription_id = sub['id']
                break
        self.assertIsNotNone(
            default_subscription_id, msg='Default subscription not found')
        # create 2 Virtual machines
        with VirtualMachine() as client1, VirtualMachine() as client2:
            hosts = []
            for client in [client1, client2]:
                client.install_katello_ca()
                client.register_contenthost(
                    self.org['label'], activation_key=activation_key['name'])
                self.assertTrue(client.subscribed)
                host = Host.info({'name': client.hostname})
                hosts.append(host)
                Host.subscription_attach({
                    'host-id': host['id'],
                    'subscription-id': default_subscription_id,
                })
                host_subscriptions = ActivationKey.subscriptions({
                    'organization-id': self.org['id'],
                    'id': activation_key['id'],
                    'host-id': host['id'],
                }, output_format='json')
                self.assertEqual(len(host_subscriptions), 1)
                self.assertEqual(
                    host_subscriptions[0]['name'], DEFAULT_SUBSCRIPTION_NAME)
            # export the content host data to csv file
            chs_export_file_name = 'chs_export_{0}.csv'.format(
                self.org['label'])
            chs_export_remote_file_path = (
                '/tmp/{0}'.format(chs_export_file_name)
            )
            CSV_.content_hosts({
                'export': True,
                'file': chs_export_remote_file_path,
                'organization': self.org['name'],
            })
            # download the csv file
            chs_export_local_file_path = os.path.join(
                tempfile.gettempdir(), chs_export_file_name)
            download_file(
                chs_export_remote_file_path, chs_export_local_file_path)
            # modify the content hosts subscription
            field_names, csv_data = self._read_csv_file(
                chs_export_local_file_path)
            # each client is represented by one row of data
            self.assertEqual(len(csv_data), 2)
            for row_data in csv_data:
                # The subscription is saved in the following format:
                # """<quantity>|<sku>|<name>|<contract>|<account>"""
                subscription_data = row_data['Subscriptions'].strip(
                    '"').split('|')
                # change the subscription SKU (looks like RH00001)
                subscription_data[1] = new_subscription['Subscription SKU']
                # change the name
                subscription_data[2] = new_subscription['Subscription Name']
                # change the contract number
                subscription_data[3] = new_subscription[
                    'Subscription Contract']
                # change the subscription account
                subscription_data[4] = new_subscription[
                    'Subscription Account']
                # modify the subscription data
                row_data['Subscriptions'] = '"{0}"'.format(
                    '|'.join(subscription_data))
            # generate a new csv file
            chs_import_file_name = 'chs_import_{0}.csv'.format(
                self.org['name'])
            chs_import_local_file_path = os.path.join(
                tempfile.gettempdir(), chs_import_file_name)
            self._write_csv_file(
                chs_import_local_file_path, field_names, csv_data)
            # upload the file
            chs_import_remote_file_path = (
                '/tmp/{0}'.format(chs_import_file_name)
            )
            upload_file(
                chs_import_local_file_path, chs_import_remote_file_path)
            # import content hosts data from csv file
            CSV_.content_hosts({
                'file': chs_import_remote_file_path,
                'organization': self.org['name'],
            })
            for host in hosts:
                host_subscriptions = ActivationKey.subscriptions({
                    'organization-id': self.org['id'],
                    'id': activation_key['id'],
                    'host-id': host['id'],
                }, output_format='json')
                self.assertEqual(len(host_subscriptions), 1)
                self.assertEqual(
                    host_subscriptions[0]['name'], SATELLITE_SUBSCRIPTION_NAME)
                self.assertEqual(
                    host_subscriptions[0]['contract'],
                    new_subscription['Subscription Contract'])
                self.assertEqual(
                    host_subscriptions[0]['account'],
                    new_subscription['Subscription Account'])
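Both csv tests above rewrite the Subscriptions column by hand. A small
illustrative helper (a sketch, not part of robottelo) showing the same
"<quantity>|<sku>|<name>|<contract>|<account>" round trip in one place:

def replace_subscription(csv_row, new_subscription):
    # the Subscriptions cell holds: "<quantity>|<sku>|<name>|<contract>|<account>"
    fields = csv_row['Subscriptions'].strip('"').split('|')
    fields[1] = new_subscription['Subscription SKU']
    fields[2] = new_subscription['Subscription Name']
    fields[3] = new_subscription['Subscription Contract']
    fields[4] = new_subscription['Subscription Account']
    csv_row['Subscriptions'] = '"{0}"'.format('|'.join(fields))
    return csv_row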