Example #1
0
def browser_setup(get_appliance, provider, vm_to_analyze, fs_type, mgmt_sys_api_clients):
    '''Overrides env.conf and points a browser to the appliance IP passed to it.

    Once finished with the test, it checks whether any remaining tests still need the
    appliance and deletes it if none do, unless it is the appliance specified in conf/env.yaml.
    '''
    global appliance_vm_name
    global test_list

    test_list.remove(['', provider, vm_to_analyze, fs_type])
    with browser_session(base_url='https://' + get_appliance):
        yield nav.home_page_logged_in(testsetup)

        # clean up the provisioned appliance if no more tests need it
        if ('appliances_provider' not in cfme_data['basic_info'].keys() or
                provider != cfme_data['basic_info']['appliances_provider']):
            more_same_provider_tests = False
            for outstanding_test in test_list:
                if outstanding_test[1] == provider:
                    logger.debug("More provider tests found")
                    more_same_provider_tests = True
                    break
            if not more_same_provider_tests:
                # if RHEV, remove the direct_lun disk before deleting
                if cfme_data['management_systems'][provider]['type'] == 'rhevm':
                    logger.info('Removing RHEV direct_lun hook...')
                    run_command("./scripts/connect_directlun.py --remove --provider " +
                        provider + " --vm_name " + appliance_vm_name)
                # delete appliance
                logger.info("Delete provisioned appliance: " + appliance_list[provider])
                destroy_cmd = ('./scripts/clone_template.py --provider ' + provider + ' '
                    '--destroy --vm_name ' + appliance_vm_name + ' ')
                run_command(destroy_cmd)
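
`run_command` is used throughout this example but defined elsewhere; a minimal sketch of what such a helper might look like (a hypothetical stand-in, standard library only, not the project's actual implementation):

import subprocess

def run_command(cmd):
    '''Hypothetical stand-in for the helper used above: run a shell
    command and return its exit code and combined output.'''
    process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT)
    output, _ = process.communicate()
    return process.returncode, output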
Example #2
0
    def __enter__(self):
        # More mozwebqa bootstrapping, start the browser, expose some
        # client attrs on testsetup, navigate to the requested url,
        # return a Page instance with the current testsetup
        # This should mimic the behavior of mozwebqa as closely as possible
        self.testsetup.selenium_client.start()
        copy_attrs = (
            'selenium',
            'timeout',
            'default_implicit_wait'
        )
        for attr in copy_attrs:
            setattr(self.testsetup, attr, getattr(self.testsetup.selenium_client, attr))

        self.testsetup.base_url = self.url
        self.testsetup.selenium.maximize_window()
        self.testsetup.selenium.get(self.url)

        # Is it a page or a fixture?
        if isinstance(self.page_or_fixture, types.FunctionType):
            # Function! It's a fixture and we should use it...
            home_page_logged_in = navigation.home_page_logged_in(self.testsetup)
            # If you passed in the home_page_logged_in fixture itself, it will just be called with itself.
            return self.page_or_fixture(home_page_logged_in)
        else:
            # Not a function! It's probably a Page class that we should set up
            return self.page_or_fixture(self.testsetup)
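
The matching `__exit__` is not shown; a minimal sketch, assuming `selenium_client` exposes a `stop()` counterpart to the `start()` call above (an assumption, not confirmed by this snippet):

    def __exit__(self, exc_type, exc_value, traceback):
        # Always tear the browser down; implicitly returning None (falsy)
        # lets any in-flight exception propagate to the caller.
        self.testsetup.selenium_client.stop()  # assumed counterpart to start()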
Example #3
0
def browser_setup(get_appliance, provider, vm_to_analyze, fs_type,
                  mgmt_sys_api_clients):
    '''Overrides env.conf and points a browser to the appliance IP passed to it.

    Once finished with the test, it checks whether any remaining tests still need the
    appliance and deletes it if none do, unless it is the appliance specified in conf/env.yaml.
    '''
    global appliance_vm_name
    global test_list

    test_list.remove(['', provider, vm_to_analyze, fs_type])
    with browser_session(base_url='https://' + get_appliance):
        yield nav.home_page_logged_in(testsetup)

        # clean up the provisioned appliance if no more tests need it
        if ('appliances_provider' not in cfme_data['basic_info'].keys()
                or provider != cfme_data['basic_info']['appliances_provider']):
            more_same_provider_tests = False
            for outstanding_test in test_list:
                if outstanding_test[1] == provider:
                    logger.debug("More provider tests found")
                    more_same_provider_tests = True
                    break
            if not more_same_provider_tests:
                # if RHEV, remove the direct_lun disk before deleting
                if cfme_data['management_systems'][provider]['type'] == 'rhevm':
                    logger.info('Removing RHEV direct_lun hook...')
                    run_command(
                        "./scripts/connect_directlun.py --remove --provider " +
                        provider + " --vm_name " + appliance_vm_name)
                # delete appliance
                logger.info("Delete provisioned appliance: " +
                            appliance_list[provider])
                destroy_cmd = ('./scripts/clone_template.py --provider ' +
                               provider + ' '
                               '--destroy --vm_name ' + appliance_vm_name +
                               ' ')
                run_command(destroy_cmd)
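
Both copies of `browser_setup` assemble shell commands by string concatenation. A sketch of a more robust alternative that passes an argument list, so no shell parsing is involved (illustrative only; the project's `run_command` helper may well expect a string):

import subprocess

destroy_argv = ['./scripts/clone_template.py',
                '--provider', provider,
                '--destroy',
                '--vm_name', appliance_vm_name]
# An argv list avoids quoting problems if a name ever contains spaces
# or shell metacharacters.
subprocess.check_call(destroy_argv)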
Example #4
0
    def test_app_migration(self, backup_test, soft_assert):
        vm_name = "migtest_" + backup_test
        provider = cfme_data["basic_info"]["appliances_provider"]
        test_data = migration_tests["backup_tests"][backup_test]
        template = cfme_data['basic_info']['appliance_template_big_db_disk']

        # provision and configure the appliance
        appliance = provision_appliance(
            vm_name_prefix=vm_name, template=template, provider_name=provider)
        logger.info("appliance IP address: " + str(appliance.address))
        appliance.enable_internal_db()
        appliance.wait_for_web_ui()

        # start restore and migration
        appliance_ssh = appliance.ssh_client()
        appliance_ssh.put_file("./scripts/restore.py", "/root")
        appliance_ssh.run_command("curl -o restore_scripts.gz " +
            cfme_data["basic_info"]["restore_scripts_url"])
        if "restore_fixes_url" in cfme_data["basic_info"].keys():
            appliance_ssh.run_command("curl -o fix_scripts.gz " +
                cfme_data["basic_info"]["restore_fixes_url"])
        appliance_ssh.run_command("curl -o backup.gz " + test_data['url'])
        logger.info("Running db restore/migration...")
        rc, output = appliance_ssh.run_command("/root/restore.py --scripts " +
            "/root/restore_scripts.gz --backupfile /root/backup.gz")
        soft_assert(rc == 0)

        # re-init the connection; long migrations can time it out
        appliance_ssh.close()
        appliance_ssh = appliance.ssh_client()
        appliance_ssh.get_file("/root/output.log", ".")

        # Log the restore/migration output
        process = sub.Popen("cat ./output.log; rm -rf ./output.log",
            shell=True, stdout=sub.PIPE, stderr=sub.PIPE)
        output, error = process.communicate()
        logger.info("Running cmd:   cat ./output.log; rm -rf ./output.log")
        logger.info("Output: \n" + output)

        # get database table counts
        this_db = appliance.db
        session = this_db.session
        logger.info("Checking db table counts after migration...")
        db_counts = {}
        for table_name in sorted(test_data['counts'].keys()):
            db_counts[table_name] = session.query(this_db[table_name]).count()

        # start up evmserverd and poke ui
        appliance_ssh.run_command("service evmserverd start")
        appliance.wait_for_web_ui()
        with appliance.browser_session():
            nav.home_page_logged_in()
            nav_to_roles().edit_current_role_list("ems_inventory ems_operations")
            setup_provider(provider)
            provider_details = nav.infra_providers_pg().load_provider_details(
                cfme_data["management_systems"][provider]["name"])
            vm_details = provider_details.all_vms().find_vm_page(
                appliance.vm_name, None, False, True, 6)
            soft_assert(vm_details.on_vm_details(appliance.vm_name))

        # check table counts vs what we are expecting
        for table_name in sorted(test_data['counts'].keys()):
            expected_count = test_data['counts'][table_name]
            actual_count = db_counts[table_name]
            soft_assert(actual_count == expected_count, 'Table ' + table_name + ' (' +
                str(actual_count) + ') does not match expected (' + str(expected_count) + ')')

        # delete appliance
        logger.info("Delete provisioned appliance: " + appliance.address)
        appliance.destroy()
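
The `cat`/`rm` subprocess used above to log the restore output could be done in plain Python; a minimal equivalent sketch:

import os

# Read and log the migration output, then remove the local copy.
with open('./output.log') as log_file:
    logger.info("Output: \n" + log_file.read())
os.remove('./output.log')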
Example #5
0
def setup_for_event_testing(ssh_client, db_session, listener_info, providers):
    # FIX THE ENV ERROR IF PRESENT
    if ssh_client.run_command("ruby -v")[0] != 0:
        success = ssh_client.run_command("echo 'source /etc/default/evm' >> .bashrc")[0] == 0
        assert success, "Issuing the patch command was unsuccessful"
        # Verify it works
        assert ssh_client.run_command("ruby -v")[0] == 0, "Patch failed"

    # IMPORT AUTOMATE NAMESPACE
    qe_automate_namespace_xml = "qe_event_handler.xml"
    qe_automate_namespace_script = "qe_event_handler.rb"
    local_automate_script = local(__file__)\
        .new(basename="../data/%s" % qe_automate_namespace_script)\
        .strpath
    local_automate_file = local(__file__)\
        .new(basename="../data/%s" % qe_automate_namespace_xml)\
        .strpath
    tmp_automate_file = "/tmp/%s" % qe_automate_namespace_xml

    # Change the information
    with open(local_automate_file, "r") as input_xml, \
            open(tmp_automate_file, "w") as output_xml:
        tree = etree.parse(input_xml)
        root = tree.getroot()

        def set_text(xpath, text):
            field = root.xpath(xpath)
            assert len(field) == 1
            field[0].text = text
        set_text("//MiqAeSchema/MiqAeField[@name='url']",
                 re.sub(r"^http://([^/]+)/?$", "\\1", listener_info.host))
        set_text("//MiqAeSchema/MiqAeField[@name='port']", str(listener_info.port))

        # Put the custom script from an external file
        with open(local_automate_script, "r") as script:
            set_text("//MiqAeMethod[@name='relay_events']",
                     etree.CDATA(script.read()))

        et = etree.ElementTree(root)
        et.write(output_xml)

    # copy the xml file to the appliance, but first check whether it's
    # already there: we may have applied this file on a previous run
    if ssh_client.run_command("ls /root/%s" % qe_automate_namespace_xml)[0] != 0:
        ssh_client.put_file(tmp_automate_file, '/root/')

        # run rake cmd on appliance to import automate namespace
        rake_cmd = "evm:automate:import FILE=/root/%s" % \
            qe_automate_namespace_xml
        return_code, stdout = ssh_client.run_rake_command(rake_cmd)
        try:
            assert return_code == 0, "namespace import was unsuccessful"
        except AssertionError:
            # The import failed, so remove the file: its absence signals
            # that the import needs to be retried on the next run
            ssh_client.run_command("rm -f /root/%s" % qe_automate_namespace_xml)
            raise

    # CREATE AUTOMATE INSTANCE HOOK
    if db_session.query(db.MiqAeInstance.name)\
            .filter(db.MiqAeInstance.name == "RelayEvents").count() == 0:   # Check presence
        automate_explorer_pg = nav.automate_explorer_pg()
        parent_class = "Automation Requests (Request)"
        instance_details = [
            "RelayEvents",
            "RelayEvents",
            "relationship hook to link to custom QE events relay namespace"
        ]
        instance_row = 2
        instance_value = "/QE/Automation/APIMethods/relay_events?event=$evm.object['event']"

        class_pg = automate_explorer_pg.click_on_class_access_node(parent_class)
        if not class_pg.is_instance_present("RelayEvents"):
            instance_pg = class_pg.click_on_add_new_instance()
            instance_pg.fill_instance_info(*instance_details)
            instance_pg.fill_instance_field_row_info(instance_row, instance_value)
            class_pg = instance_pg.click_on_add_system_button()
            assert class_pg.flash_message_class == 'Automate Instance "%s" was added' %\
                instance_details[0]

    # IMPORT POLICIES
    policy_yaml = "profile_relay_events.yaml"
    policy_path = local(__file__).new(basename="../data/%s" % policy_yaml)

    home_pg = nav.home_page_logged_in()
    import_pg = home_pg.header.site_navigation_menu("Control")\
        .sub_navigation_menu("Import / Export")\
        .click()
    if not import_pg.has_profile_available("Automate event policies"):
        import_pg = import_pg.import_policies(policy_path.strpath)
        assert import_pg.flash.message == "Press commit to Import"
        import_pg = import_pg.click_on_commit()
        assert "was uploaded successfully" in import_pg.flash.message

    # ASSIGN POLICY PROFILES
    for provider in providers:
        assign_policy_profile_to_infra_provider("Automate event policies", provider)
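
The CDATA step in the middle of this example (embedding the Ruby relay script inside the automate XML) is a general lxml pattern; a self-contained sketch:

from lxml import etree

root = etree.fromstring('<MiqAeMethod name="relay_events"/>')
# Assigning an etree.CDATA object keeps the payload from being XML-escaped.
root.text = etree.CDATA("puts 'embedded script body'")
print(etree.tostring(root))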
Example #6
0
def test_fixture(mozwebqa, fixture_name):
    home_pg = navigation.home_page_logged_in(mozwebqa)
    fixture_pg = getattr(navigation, fixture_name)(home_pg)
    Assert.true(fixture_pg.is_the_current_page)
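
`test_fixture` resolves a page object by name with `getattr`, so `fixture_name` is presumably injected by parametrization; a hedged sketch of such wiring (the fixture names listed are hypothetical; `navigation` and `Assert` come from the example's own imports):

import pytest

@pytest.mark.parametrize('fixture_name', [
    'infra_providers_pg',     # hypothetical navigation fixture names
    'automate_explorer_pg',
])
def test_fixture(mozwebqa, fixture_name):
    home_pg = navigation.home_page_logged_in(mozwebqa)
    fixture_pg = getattr(navigation, fixture_name)(home_pg)
    Assert.true(fixture_pg.is_the_current_page)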
Example #7
0
#!/usr/bin/env python2
from IPython import embed

from fixtures import navigation as nav
from utils.browser import browser_session, testsetup

with browser_session() as browser:
    pg = nav.home_page_logged_in(testsetup)
    embed()
Example #8
0
def duckwebqa_loggedin(browser):
    # One login to rule them all!
    yield home_page_logged_in(utils.browser.testsetup)
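
Since `duckwebqa_loggedin` is a generator, it is presumably registered as a yield-style pytest fixture; a minimal sketch of the decorator wiring (assuming the classic pytest API of that era; on modern pytest a plain @pytest.fixture also accepts generators):

import pytest

@pytest.yield_fixture
def duckwebqa_loggedin(browser):
    # One login to rule them all!
    yield home_page_logged_in(utils.browser.testsetup)
    # Any teardown placed after the yield would run once the test ends.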