Example #1
def test_dialog_default_value_integer(
        appliance, generic_catalog_item_with_imported_dialog, file_name):
    """
    Bugzilla:
        1554780
    Polarion:
        assignee: nansari
        casecomponent: Services
        initialEstimate: 1/4h
        testtype: functional
        startsin: 5.10
    """
    catalog_item, _, ele_label = generic_catalog_item_with_imported_dialog
    service_catalogs = ServiceCatalogs(appliance, catalog_item.catalog,
                                       catalog_item.name)

    # download yaml file
    fs = FTPClientWrapper(cfme_data.ftpserver.entities.dialogs)
    file_path = fs.download(file_name)
    with open(file_path, "r") as stream:
        dialog_data = yaml.load(stream, Loader=yaml.BaseLoader)
        default_drop = dialog_data[0]["dialog_tabs"][0]["dialog_groups"][0][
            "dialog_fields"][0]["default_value"]
        default_radio = dialog_data[0]["dialog_tabs"][0]["dialog_groups"][0][
            "dialog_fields"][1]["default_value"]

    view = navigate_to(service_catalogs, "Order")
    assert (view.fields("dropdown").read() == default_drop
            and view.fields("radio").read() == default_radio)
Example #2
def import_datastore(appliance, import_data):
    """This fixture will help to import datastore file.

    To invoke this fixture, we need to pass parametrize import data with the help
    of `DatastoreImport`namedtuple.

    Usage:
        .. code-block:: python

        @pytest.mark.parametrize(
        "import_data", [DatastoreImport("datastore.zip", "from_domain_name", "to_domain_name")]
        )
        def test_foo(import_datastore, import_data):
            pass
    """

    # Download datastore file from FTP server
    fs = FTPClientWrapper(cfme_data.ftpserver.entities.datastores)
    file_path = fs.download(import_data.file_name)

    # Import datastore file to appliance
    datastore = appliance.collections.automate_import_exports.instantiate(
        import_type="file", file_path=file_path)
    domain = datastore.import_domain_from(import_data.from_domain,
                                          import_data.to_domain)
    assert domain.exists
    with update(domain):
        domain.enabled = True
    yield domain
    domain.delete_if_exists()
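The `DatastoreImport` namedtuple referenced in the fixture's docstring is not defined in these examples. A minimal sketch of it, assuming its fields simply mirror the attributes the fixture reads (`file_name`, `from_domain`, `to_domain`):

from collections import namedtuple

# Hypothetical definition; field names are inferred from how import_data is used above.
DatastoreImport = namedtuple("DatastoreImport", ["file_name", "from_domain", "to_domain"])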
Example #3
def test_crud_imported_domains(import_data, temp_appliance_preconfig):
    """
    Bugzilla:
        1753586

    Polarion:
        assignee: dgaikwad
        initialEstimate: 1/8h
        caseposneg: positive
        casecomponent: Automate
    """
    # Download datastore file from FTP server
    fs = FTPClientWrapper(cfme_data.ftpserver.entities.datastores)
    file_path = fs.download(import_data.file_name)

    # Import datastore file to appliance
    datastore = temp_appliance_preconfig.collections.automate_import_exports.instantiate(
        import_type="file", file_path=file_path)
    domain = datastore.import_domain_from(import_data.from_domain,
                                          import_data.to_domain)
    assert domain.exists
    if import_data.file_name == "bz_1753586_system.zip":
        # Imported domains with source - "system" can not be deleted or updated as those are
        # defaults like ManageIQ and RedHat domains.
        view = navigate_to(domain, "Details")
        assert not view.configuration.is_displayed
    else:
        view = navigate_to(domain.parent, "All")
        with update(domain):
            domain.description = fauxfactory.gen_alpha()
        domain.delete()
        view.flash.assert_message(
            f'Automate Domain "{domain.description}": Delete successful')
Example #4
def test_upload_delete_custom_image_on_bundle(catalog_bundle):
    """
    Bugzilla:
        1487056
    Polarion:
        assignee: nansari
        casecomponent: Services
        initialEstimate: 1/8h
        testSteps:
            1. Create a catalog item
            2. Create a catalog  bundle
            3. Upload a custom image to bundle
            4. Remove the image
        expectedResults:
            1.
            2.
            3.
            4. Upload and removal of the image should work
    """
    image = 'logo.png'
    fs = FTPClientWrapper(cfme_data.ftpserver.entities.others)
    file_path = fs.download(image)

    view = navigate_to(catalog_bundle, 'Details')
    view.entities.upload_image.fill(file_path)
    view.entities.upload_button.click()
    view.flash.assert_message(
        f'Custom Image file "{image}" successfully uploaded')

    view.entities.remove.click()
    view.flash.assert_message('Custom Image successfully removed')
Example #5
def test_dialog_not_required_default_value(
        appliance, generic_catalog_item_with_imported_dialog, file_name):
    """
    Bugzilla:
        1783375
    Polarion:
        assignee: nansari
        casecomponent: Services
        initialEstimate: 1/16h
        startsin: 5.10
    """
    catalog_item, _, ele_label = generic_catalog_item_with_imported_dialog
    service_catalogs = ServiceCatalogs(appliance, catalog_item.catalog,
                                       catalog_item.name)

    # download yaml file
    fs = FTPClientWrapper(cfme_data.ftpserver.entities.dialogs)
    file_path = fs.download(file_name)
    with open(file_path) as stream:
        dialog_data = yaml.load(stream, Loader=yaml.BaseLoader)
        default_drop = dialog_data[0]["dialog_tabs"][0]["dialog_groups"][0][
            "dialog_fields"][0]["default_value"]

    view = navigate_to(service_catalogs, "Order")
    assert view.fields("dropdown_list_1").read() == default_drop
Example #6
def import_dialog(appliance, file_name):
    """This fixture helps to import a dialog file."""

    # Download dialog file from FTP server
    fs = FTPClientWrapper(cfme_data.ftpserver.entities.dialogs)
    file_path = fs.download(file_name)

    # Import dialog yml to appliance
    import_export = DialogImportExport(appliance)
    import_export.import_dialog(file_path)

    # Read yml to get the field name
    with open(file_path) as stream:
        dialog = yaml.load(stream, Loader=yaml.BaseLoader)
        # It returns list of dicts
        description = dialog[0].get("description")
        label = dialog[0].get("label")
        ele_label = dialog[0]['dialog_tabs'][0]['dialog_groups'][0][
            'dialog_fields'][0]['name']

    # Instantiate the service dialog using the label and description read from the yaml
    sd = appliance.collections.service_dialogs.instantiate(
        label=label, description=description)
    yield sd, ele_label
    sd.delete_if_exists()
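A hypothetical consumer of the import_dialog fixture above; the test name and assertion are illustrative only:

def test_imported_dialog_exists(import_dialog):
    # Illustrative sketch: unpack the (service dialog, element label) pair yielded above.
    sd, ele_label = import_dialog
    assert sd.exists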
Example #7
def test_existing_domain_child_override(appliance, custom_domain, import_data):
    """
    PR:
        https://github.com/ManageIQ/manageiq-ui-classic/pull/4912

    Bugzilla:
        1752875

    Polarion:
        assignee: ghubale
        initialEstimate: 1/8h
        caseposneg: positive
        casecomponent: Automate
        setup: First three steps are performed manually to produce the datastore zip file
            1. Create custom domain and copy class - "ManageIQ/System/Process"
            2. Lock this domain
            3. Navigate to Automation > automate > Import/export and click on "export all classes
               and instances to file"
            4. Go to custom domain and unlock it. Remove instance - "ManageIQ/System/Process/" and
               copy - "ManageIQ/System/Process/Request" (you can copy more classes or methods or
               instances) to custom domain and again lock the domain.
        testSteps:
            1. Navigate to Import/Export page and import the exported file
            2. Select "Select domain you wish to import from:" - "custom_domain" and check Toggle
               All/None
            3. Click on commit button.
            4. Then navigate to custom domain and unlock it
            5. Perform step 1, 2 and 3(In this case, domain will get imported)
            6. Go to custom domain
        expectedResults:
            1.
            2.
            3. You should see flash message: "Error: Selected domain is locked"
            4.
            5. Selected domain imported successfully
            6. You should see existing as well as imported namespace, class, instance or method
    """
    # Download datastore file from FTP server
    fs = FTPClientWrapper(cfme_data.ftpserver.entities.datastores)
    file_path = fs.download(import_data.file_name)

    # Import datastore file to appliance
    datastore = appliance.collections.automate_import_exports.instantiate(
        import_type="file", file_path=file_path)
    datastore.import_domain_from(import_data.from_domain,
                                 import_data.to_domain)
    view = appliance.browser.create_view(FileImportSelectorView)
    view.flash.assert_message("Error: Selected domain is locked")
    custom_domain.unlock()
    datastore.import_domain_from(import_data.from_domain,
                                 import_data.to_domain)
    view.flash.assert_no_error()
    view = navigate_to(custom_domain, 'Details')
    if not BZ(1752875).blocks:
        assert view.datastore.tree.has_path('Datastore', custom_domain.name, 'System', 'Request')
    assert view.datastore.tree.has_path('Datastore', custom_domain.name, 'System', 'Process')
Example #8
def yaml_path(yaml_name):
    """ Returns yaml path of the file with yaml_name name"""
    yaml_name = "{}.yaml".format(yaml_name)

    try:
        fs = FTPClientWrapper(cfme_data.ftpserver.entities.reports)
        file_path = fs.download(yaml_name, os.path.join("/tmp", yaml_name))
    except (FTPException, AttributeError):
        logger.exception("FTP download or YAML lookup of %s failed, defaulting to local", yaml_name)
        file_path = data_path.join("ui", "intelligence", yaml_name).realpath().strpath
        logger.info("Importing from data path: %s", file_path)

    return file_path
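A sketch of how the returned path could be consumed, following the report-import pattern used in the report fixtures later in this section; the helper name is an assumption:

def import_report_from_yaml(appliance, yaml_name):
    # Illustrative sketch only: resolve the yaml path and import it as a report.
    file_path = yaml_path(yaml_name)
    appliance.collections.reports.import_report(file_path)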
Example #9
def test_import_domain_containing_playbook_method(request, appliance,
                                                  setup_ansible_repository,
                                                  import_data):
    """This test case tests support of Export/Import of Domain with Ansible Method

    Bugzilla:
        1677575

    Polarion:
        assignee: ghubale
        initialEstimate: 1/8h
        caseimportance: high
        caseposneg: positive
        testtype: functional
        startsin: 5.11
        casecomponent: Automate
        tags: automate
        setup:
            1. Add playbook repository
            2. Create a new automate method with playbook type.
            3. Fill the required fields, for instance repository and playbook.
            4. Export this datastore.
            5. Playbook method fields are stored as names instead of IDs (this cannot be
               verified via automate; check manually in the method yaml)
        testSteps:
            1. Change the playbook name in the method yaml to an invalid playbook name, then
               import the exported datastore (Note: these steps need to be executed manually
               before importing the datastore)
        expectedResults:
            1. Proper error should be displayed while importing datastore with invalid playbook
    """
    # Download datastore file from FTP server
    fs = FTPClientWrapper(cfme_data.ftpserver.entities.datastores)
    file_path = fs.download(import_data.file_name)

    # Import datastore file to appliance
    datastore = appliance.collections.automate_import_exports.instantiate(
        import_type="file", file_path=file_path)
    domain = datastore.import_domain_from(import_data.from_domain,
                                          import_data.to_domain)
    request.addfinalizer(domain.delete_if_exists)
    view = appliance.browser.create_view(FileImportSelectorView)
    # setup_ansible_repository.name is the ansible repository name already present in the
    # datastore yaml that is being imported.
    error_msg = (
        f"Playbook 'invalid_1677575.yml' not found in repository '{setup_ansible_repository.name}'"
    )
    view.flash.assert_message(text=error_msg, partial=True)
Example #10
    def _report(file_name, menu_name):
        collection = appliance.collections.reports

        # download the report from server
        fs = FTPClientWrapper(cfme_data.ftpserver.entities.reports)
        file_path = fs.download(file_name)

        # import the report
        collection.import_report(file_path)

        # instantiate report and return it
        report = collection.instantiate(type="My Company (All Groups)",
                                        subtype="Custom",
                                        menu_name=menu_name)
        request.addfinalizer(report.delete_if_exists)
        return report
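A hypothetical call site for this factory (it is assumed to be returned by a report fixture); the file and menu names are illustrative:

# Illustrative sketch only: call the factory returned by the fixture.
report = _report("testing_report.yaml", "testing report")
assert report.exists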
Example #11
def restore_db(temp_appliance_preconfig, file_name):
    try:
        db_file = FTPClientWrapper(cfme_data.ftpserver.entities.databases).get_file(file_name)
    except FTPException:
        pytest.skip("Failed to fetch the file from FTP server.")

    db_path = f"/tmp/{db_file.name}"

    # Download the customer db on appliance
    result = temp_appliance_preconfig.ssh_client.run_command(
        f"curl -o {db_path} ftp://{db_file.link}"
    )
    if not result.success:
        pytest.fail("Failed to download the file to the appliance.")

    def _check_file_size(file_path, expected_size):
        return temp_appliance_preconfig.ssh_client.run_command(
            f"stat {file_path} | grep {expected_size}"
        ).success

    # Verify the whole database was downloaded so we do not test with a broken database
    if not _check_file_size(db_path, db_file.filesize):
        pytest.skip("File downloaded to the appliance, but it looks broken.")

    is_major = bool(temp_appliance_preconfig.version > "5.11")
    temp_appliance_preconfig.db.restore_database(db_path, is_major=is_major)
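A hypothetical test consuming the restore_db fixture above, assuming file_name is supplied by a parametrized fixture and that the appliance exposes wait_for_miq_ready as referenced in Example #16; the test body is illustrative only:

def test_appliance_after_db_restore(restore_db, temp_appliance_preconfig):
    # restore_db has already restored the customer database onto the appliance.
    temp_appliance_preconfig.wait_for_miq_ready()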
Example #12
def test_upload_blank_file(appliance, upload_file):
    """
    Bugzilla:
        1720611

    Polarion:
        assignee: dgaikwad
        initialEstimate: 1/8h
        caseposneg: negative
        startsin: 5.10
        casecomponent: Automate
        testSteps:
            1. Create blank zip (test.zip) and yaml (test.yml) files
            2. Navigate to Automation > Automate > Import/Export and upload test.zip file
            3. Navigate to Automation > Automate > Customization > Import/Export and upload test.yml
        expectedResults:
            1.
            2. Error message should be displayed
            3. Error message should be displayed
    """
    # Download the blank file from the FTP server
    fs = FTPClientWrapper(cfme_data.ftpserver.entities.datastores)
    file_path = fs.download(upload_file)

    if upload_file == "dialog_blank.yml":
        with LogValidator("/var/www/miq/vmdb/log/production.log",
                          failure_patterns=[".*FATAL.*"]).waiting(timeout=120):

            # Import dialog yml to appliance
            import_export = DialogImportExport(appliance)
            view = navigate_to(import_export, "DialogImportExport")
            view.upload_file.fill(file_path)
            view.upload.click()
            view.flash.assert_message('Error: the uploaded file is blank')
    else:
        # Import datastore file to appliance
        datastore = appliance.collections.automate_import_exports.instantiate(
            import_type="file", file_path=file_path)
        view = navigate_to(appliance.collections.automate_import_exports,
                           "All")
        with LogValidator("/var/www/miq/vmdb/log/production.log",
                          failure_patterns=[".*FATAL.*"]).waiting(timeout=120):
            view.import_file.upload_file.fill(datastore.file_path)
            view.import_file.upload.click()
            view.flash.assert_message(
                "Error: import processing failed: domain: *")
Example #13
def get_custom_report(appliance):
    collection = appliance.collections.reports

    # download the report from server
    fs = FTPClientWrapper(cfme_data.ftpserver.entities.reports)
    file_path = fs.download("testing_report.yaml")

    # import the report
    collection.import_report(file_path)

    # instantiate report and return it
    report = collection.instantiate(
        type="My Company (All Groups)",
        subtype="Custom",
        menu_name="testing report",
        title="testing report title",
    )
    yield report
    report.delete_if_exists()
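A hypothetical consumer of the get_custom_report fixture above; the test name and assertion are illustrative only:

def test_custom_report_exists(get_custom_report):
    # Illustrative sketch: the imported report should exist until the fixture deletes it.
    assert get_custom_report.exists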
Example #14
def test_overwrite_import_domain(local_domain, appliance, file_name):
    """
    Note: This PR automates this scenario via rake commands, but the RFE is not yet fixed as
    there is a bug when applying this scenario via the UI.

    Bugzilla:
        1753860

    Polarion:
        assignee: dgaikwad
        initialEstimate: 1/8h
        caseposneg: positive
        casecomponent: Automate
        setup:
            1. Create custom domain, namespace, class, instance, method. Do not delete this domain.
            2. Navigate to automation > automate > import/export and export all classes and
               instances to a file
            3. Extract the file and update __domain__.yaml file of custom domain as below:
               >> description: test_desc
               >> enabled: false
               Note: These steps need to be performed manually
        testSteps:
            1. Compress this domain file and import it via UI.
        expectedResults:
            1. Description and enabled status of existing domain should update.
    """
    datastore_file = FTPClientWrapper(
        cfme_data.ftpserver.entities.datastores).get_file(file_name)
    file_path = os.path.join("/tmp", datastore_file.name)

    # Download the datastore file on appliance
    assert appliance.ssh_client.run_command(
        f"curl -o {file_path} ftp://{datastore_file.link}").success

    # Rake command to update domain
    rake_cmd = {
        enable:
        (f"evm:automate:import PREVIEW=false DOMAIN=bz_1753860 IMPORT_AS=bz_1753860 "
         f"ZIP_FILE={file_path} SYSTEM=false ENABLED={enable} OVERWRITE=true")
        for enable in ['false', 'true']
    }

    for status, cmd in rake_cmd.items():
        appliance.ssh_client.run_rake_command(cmd)
        view = navigate_to(local_domain.parent, "All")

        # Need to refresh domain to get updates after performing rake command
        view.browser.refresh()

        # Checking domain's enabled status on all page
        assert view.domains.row(
            name__contains=local_domain.name)["Enabled"].text == status
Example #15
def test_dialog_default_value_selection(
        appliance, custom_categories, import_datastore, import_data,
        generic_catalog_item_with_imported_dialog, file_name):
    """
    Bugzilla:
        1579405
    Polarion:
        assignee: nansari
        casecomponent: Services
        initialEstimate: 1/4h
        testtype: functional
        startsin: 5.10
    """

    catalog_item, _, ele_label = generic_catalog_item_with_imported_dialog
    service_catalogs = ServiceCatalogs(appliance, catalog_item.catalog,
                                       catalog_item.name)

    # download yaml file
    fs = FTPClientWrapper(cfme_data.ftpserver.entities.dialogs)
    file_path = fs.download(file_name)
    with open(file_path) as stream:
        dialog_data = yaml.load(stream, Loader=yaml.BaseLoader)

        environment = dialog_data[0]["dialog_tabs"][0]["dialog_groups"][0][
            "dialog_fields"][0]["default_value"]
        vm_size = dialog_data[0]["dialog_tabs"][0]["dialog_groups"][0][
            "dialog_fields"][1]["default_value"]
        network = dialog_data[0]["dialog_tabs"][0]["dialog_groups"][0][
            "dialog_fields"][2]["default_value"]
        additional_disks = dialog_data[0]["dialog_tabs"][0]["dialog_groups"][
            0]["dialog_fields"][3]["default_value"]

    view = navigate_to(service_catalogs, "Order")
    assert (view.fields("environment").read() == environment
            and view.fields("instance").read() == vm_size
            and view.fields("network").read() == network
            and view.fields("number_disk").read() == additional_disks)
Example #16
def test_miq_schedule_validation_failed(temp_appliance_preconfig):
    """
    Bugzilla:
        1729441
        1740229

    Polarion:
        assignee: pvala
        casecomponent: Reporting
        initialEstimate: 1/10h
        testSteps:
            1. Restore the customer database and monitor the EVM logs while restoring.
            2. Restart the server.
        expectedResults:
            1. Should not encounter the following message (refer to the BZ description) in the logs.
            2. Server should restart successfully.
    """
    appliance = temp_appliance_preconfig
    dump = FTPClientWrapper(
        cfme_data.ftpserver.entities.databases).get_file("miqschedule_dump")
    dump_destination = os.path.join("/tmp", dump.name)

    # Download the customer db on appliance
    if not appliance.ssh_client.run_command(
            f"curl -o {dump_destination} ftp://{dump.link}").success:
        pytest.fail("Failed to download the file to the appliance.")

    # In case the failure pattern is encountered, the following piece of code will raise
    # a TimedOutError from appliance.wait_for_miq_ready
    # The matched_patterns will only be found on successful database restore.
    with LogValidator(
            "/var/www/miq/vmdb/log/evm.log",
            failure_patterns=[
                ".* ERROR .*Validation failed: MiqSchedule:.*Name has already been taken.*Method.*"
            ],
            matched_patterns=[
                ".*INFO.* Widget: .*chart_server_availability.* file has been .* disk.*",
                ".*INFO.* : MIQ.*EvmDatabase.seed.* Seeding MiqAction.*"
            ]).waiting():
        appliance.db.restore_database(
            db_path=dump_destination,
            is_major=bool(appliance.version > "5.11"))
Example #17
def test_redhat_domain_sync_after_upgrade(temp_appliance_preconfig, file_name):
    """
    Bugzilla:
        1693362

    Polarion:
        assignee: ghubale
        initialEstimate: 1/8h
        caseposneg: positive
        casecomponent: Automate
        testSteps:
            1. Either restore a database dump from an appliance with version X onto an appliance
               with version Y, or upgrade the appliance
            2. grep 'domain version on disk differs from db version' /var/www/miq/vmdb/log/evm.log
            3. Check last_startup.txt file
        expectedResults:
            1.
            2. You should find this string in logs: RedHat domain version on disk differs from db
               version
            3. You should find this string in file: RedHat domain version on disk differs from db
               version
    """
    db_file = FTPClientWrapper(
        cfme_data.ftpserver.entities.databases).get_file(file_name)
    db_path = os.path.join("/tmp", db_file.name)

    # Download the customer db on appliance
    assert temp_appliance_preconfig.ssh_client.run_command(
        f"curl -o {db_path} ftp://{db_file.link}").success

    with LogValidator(
            "/var/www/miq/vmdb/log/evm.log",
            matched_patterns=[
                ".*domain version on disk differs from db version.*",
                ".*RedHat domain version on disk differs from db version.*",
                ".*ManageIQ domain version on disk differs from db version.*"
            ],
    ).waiting(timeout=1000):
        temp_appliance_preconfig.db.restore_database(
            db_path, is_major=bool(temp_appliance_preconfig.version > "5.11"))
Example #18
def image_file_path(file_name):
    """ Returns file path of the file"""
    fs = FTPClientWrapper(cfme_data.ftpserver.entities.others)
    file_path = fs.download(file_name, os.path.join("/tmp", file_name))
    return file_path
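A sketch of a possible call site, reusing the custom-image upload flow from Example #4; the image name and the catalog_bundle object are assumptions:

# Illustrative sketch only
file_path = image_file_path("logo.png")
view = navigate_to(catalog_bundle, 'Details')
view.entities.upload_image.fill(file_path)
view.entities.upload_button.click()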