def log_monitor(user_obj, temp_appliance_preconfig_long):
    """Fixture: watch evm.log and fail if the user's plaintext password appears."""
    validator = LogValidator(
        "/var/www/miq/vmdb/log/evm.log",
        failure_patterns=[f"{user_obj.credential.secret}"],
        hostname=temp_appliance_preconfig_long.hostname,
    )
    validator.start_monitoring()
    yield validator
# Example 2
def test_delete_tag_from_category(custom_instance):
    """
    Bugzilla:
        1744514
        1767901

    Polarion:
        assignee: ghubale
        casecomponent: Automate
        initialEstimate: 1/12h
    """
    instance = custom_instance(ruby_code=tag_delete_from_category)

    # The automate method logs the tag's existence before and after deletion.
    expected = [f'.*Tag exists: {flag}.*' for flag in ('true', 'false')]
    log = LogValidator("/var/www/miq/vmdb/log/automation.log",
                       matched_patterns=expected)
    with log.waiting(timeout=120):
        # Run the automate method through simulation
        simulate(
            appliance=instance.klass.appliance,
            message="create",
            request="Call_Instance",
            execute_methods=True,
            attributes_values={
                "namespace": instance.klass.namespace.name,
                "class": instance.klass.name,
                "instance": instance.name,
            },
        )
# Example 3
def test_retire_vm_now(setup_provider, create_vm, new_user):
    """
    Bugzilla:
        1747159

    Polarion:
        assignee: dgaikwad
        initialEstimate: 1/8h
        caseposneg: positive
        casecomponent: Automate
        setup:
            1. Add infrastructure provider
            2. Provision VM
            3. Create new user with group EvmGroup-vm_user
        testSteps:
            1. Select 'Retire this vm' from the UI to retire the VM
            2. Check evm.logs
        expectedResults:
            1. VM should be retired
            2. No errors in evm logs
    """
    # The BZ manifests as a NoMethodError on tenant_id during retirement.
    error_pattern = ".*ERROR.*NoMethodError]: undefined method `tenant_id'.*"
    evm_log = LogValidator("/var/www/miq/vmdb/log/evm.log",
                           failure_patterns=[error_pattern])
    with new_user:
        with evm_log.waiting(timeout=720):
            create_vm.retire()
            retired = create_vm.wait_for_vm_state_change(
                desired_state="retired", timeout=720, from_details=True)
            assert retired
def test_refresh_with_empty_iot_hub_azure(request, provider, setup_provider):
    """
    Polarion:
        assignee: anikifor
        casecomponent: Cloud
        caseimportance: low
        initialEstimate: 1/6h
        setup: prepare env
               create an IoT Hub in Azure (using free tier pricing is good enough):
               $ az iot hub create --name rmanes-iothub --resource-group iot_rg
        testSteps:
            1. refresh azure provider
        expectedResults:
            1. no errors found in logs
    Bugzilla:
        1495318
    """
    evm_log = LogValidator("/var/www/miq/vmdb/log/evm.log",
                           failure_patterns=[r".*ERROR.*"])
    evm_log.start_monitoring()

    azure = provider.mgmt
    # Make sure an (empty) IoT Hub exists before refreshing the provider.
    if not azure.has_iothub():
        name = f"potatoiothub_{fauxfactory.gen_alpha()}"
        azure.create_iothub(name)
        request.addfinalizer(lambda: azure.delete_iothub(name))
        assert azure.has_iothub()

    provider.refresh_provider_relationships(wait=600)
    assert evm_log.validate(wait="60s")
def test_retry_migration_plan(cancel_migration_plan):
    """
    Test to cancel migration and then retry migration
    Polarion:
        assignee: sshveta
        initialEstimate: 1/4h
        caseimportance: medium
        caseposneg: positive
        testtype: functional
        startsin: 5.10
        casecomponent: V2V
    Bugzilla:
        1755632
        1746592
    """
    plan = cancel_migration_plan
    completed_view = navigate_to(plan, "Complete")
    # Kick the retry off from the completed-plans list
    completed_view.plans_completed_list.migrate_plan(plan.name)
    assert plan.wait_for_state("Started")

    # Automating BZ 1755632: retry must be re-queued with a 60 second delay
    delay_log = LogValidator(
        '/var/www/miq/vmdb/log/evm.log',
        matched_patterns=[r'.*to Automate for delivery in \[60\] seconds.*'],
    )
    delay_log.start_monitoring()
    # search logs and wait for validation
    assert delay_log.validate(wait="150s")

    for state in ("In_Progress", "Completed", "Successful"):
        assert plan.wait_for_state(state)
def test_appliance_console_restore_db_ha(request, unconfigured_appliances,
                                         app_creds):
    """Configure HA environment with providers, run backup/restore on configuration,
    Confirm that ha failover continues to work correctly and providers still exist.

    Polarion:
        assignee: jhenner
        caseimportance: high
        casecomponent: Appliance
        initialEstimate: 1/4h
    Bugzilla:
        1740515
    """
    pwd = app_creds["password"]
    # Roles (from usage below): appl1 = primary DB node, appl2 = standby DB
    # node, appl3 = EVM application node running the failover monitor.
    appl1, appl2, appl3 = configure_appliances_ha(unconfigured_appliances, pwd)

    # Add infra/cloud providers and create db backup
    provider_app_crud(VMwareProvider, appl3).setup()
    provider_app_crud(EC2Provider, appl3).setup()
    appl1.db.backup()

    # Snapshot the provider list so we can verify nothing is lost by the
    # restore or by the later failover.
    providers_before_restore = set(appl3.managed_provider_names)
    # Restore DB on the second appliance
    # Stop EVM and repmgr on both DB nodes before dropping/recreating the DB;
    # the order matters — nothing may hold connections during the restore.
    appl3.evmserverd.stop()
    appl1.rh_postgresql95_repmgr.stop()
    appl2.rh_postgresql95_repmgr.stop()
    appl1.db.drop()
    appl1.db.create()
    # NOTE(review): presumably copies the v2_key from appl3 to appl1 so the
    # restored DB's encrypted fields remain decryptable — confirm in helper.
    fetch_v2key(appl3, appl1)
    restore_db(appl1)

    # Re-establish replication: appl1 as primary, appl2 as standby of appl1.
    appl1.appliance_console.reconfigure_primary_replication_node(pwd)
    appl2.appliance_console.reconfigure_standby_replication_node(
        pwd, appl1.hostname)

    # Point the application node's automatic failover at the new primary.
    appl3.appliance_console.configure_automatic_failover(
        primary_ip=appl1.hostname)
    appl3.evm_failover_monitor.restart()

    appl3.evmserverd.start()
    appl3.wait_for_web_ui()
    # Assert providers still exist after restore
    assert providers_before_restore == set(appl3.managed_provider_names), (
        'Restored DB is missing some providers')

    # `evm_log` is expected to be a module-level log-path constant.
    with LogValidator(evm_log,
                      matched_patterns=['Starting to execute failover'],
                      hostname=appl3.hostname).waiting(timeout=450):
        # Cause failover to occur
        appl1.db_service.stop()

    appl3.evmserverd.wait_for_running()
    appl3.wait_for_web_ui()
    # Assert providers still exist after ha failover
    assert providers_before_restore == set(appl3.managed_provider_names), (
        'Restored DB is missing some providers')
    # Verify that existing provider can detect new VMs after restore/failover
    virtual_crud = provider_app_crud(VMwareProvider, appl3)
    vm = provision_vm(request, virtual_crud)
    assert vm.mgmt.is_running, "vm not running"
# Example 7
def test_service_provisioning_email(request, appliance, catalog_item):
    """
    Polarion:
        assignee: nansari
        casecomponent: Services
        caseposneg: negative
        initialEstimate: 1/4h

    Bugzilla:
        1668004
    """
    automation_log = LogValidator(
        "/var/www/miq/vmdb/log/automation.log",
        failure_patterns=[".*Error during substitution.*"])
    automation_log.start_monitoring()

    ServiceCatalogs(appliance, catalog_item.catalog, catalog_item.name).order()

    # Request description uses the catalog item name for both placeholders.
    description = (f"Provisioning Service [{catalog_item.name}] "
                   f"from [{catalog_item.name}]")
    provision_request = appliance.collections.requests.instantiate(description)
    provision_request.wait_for_request(method='ui')
    request.addfinalizer(provision_request.remove_request)
    assert automation_log.validate(wait="60s")
def test_action_power_on_audit(request, vm, vm_off, policy_for_testing):
    """ This test tests action 'Generate Audit Event'.

    This test sets the policy that it logs powering on of the VM. Then it powers up the vm and
    checks whether audit logs contain message about that.

    Metadata:
        test_flag: actions, provision

    Polarion:
        assignee: dgaikwad
        initialEstimate: 1/6h
        casecomponent: Control
    """
    policy_result = LogValidator(
        "/var/www/miq/vmdb/log/audit.log",
        matched_patterns=[
            r'.*policy: \[{}\], event: \[VM Power On\]'.format(
                policy_for_testing.description)
        ])
    policy_result.start_monitoring()
    # Set up the policy and prepare finalizer
    policy_for_testing.assign_actions_to_event("VM Power On",
                                               ["Generate Audit Event"])

    @request.addfinalizer
    def _cleanup():
        policy_for_testing.unassign_events("VM Power On")

    # Start the VM
    vm.mgmt.ensure_state(VmState.RUNNING)

    # Search the logs and wait for validation.
    # FIX: pass the wait period as an explicit keyword — every other
    # validate() call site in this module uses `wait="..."`; a bare
    # positional could bind to a different parameter of validate().
    assert policy_result.validate(wait="180s")
# Example 9
def test_attribute_override(appliance, request, provider, setup_provider,
                            buttongroup):
    """ Test custom button attribute override

    Polarion:
        assignee: ndhandre
        initialEstimate: 1/4h
        caseimportance: medium
        caseposneg: positive
        testtype: nonfunctional
        startsin: 5.9
        casecomponent: CustomButton
        tags: custom_button
        testSteps:
            1. create a custom button to request the call_instance_with_message
            2. set the message to create
            3. set the attributes instance, class, namespace to "whatever"
            4. set the attribute message to "my_message"
            5. save it

    Bugzilla:
        1651099
    """
    attributes = [
        ("class", "Request"),
        ("instance", "TestNotification"),
        ("message", "digitronik_msg"),
        ("namespace", "/System"),
    ]
    req = "call_instance_with_message"
    # One pattern for the button's default message, one for the override.
    patterns = [
        "[miqaedb:/System/Request/TestNotification#create]",
        "[miqaedb:/System/Request/TestNotification#digitronik_msg]"
    ]

    group = buttongroup("PROVIDER")
    button = group.buttons.create(
        text="btn_{}".format(fauxfactory.gen_alphanumeric(3)),
        hover="hover_{}".format(fauxfactory.gen_alphanumeric(3)),
        system="Request",
        request=req,
        attributes=attributes,
    )
    request.addfinalizer(button.delete_if_exists)

    # Initialize Log Checks
    log = LogValidator("/var/www/miq/vmdb/log/automation.log",
                       matched_patterns=patterns)
    log.start_monitoring()

    # Execute button
    view = navigate_to(provider, "Details")
    custom_button_group = Dropdown(view, group.hover)
    custom_button_group.item_select(button.text)

    # Simulate button
    button.simulate(provider.name, request=req)

    # validate log requests for simulation and actual execution
    # FIX: the validate() result was previously discarded, so the test could
    # never fail on the log check — assert it.
    assert log.validate(wait="120s")
# Example 10
def test_automate_method_with_dialog(request, appliance, catalog,
                                     setup_dynamic_dialog):
    """
    Polarion:
        assignee: dgaikwad
        initialEstimate: 1/15h
        caseimportance: medium
        caseposneg: positive
        testtype: functional
        startsin: 5.7
        casecomponent: Automate
        tags: automate
    """
    collection = appliance.collections.catalog_items
    catalog_item = collection.create(
        collection.GENERIC,
        name=fauxfactory.gen_alphanumeric(),
        description="my catalog",
        display_in=True,
        catalog=catalog,
        dialog=setup_dynamic_dialog.label,
    )
    request.addfinalizer(catalog_item.delete_if_exists)

    # Ordering the service must run the dialog's automate method,
    # which logs "Hello World".
    log = LogValidator("/var/www/miq/vmdb/log/automation.log",
                       matched_patterns=[".*Hello World.*"])
    with log.waiting(timeout=120):
        service_catalogs = ServiceCatalogs(appliance,
                                           catalog=catalog_item.catalog,
                                           name=catalog_item.name)
        provision_request = service_catalogs.order()
        provision_request.wait_for_request()
        request.addfinalizer(provision_request.remove_request)
# Example 11
def test_error_message_azure(order_stack):
    """
    Starting with 5.8, error messages generated by azure when provisioning
    from orchestration template will be included in the Last Message
    field.  Users will no longer have to drill down to Stack/Resources to
    figure out the error.
    This is currently working correctly as of 5.8.0.12

    Bugzilla:
        1410794

    Polarion:
        assignee: anikifor
        casecomponent: Cloud
        caseimportance: medium
        initialEstimate: 1/4h
        setup: Easiest way to do this is provision an azure vm from orchestration
               catalog item and just add a short password like "test".  This will
               fail on the azure side and the error will be displayed in the request
               details.
        startsin: 5.8
    """
    expected = "Orchestration stack deployment error: The supplied password must be"
    validator = LogValidator('/var/www/miq/vmdb/log/evm.log',
                             matched_patterns=[expected])
    with validator.waiting(timeout=450):
        provision_request = order_stack
        provision_request.wait_for_request(method='ui')
        # Azure rejects the weak password, so the request must not succeed
        assert not provision_request.is_succeeded()
# Example 12
def test_read_dialog_timeout_ec2_stack(order_stack):
    """
    Bugzilla:
        1698439
    Polarion:
        assignee: nansari
        startsin: 5.10
        casecomponent: Services
        initialEstimate: 1/6h
        testSteps:
            1. create an aws template with an optional value "timeout"
            2. create a dialog that will offer an option to overwrite "timeout"
               with a custom value typed at input
            3. Navigate to order page of service
            4. provision using a non-zero value in timeout
        expectedResults:
            1.
            2.
            3.
            4. the value input should be passed
    """
    # The dialog-supplied timeout (20) must show up in the automate method log.
    msg = "<AEMethod groupsequencecheck>.*dialog_stack_timeout: 20"
    with LogValidator('/var/www/miq/vmdb/log/evm.log',
                      matched_patterns=[msg],
                      ).waiting(timeout=450):
        provision_request = order_stack
        provision_request.wait_for_request(method='ui')
        # FIX: the is_succeeded() result was previously discarded, which made
        # the success check a no-op — assert it so failures are reported.
        assert provision_request.is_succeeded()
def test_dialog_dynamic_field_refresh_in_log(appliance, import_datastore, import_data,
                                             generic_catalog_item_with_imported_dialog):
    """
    Bugzilla:
        1559999

    Polarion:
        assignee: nansari
        startsin: 5.10
        casecomponent: Services
        initialEstimate: 1/16h
        setup:
            1. Import Datastore and dialog
        testSteps:
            1. Add service catalog with above imported dialog
            2. Navigate to order page of service
            3. In service Order page
            4. Click on "refresh" for field 2
        expectedResults:
            1.
            2.
            3.
            4. Only text Field2 should be refreshed in automation log
    """
    catalog_item, sd, ele_label = generic_catalog_item_with_imported_dialog

    order_view = navigate_to(
        ServiceCatalogs(appliance, catalog_item.catalog, catalog_item.name),
        "Order")
    # Refreshing field 2 must not trigger a refresh of field 1.
    log = LogValidator(
        "/var/www/miq/vmdb/log/automation.log",
        matched_patterns=['.*Refreshing field : RefreshField2.*'],
        failure_patterns=[".*Refreshing field : RefreshField1.*"])
    with log.waiting(timeout=120):
        order_view.fields('Refresh2').refresh.click()
# Example 14
def test_provider_details_page_refresh_after_clear_cookies(
        appliance, request, setup_provider, provider):
    """
    Bugzilla:
        1642948
    Polarion:
        assignee: pvala
        casecomponent: WebUI
        caseimportance: medium
        initialEstimate: 1/12h
        testSteps:
            1. Navigate to a provider's Details page
            2. Reboot the appliance
            3. Click a button or refresh the page or do something on the page and see what happens.
        expectedResults:
            1.
            2.
            3. You'll be redirected to the Login Page.
    """
    details_view = navigate_to(provider, "Details")
    appliance.reboot()

    # When the test runs a second time for cloud provider, it raises an error,
    # this finalizer is workaround for it.
    request.addfinalizer(lambda: navigate_to(appliance.server, "LoggedIn"))

    # Refreshing a stale session after reboot must not produce FATAL entries.
    production_log = LogValidator("/var/www/miq/vmdb/log/production.log",
                                  failure_patterns=[r".*FATAL.*"])
    with production_log.waiting():
        details_view.browser.refresh()

    login_view = appliance.server.create_view(LoginPage, wait="40s")
    assert login_view.is_displayed
def test_clicking_created_catalog_item_in_the_list(appliance,
                                                   generic_catalog_item):
    """
    Bugzilla:
        1702343

    Polarion:
        assignee: nansari
        startsin: 5.10
        casecomponent: Services
        initialEstimate: 1/6h
        testSteps:
            1. Go to Services > Catalogs > Catalog Items accordion
            2. Configuration > Add a New Catalog Item, choose some Catalog Item type
            3. Fill in the required info and click on Add button
            4. After successfully saving the Catalog Item, click on the same Catalog Item in list
        expectedResults:
            1.
            2.
            3.
            4. Catalog Item's summary screen should appear
    """
    log = LogValidator("/var/www/miq/vmdb/log/evm.log",
                       failure_patterns=[".*ERROR.*"])
    with log.waiting(timeout=120):
        view = navigate_to(appliance.collections.catalog_items, "All")
        # Find the created item in the table (name is in column 2) and open it
        for row in view.table:
            cell = row[2]
            if cell.text == generic_catalog_item.name:
                cell.click()
                break
        expected_title = f'Service Catalog Item "{generic_catalog_item.name}"'
        assert view.title.text == expected_title
# Example 16
def test_dynamic_dialog_field_to_static_field(
        appliance, import_datastore,
        generic_catalog_item_with_imported_dialog):
    """
    Bugzilla:
        1614436
    Polarion:
        assignee: nansari
        casecomponent: Services
        testtype: functional
        initialEstimate: 1/4h
        startsin: 5.10
    """
    catalog_item, sd, ele_label = generic_catalog_item_with_imported_dialog
    service_catalogs = ServiceCatalogs(appliance, catalog_item.catalog,
                                       catalog_item.name)

    navigate_to(sd, "Edit")
    # Flip the imported dialog element from dynamic to static
    edit_view = appliance.browser.create_view(EditElementView)
    edit_view.element.edit_element(ele_label)
    edit_view.element_information.dynamic_chkbox.fill(False)
    edit_view.ele_save_button.click()
    edit_view.save_button.click()

    # A static text area must not be refreshed via automate when the
    # order page loads.
    log = LogValidator('/var/www/miq/vmdb/log/automation.log',
                       failure_patterns=["TEXT AREA REFRESH DIALOG"])
    with log.waiting(timeout=120):
        navigate_to(service_catalogs, "Order")
# Example 17
def test_dynamic_dropdown_refresh_load(
        appliance, import_datastore, import_data,
        generic_catalog_item_with_imported_dialog, context):
    """
    Bugzilla:
        1576873
    Polarion:
        assignee: nansari
        startsin: 5.10
        casecomponent: Services
        initialEstimate: 1/16h
    """
    catalog_item, _, ele_label = generic_catalog_item_with_imported_dialog
    service_catalogs = ServiceCatalogs(appliance, catalog_item.catalog,
                                       catalog_item.name)

    with appliance.context.use(context):
        # SSUI and the classic UI use different navigators and destinations.
        if context == ViaSSUI:
            view = ssui_nav(service_catalogs, "Details")
        else:
            view = navigate_to(service_catalogs, "Order")
        log = LogValidator("/var/www/miq/vmdb/log/automation.log",
                           matched_patterns=['We are in B'],
                           failure_patterns=["We are in A"])
        with log.waiting(timeout=120):
            view.fields(ele_label).dropdown.fill("b")
# Example 18
def test_snapshot_crud(create_vm, provider):
    """Tests snapshot crud

    Metadata:
        test_flag: snapshot, provision

    Polarion:
        assignee: prichard
        casecomponent: Infra
        initialEstimate: 1/6h
    """
    evm_log = LogValidator(
        "/var/www/miq/vmdb/log/evm.log",
        failure_patterns=[r".*ERROR.*"],
    )
    evm_log.start_monitoring()

    # has_name is false if testing RHEVMProvider
    snapshot = new_snapshot(create_vm,
                            has_name=(not provider.one_of(RHEVMProvider)))
    snapshot.create()
    # check for the size as "read" check
    if provider.one_of(RHEVMProvider) and provider.appliance.version >= "5.11":
        assert snapshot.size
    snapshot.delete()
    provider.refresh_provider_relationships(wait=600)
    assert evm_log.validate(wait="60s")
# Example 19
def test_check_system_request_calls_depr_conf_mgmt(appliance, copy_instance):
    """
    Polarion:
        assignee: dgaikwad
        initialEstimate: 1/8h
        caseimportance: low
        caseposneg: positive
        testtype: functional
        startsin: 5.10
        casecomponent: Automate
        tags: automate
        setup:
            1. Copy /System/Request/ansible_tower_job instance to new domain
        testSteps:
            1. Run that instance(ansible_tower_job) using simulation
            2. See automation log
        expectedResults:
            1.
            2. The /System/Request/ansible_tower_job instance should call the newer
               "/AutomationManagement/AnsibleTower/Operations/StateMachines/Job/default" method

    Bugzilla:
        1615444
    """
    method_path = '/AutomationManagement/AnsibleTower/Operations/StateMachines/Job/default'
    log = LogValidator("/var/www/miq/vmdb/log/automation.log",
                       matched_patterns=[f".*{method_path}.*"])
    log.start_monitoring()
    # Run the copied 'ansible_tower_job' instance through simulation
    simulate(appliance=appliance, request=copy_instance.name)
    assert log.validate(wait="60s")
# Example 20
def test_configuration_dropdown_roles_by_server(appliance, request):
    """
    Polarion:
        assignee: pvala
        casecomponent: Configuration
        caseimportance: high
        initialEstimate: 1/15h
        testSteps:
            1. Navigate to Settings -> Configuration -> Diagnostics -> CFME Region ->
                Roles by Servers.
            2. Select a Role and check the `Configuration` dropdown in toolbar.
            3. Check the `Suspend Role` option.
            4. Click the `Suspend Role` option and suspend the role
                and monitor production.log for error -
                `Error caught: [ActiveRecord::RecordNotFound] Couldn't find MiqServer with 'id'=0`
        expectedResults:
            1.
            2. `Configuration` dropdown must be enabled/active.
            3. `Suspend Role` must be enabled.
            4. Role must be suspended and there must be no error in the logs.

    Bugzilla:
        1715466
        1455283
        1404280
        1734393
    """
    # Step 1: open the Roles by Servers diagnostics page
    view = navigate_to(appliance.server.zone.region, "RolesByServers")

    # Step 2: selecting a role must activate the Configuration dropdown
    view.rolesbyservers.tree.select_item("SmartState Analysis")
    assert view.rolesbyservers.configuration.is_displayed

    # Step 3: Suspend Role must be offered
    assert view.rolesbyservers.configuration.item_enabled("Suspend Role")

    # Step 4: suspend the role and watch production.log for the BZ error
    record_not_found = (
        ".*Error caught: .*ActiveRecord::RecordNotFound.* Couldn't find MiqServer with 'id'=.*"
    )
    production_log = LogValidator("/var/www/miq/vmdb/log/production.log",
                                  failure_patterns=[record_not_found])

    production_log.start_monitoring()
    view.rolesbyservers.configuration.item_select("Suspend Role",
                                                  handle_alert=True)

    request.addfinalizer(lambda: view.rolesbyservers.configuration.item_select(
        "Start Role", handle_alert=True))

    view.flash.assert_message("Suspend successfully initiated")

    assert production_log.validate(wait="20s")

    # Re-select the role on affected streams before checking its state
    if BZ(1734393, forced_streams=["5.10"]).blocks:
        view.rolesbyservers.tree.select_item("SmartState Analysis")
    assert "available" in view.rolesbyservers.tree.currently_selected_role
# Example 21
def test_domain_lock_disabled(klass):
    """
    Polarion:
        assignee: ghubale
        casecomponent: Automate
        caseimportance: medium
        initialEstimate: 1/16h
        tags: automate
    """
    schema_field = fauxfactory.gen_alphanumeric()

    # Disable automate domain
    with update(klass.namespace.domain):
        klass.namespace.domain.enabled = False

    # Schema slot through which the automate method is wired in
    klass.schema.add_fields({'name': schema_field, 'type': 'Method', 'data_type': 'String'})

    # Adding automate method
    method = klass.methods.create(
        name=fauxfactory.gen_alphanumeric(),
        display_name=fauxfactory.gen_alphanumeric(),
        location='inline',
    )

    # Adding instance to call automate method
    instance = klass.instances.create(
        name=fauxfactory.gen_alphanumeric(),
        display_name=fauxfactory.gen_alphanumeric(),
        description=fauxfactory.gen_alphanumeric(),
        fields={schema_field: {'value': method.name}},
    )

    # Simulating against a disabled domain is expected to log an ERROR
    error_log = LogValidator(
        "/var/www/miq/vmdb/log/automation.log",
        matched_patterns=[r".*ERROR.*"],
    )
    error_log.start_monitoring()

    # Executing automate method using simulation
    simulate(
        appliance=klass.appliance,
        attributes_values={
            "namespace": klass.namespace.name,
            "class": klass.name,
            "instance": instance.name,
        },
        message="create",
        request="Call_Instance",
        execute_methods=True,
    )
    assert error_log.validate(wait="60s")

    klass.namespace.domain.lock()
    details_view = navigate_to(klass.namespace.domain, 'Details')
    for expected_state in ('Disabled', 'Locked'):
        assert expected_state in details_view.title.text

    # Need to unlock the domain to perform teardown on domain, namespace, class
    klass.namespace.domain.unlock()
# Example 22
def test_miq_password_decrypt(klass):
    """
    Polarion:
        assignee: ghubale
        casecomponent: Automate
        initialEstimate: 1/3h

    Bugzilla:
        1720432
    """
    # Ruby script for decrypting password
    script = (
        'require "manageiq-password"\n'
        'root_password = MiqPassword.encrypt("abc")\n'
        '$evm.log("info", "Root Password is #{root_password}")\n'
        'root_password_decrypted = MiqPassword.decrypt(root_password)\n'
        '$evm.log("info", "Decrypted password is #{root_password_decrypted}")')

    # Wire an 'execute' method slot into the class schema
    klass.schema.add_fields(
        {'name': 'execute', 'type': 'Method', 'data_type': 'String'})

    # Adding automate method
    method = klass.methods.create(
        name=fauxfactory.gen_alphanumeric(),
        display_name=fauxfactory.gen_alphanumeric(),
        location='inline',
        script=script,
    )

    # Adding instance to call automate method
    instance = klass.instances.create(
        name=fauxfactory.gen_alphanumeric(),
        display_name=fauxfactory.gen_alphanumeric(),
        description=fauxfactory.gen_alphanumeric(),
        fields={'execute': {'value': method.name}},
    )

    log = LogValidator(
        "/var/www/miq/vmdb/log/automation.log",
        matched_patterns=[".*Decrypted password is abc.*"],
    )
    log.start_monitoring()

    # Executing method via simulation to check decrypted password
    simulate(
        appliance=klass.appliance,
        attributes_values={
            "namespace": klass.namespace.name,
            "class": klass.name,
            "instance": instance.name,
        },
        message="create",
        request="Call_Instance",
        execute_methods=True,
    )
    assert log.validate()
# Example 23
def test_list_of_diff_vm_storages_via_rails(appliance, setup_provider,
                                            provider, testing_vm,
                                            custom_instance):
    """
    Bugzilla:
        1574444

    Polarion:
        assignee: dgaikwad
        initialEstimate: 1/8h
        caseimportance: medium
        caseposneg: positive
        testtype: functional
        startsin: 5.9
        casecomponent: Automate
        testSteps:
            1. vmware = $evm.vmdb('ems').find_by_name('vmware 6.5 (nested)') ;
            2. vm = vmware.vms.select { |v| v.name == 'dgaikwad-cfme510' }.first ;
            3. vm.storage
            4. vm.storages
        expectedResults:
            1.
            2.
            3. Returns only one storage
            4. Returns available storages
    """
    # Ruby snippet that logs both vm.storage and vm.storages[0] names
    list_storages = dedent(
        f'vmware = $evm.vmdb("ems").find_by_name("{provider.name}")\n'
        'vm = vmware.vms.select {|v| v.name == '
        f'"{testing_vm.name}"'
        '}.first\n'
        'storage = vm.storage\n'
        'storage_name = storage.name\n'
        '$evm.log(:info, "storage name: #{storage_name}")\n'
        'storages = vm.storages\n'
        'storage_name = storages[0].name\n'
        '$evm.log(:info, "storages name: #{storage_name}")\n')
    instance = custom_instance(ruby_code=list_storages)

    datastore_name = testing_vm.datastore.name
    expected = [
        f".*storage name: {datastore_name}.*",
        f".*storages name: {datastore_name}.*",
    ]
    with LogValidator("/var/www/miq/vmdb/log/automation.log",
                      matched_patterns=expected).waiting(timeout=120):
        # Executing automate method using simulation
        simulate(
            appliance=appliance,
            message="create",
            request="Call_Instance",
            execute_methods=True,
            attributes_values={
                "namespace": instance.klass.namespace.name,
                "class": instance.klass.name,
                "instance": instance.name,
            },
        )
# Example 24
def test_upload_blank_file(appliance, upload_file):
    """
    Bugzilla:
        1720611

    Polarion:
        assignee: dgaikwad
        initialEstimate: 1/8h
        caseposneg: negative
        startsin: 5.10
        casecomponent: Automate
        testSteps:
            1. Create blank zip(test.zip) and yaml(test.yml) file
            2. Navigate to Automation > Automate > Import/Export and upload test.zip file
            3. Navigate to Automation > Automate > Customization > Import/Export and upload test.yml
        expectedResults:
            1.
            2. Error message should be displayed
            3. Error message should be displayed
    """
    # Download datastore file from FTP server
    fs = FTPClientWrapper(cfme_data.ftpserver.entities.datastores)
    file_path = fs.download(upload_file)

    # Either upload path must reject the blank file without FATAL log entries.
    production_log = LogValidator("/var/www/miq/vmdb/log/production.log",
                                  failure_patterns=[".*FATAL.*"])

    if upload_file == "dialog_blank.yml":
        with production_log.waiting(timeout=120):
            # Import dialog yml to appliance
            import_export = DialogImportExport(appliance)
            view = navigate_to(import_export, "DialogImportExport")
            view.upload_file.fill(file_path)
            view.upload.click()
            view.flash.assert_message('Error: the uploaded file is blank')
    else:
        # Import datastore file to appliance
        datastore = appliance.collections.automate_import_exports.instantiate(
            import_type="file", file_path=file_path)
        view = navigate_to(appliance.collections.automate_import_exports,
                           "All")
        with production_log.waiting(timeout=120):
            view.import_file.upload_file.fill(datastore.file_path)
            view.import_file.upload.click()
            view.flash.assert_message(
                "Error: import processing failed: domain: *")
# Example 25
def test_update_ha(ha_appliances_with_providers, appliance, update_strategy,
                   request, old_version):
    """ Tests updating an appliance with providers using webui, also confirms that the
            provisioning continues to function correctly after the update has completed

    Polarion:
        assignee: jhenner
        caseimportance: high
        casecomponent: Appliance
        initialEstimate: 1/4h

    Bugzilla:
        1704835
    """
    evm_log = '/var/www/miq/vmdb/log/evm.log'
    # Index 2 is the non-database appliance that gets updated; index 0 is
    # the primary DB node used to trigger the failover.
    updated_appliance = ha_appliances_with_providers[2]

    update_strategy(updated_appliance)
    wait_for(do_appliance_versions_match,
             func_args=(appliance, updated_appliance),
             num_sec=900,
             delay=20,
             handle_exception=True,
             message='Waiting for appliance to update')

    monitor_started = LogValidator(
        evm_log,
        matched_patterns=['Starting database failover monitor'],
        hostname=updated_appliance.hostname)
    with monitor_started.waiting(wait=60):
        updated_appliance.evm_failover_monitor.restart()
        assert updated_appliance.evm_failover_monitor.running

    failover_executed = LogValidator(
        evm_log,
        matched_patterns=['Starting to execute failover'],
        hostname=updated_appliance.hostname)
    with failover_executed.waiting(wait=450):
        # Stopping the primary DB service forces the failover to occur.
        ha_appliances_with_providers[0].db_service.stop()

    updated_appliance.evmserverd.wait_for_running()
    updated_appliance.wait_for_miq_ready()
    # The pre-existing provider must still detect newly provisioned VMs.
    virtual_crud = provider_app_crud(VMwareProvider, updated_appliance)
    vm = provision_vm(request, virtual_crud)
    assert vm.provider.mgmt.does_vm_exist(vm.name), "vm not provisioned"
# --- Example 26 ---
def test_quota_exceed_mail_with_more_info_link(configure_mail, appliance,
                                               provider, set_roottenant_quota,
                                               custom_prov_data, prov_data,
                                               extra_msg, vm_name,
                                               template_name):
    """
    Bugzilla:
        1579031
        1759123

    Polarion:
        assignee: ghubale
        initialEstimate: 1/12h
        caseimportance: high
        caseposneg: positive
        testtype: functional
        startsin: 5.9
        casecomponent: Infra
        tags: quota
        setup:
            1. Copy instance ManageIQ/Configuration/Email/Default to custom domain
            2. Enter values for fields: approver, default_recipient, from and signature
        testSteps:
            1. Provide valid mail address while provisioning Vm to exceed quota
        expectedResults:
            1. Quota exceed mail should be sent
    """
    # Only the recipient and sender addresses are asserted on below.
    _approver, default_recipient, from_user = configure_mail
    mail_to = fauxfactory.gen_email()
    prov_data.update(custom_prov_data)
    prov_data['catalog']['vm_name'] = vm_name

    # The "Denied" mail goes to the configured default recipient; the
    # "Approved, pending Quota Validation" mail goes to the requester.
    denied_pattern = (
        f'"to"=>"{default_recipient}", "from"=>"{from_user}".*.Virtual Machine Request from '
        f'{mail_to} was Denied."')
    approved_pattern = (
        f'"to"=>"{mail_to}", "from"=>"{from_user}".*.Your Virtual Machine Request was Approved,'
        f' pending Quota Validation.".*')

    with LogValidator(
            "/var/www/miq/vmdb/log/automation.log",
            matched_patterns=[denied_pattern, approved_pattern],
    ).waiting(timeout=120):
        do_vm_provisioning(appliance,
                           template_name=template_name,
                           provider=provider,
                           vm_name=vm_name,
                           provisioning_data=prov_data,
                           wait=False,
                           request=None,
                           email=mail_to)

        # Navigate to the requests page to confirm quota validation denied it.
        request_description = f'Provision from [{template_name}] to [{vm_name}{extra_msg}]'
        provision_request = appliance.collections.requests.instantiate(
            request_description)
        provision_request.wait_for_request(method='ui')
        assert provision_request.row.reason.text == "Quota Exceeded"
# --- Example 27 ---
def test_service_ansible_verbosity(
    appliance,
    request,
    local_ansible_catalog_item,
    ansible_service_catalog,
    ansible_service_request,
    ansible_service,
    verbosity,
):
    """Check if the different Verbosity levels can be applied to service and
    monitor the std out
    Bugzilla:
        1460788
    Polarion:
        assignee: sbulage
        casecomponent: Ansible
        caseimportance: medium
        initialEstimate: 1/6h
        tags: ansible_embed
    """
    # Index 0 holds the numeric level, giving e.g. pattern '"verbosity"=>0'.
    pattern = f'"verbosity"=>{verbosity[0]}'
    with update(local_ansible_catalog_item):
        local_ansible_catalog_item.provisioning = {"verbosity": verbosity}
        local_ansible_catalog_item.retirement = {"verbosity": verbosity}
    # Start tailing evm.log for the verbosity pattern before ordering.
    log = LogValidator("/var/www/miq/vmdb/log/evm.log",
                       matched_patterns=[pattern])
    log.start_monitoring()

    # Pre-initialize so the finalizer never hits a NameError when the
    # order/lookup below fails before 'service_request' is assigned.
    service_request = None

    @request.addfinalizer
    def _revert():
        service = MyService(appliance, local_ansible_catalog_item.name)
        if service_request is not None and ansible_service_request.exists():
            ansible_service_request.wait_for_request()
            appliance.rest_api.collections.service_requests.action.delete(
                id=service_request.id)
        if service.exists:
            service.delete()

    ansible_service_catalog.order()
    ansible_service_request.wait_for_request()
    # 'service_request' is used by the finalizer above to remove the
    # first service request.
    request_descr = (
        f"Provisioning Service [{local_ansible_catalog_item.name}] "
        f"from [{local_ansible_catalog_item.name}]")
    service_request = appliance.rest_api.collections.service_requests.get(
        description=request_descr)
    # The playbook's standard output (which includes the verbosity value)
    # is logged to evm.log, so the pattern should appear there.
    assert log.validate(wait="60s")
    # Lazy %-style args avoid formatting unless the record is emitted.
    logger.info("Pattern found %s", log.matched_patterns)

    view = navigate_to(ansible_service, "Details")
    assert verbosity[0] == view.provisioning.details.get_text_of("Verbosity")
    assert verbosity[0] == view.retirement.details.get_text_of("Verbosity")
# --- Example 28 ---
def test_verify_purging_of_old_records(request, appliance):
    """
    Verify that tables are being purged regularly.

    Bugzilla:
        1348625

    Polarion:
        assignee: tpapaioa
        casecomponent: Appliance
        initialEstimate: 1/6h
        startsin: 5.8
    """
    # Snapshot current settings so the finalizer can roll everything back.
    old_settings = appliance.advanced_settings

    @request.addfinalizer
    def _restore_advanced_settings():
        _update_advanced_settings_restart(appliance, old_settings)

    # Shrink every purge schedule to 5 minutes so purging is observable
    # within the test's wait window.
    interval_keys = (
        'container_entities_purge_interval',
        'binary_blob_purge_interval',
        'compliance_purge_interval',
        'drift_state_purge_interval',
        'event_streams_purge_interval',
        'notifications_purge_interval',
        'performance_realtime_purging_interval',
        'performance_rollup_purging_interval',
        'policy_events_purge_interval',
        'report_result_purge_interval',
        'task_purge_interval',
        'vim_performance_states_purge_interval',
    )
    purge_settings = {key: '5.minutes' for key in interval_keys}

    new_settings = {
        'workers': {
            'worker_base': {
                'schedule_worker': purge_settings
            }
        }
    }

    obj_types = ('Binary blobs', 'Compliances', 'Container groups',
                 'Container images', 'Container nodes', 'Container projects',
                 'Container quota items', 'Container quotas', 'Containers',
                 'all daily metrics', 'Drift states', 'Event streams',
                 'all hourly metrics', 'Miq report results', 'Miq tasks',
                 'Notifications', 'orphans in Vim performance states',
                 'Policy events')

    matched_patterns = [f"Purging {obj_type}" for obj_type in obj_types]

    # Applying the settings restarts the appliance; each purge worker
    # should then log its "Purging ..." line within the wait window.
    with LogValidator(
            '/var/www/miq/vmdb/log/evm.log',
            matched_patterns=matched_patterns,
    ).waiting(wait=600, delay=30):
        _update_advanced_settings_restart(appliance, new_settings)
# --- Example 29 ---
def waiting_for_ha_monitor_started(appl, standby_server_ip, timeout):
    """Yield once the HA monitor has registered the standby server.

    On appliances older than 5.10 the standby server shows up in
    failover_databases.yml, so that file is tailed for its IP while the
    wrapped block runs; on newer appliances the block runs first and then
    we wait for the failover monitor to report running.
    """
    if appl.version < '5.10':
        standby_registered = LogValidator(
            "/var/www/miq/vmdb/config/failover_databases.yml",
            matched_patterns=[standby_server_ip],
            hostname=appl.hostname)
        with standby_registered.waiting(timeout=timeout):
            yield
    else:
        yield
        wait_for(lambda: appl.evm_failover_monitor.running, timeout=300)
def test_appliance_console_external_auth_all(app_creds, ipa_crud,
                                             configured_appliance):
    """ Commands:
    1. 'ap' launches appliance_console,
    2. '' clears info screen,
    3. '12/15' change ext auth options,
    4. 'auth_type' auth type to change,
    5. '4' apply changes.

    Polarion:
        assignee: sbulage
        caseimportance: high
        initialEstimate: 1/4h
    """
    # The same console sequence toggles every external-auth option, so run
    # it twice: the first pass flips everything on ('true'), the second
    # flips everything back off ('false'). This replaces two verbatim
    # copies of the validator/run/validate stanza.
    command_set = ('ap', '', '11', '1', '2', '3', '4')
    for state in ('true', 'false'):
        evm_tail = LogValidator('/var/www/miq/vmdb/log/evm.log',
                                matched_patterns=[
                                    f'.*sso_enabled to {state}.*',
                                    f'.*saml_enabled to {state}.*',
                                    f'.*local_login_disabled to {state}.*'
                                ],
                                hostname=configured_appliance.hostname,
                                username=app_creds['sshlogin'],
                                password=app_creds['password'])
        evm_tail.fix_before_start()
        configured_appliance.appliance_console.run_commands(command_set)
        evm_tail.validate_logs()