def test_refresh_with_empty_iot_hub_azure(request, provider, setup_provider):
    """
    Polarion:
        assignee: anikifor
        casecomponent: Cloud
        caseimportance: low
        initialEstimate: 1/6h
        setup: prepare env
               create an IoT Hub in Azure (the free pricing tier is good enough):
               $ az iot hub create --name rmanes-iothub --resource-group iot_rg
        testSteps:
            1. refresh azure provider
        expectedResults:
            1. no errors found in logs
    Bugzilla:
        1495318
    """
    result = LogValidator("/var/www/miq/vmdb/log/evm.log",
                          failure_patterns=[r".*ERROR.*"])
    result.start_monitoring()
    azure = provider.mgmt
    if not azure.has_iothub():
        iothub_name = fauxfactory.gen_alpha(18, start="potatoiothub_")
        azure.create_iothub(iothub_name)
        request.addfinalizer(lambda: azure.delete_iothub(iothub_name))
        assert azure.has_iothub()
    provider.refresh_provider_relationships(wait=600)
    assert result.validate(wait="60s")
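# The tests in this collection use LogValidator in two equivalent idioms. A minimal
# sketch of both follows; the log pattern and _do_action() are hypothetical
# placeholders, not taken from any test here:
def _logvalidator_idioms_sketch():
    # Explicit form: start monitoring, perform the action, then validate with a wait.
    validator = LogValidator("/var/www/miq/vmdb/log/evm.log",
                             matched_patterns=[r".*some expected line.*"])
    validator.start_monitoring()
    _do_action()  # hypothetical action expected to emit the matching log line
    assert validator.validate(wait="60s")

    # Context-manager form: monitoring starts on entry and validation runs on exit.
    with LogValidator("/var/www/miq/vmdb/log/evm.log",
                      matched_patterns=[r".*some expected line.*"]).waiting(timeout=120):
        _do_action()
    # (Older examples below express the same flow via fix_before_start()/validate_logs().)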
def test_retry_migration_plan(cancel_migration_plan):
    """
    Test canceling a migration and then retrying it
    Polarion:
        assignee: sshveta
        initialEstimate: 1/4h
        caseimportance: medium
        caseposneg: positive
        testtype: functional
        startsin: 5.10
        casecomponent: V2V

    Bugzilla:
        1755632
        1746592
    """
    migration_plan = cancel_migration_plan
    view = navigate_to(migration_plan, "Complete")
    # Retry Migration
    view.plans_completed_list.migrate_plan(migration_plan.name)
    assert migration_plan.wait_for_state("Started")

    # Automating BZ 1755632
    retry_interval_log = LogValidator(
        '/var/www/miq/vmdb/log/evm.log',
        matched_patterns=[r'.*to Automate for delivery in \[60\] seconds.*'])
    retry_interval_log.start_monitoring()
    # search logs and wait for validation
    assert retry_interval_log.validate(wait="150s")

    assert migration_plan.wait_for_state("In_Progress")
    assert migration_plan.wait_for_state("Completed")
    assert migration_plan.wait_for_state("Successful")
def test_appliance_console_external_auth_all(app_creds, ipa_crud, configured_appliance):
    """'ap' launches appliance_console, '' clears info screen, '12/15' change ext auth options,
    'auth_type' auth type to change, '4' apply changes."""

    evm_tail = LogValidator('/var/www/miq/vmdb/log/evm.log',
                            matched_patterns=['.*sso_enabled to true.*',
                                              '.*saml_enabled to true.*',
                                              '.*local_login_disabled to true.*'],
                            hostname=configured_appliance.hostname,
                            username=app_creds['sshlogin'],
                            password=app_creds['password'])
    evm_tail.fix_before_start()
    command_set = ('ap', '', '11', '1', '2', '3', '4')
    configured_appliance.appliance_console.run_commands(command_set)
    evm_tail.validate_logs()

    evm_tail = LogValidator('/var/www/miq/vmdb/log/evm.log',
                            matched_patterns=['.*sso_enabled to false.*',
                                              '.*saml_enabled to false.*',
                                              '.*local_login_disabled to false.*'],
                            hostname=configured_appliance.hostname,
                            username=app_creds['sshlogin'],
                            password=app_creds['password'])

    evm_tail.fix_before_start()
    command_set = ('ap', '', '11', '1', '2', '3', '4')
    configured_appliance.appliance_console.run_commands(command_set)
    evm_tail.validate_logs()
def test_black_console_external_auth(auth_type, app_creds, ipa_crud):
    """'ap' launches appliance_console, '' clears info screen, '12/15' change ext auth options,
    'auth_type' auth type to change, '4' apply changes."""

    evm_tail = LogValidator('/var/www/miq/vmdb/log/evm.log',
                            matched_patterns=['.*{} to true.*'.format(auth_type.option)],
                            hostname=ipa_crud.hostname,
                            username=app_creds['sshlogin'],
                            password=app_creds['password'])
    evm_tail.fix_before_start()
    opt = '12' if ipa_crud.version >= "5.8" else '15'
    command_set = ('ap', '', opt, auth_type.index, '4')
    ipa_crud.appliance_console.run_commands(command_set)
    evm_tail.validate_logs()

    evm_tail = LogValidator('/var/www/miq/vmdb/log/evm.log',
                            matched_patterns=['.*{} to false.*'.format(auth_type.option)],
                            hostname=ipa_crud.hostname,
                            username=app_creds['sshlogin'],
                            password=app_creds['password'])

    evm_tail.fix_before_start()
    opt = '12' if ipa_crud.version >= "5.8" else '15'
    command_set = ('ap', '', opt, auth_type.index, '4')
    ipa_crud.appliance_console.run_commands(command_set)
    evm_tail.validate_logs()
def test_appliance_console_cli_external_auth(auth_type, ipa_crud,
                                             configured_appliance):
    """
    Polarion:
        assignee: dgaikwad
        caseimportance: high
        casecomponent: Auth
        initialEstimate: 1/4h
    """
    evm_tail = LogValidator('/var/www/miq/vmdb/log/evm.log',
                            matched_patterns=[f'.*{auth_type} to true.*'],
                            hostname=configured_appliance.hostname)
    evm_tail.start_monitoring()
    cmd_set = f'appliance_console_cli --extauth-opts="/authentication/{auth_type}=true"'
    assert configured_appliance.ssh_client.run_command(cmd_set)
    assert evm_tail.validate(wait="30s")

    evm_tail = LogValidator('/var/www/miq/vmdb/log/evm.log',
                            matched_patterns=[f'.*{auth_type} to false.*'],
                            hostname=configured_appliance.hostname)

    evm_tail.start_monitoring()
    cmd_unset = f'appliance_console_cli --extauth-opts="/authentication/{auth_type}=false"'
    assert configured_appliance.ssh_client.run_command(cmd_unset)
    assert evm_tail.validate(wait="30s")
@pytest.fixture
def log_monitor(user_obj, temp_appliance_preconfig_long):
    """Search evm.log for any plaintext password"""
    result = LogValidator("/var/www/miq/vmdb/log/evm.log",
                          failure_patterns=[f"{user_obj.credential.secret}"],
                          hostname=temp_appliance_preconfig_long.hostname)
    result.start_monitoring()
    yield result
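# A minimal sketch of a test consuming the log_monitor fixture above; the test
# name and _exercise_credentials() are hypothetical placeholders:
def test_no_plaintext_password_sketch(log_monitor, user_obj):
    _exercise_credentials(user_obj)  # hypothetical step that handles the user's secret
    # With only failure_patterns set, validate() fails if the plaintext secret
    # ever appears in evm.log.
    assert log_monitor.validate(wait="60s")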
def test_load_service_dialog(appliance, import_datastore,
                             generic_catalog_item_with_imported_dialog):
    """
    Bugzilla:
        1595776

    Polarion:
        assignee: nansari
        startsin: 5.10
        casecomponent: Services
        initialEstimate: 1/16h
    """
    auto_log = '/var/www/miq/vmdb/log/automation.log'
    catalog_item, _, _ = generic_catalog_item_with_imported_dialog
    service_catalogs = ServiceCatalogs(appliance, catalog_item.catalog, catalog_item.name)

    with LogValidator(auto_log, matched_patterns=["Service dialog load - Begin"]
                      ).waiting(timeout=120):
        view = navigate_to(service_catalogs, "Order")

    with LogValidator(auto_log, failure_patterns=["Service dialog load - Begin"]
                      ).waiting(timeout=120):
        view.submit_button.click()
        description = f'Provisioning Service [{catalog_item.name}] from [{catalog_item.name}]'
        provision_request = appliance.collections.requests.instantiate(description)
        provision_request.wait_for_request(method='ui')
def test_appliance_console_external_auth(auth_type, ipa_crud, configured_appliance):
    """ Commands:
    1. 'ap' launches appliance_console,
    2. RETURN clears info screen,
    3. '13' change ext auth options,
    4. 'auth_type' auth type to change,
    5. '5' apply changes.

    Polarion:
        assignee: mnadeem
        caseimportance: high
        casecomponent: Auth
        initialEstimate: 1/4h
    """
    # TODO this depends on the auth_type options being disabled when the test is run
    # TODO it assumes that first switch is to true, then false.

    evm_tail = LogValidator('/var/www/miq/vmdb/log/evm.log',
                            matched_patterns=['.*{} to true.*'.format(auth_type.option)],
                            hostname=configured_appliance.hostname)
    evm_tail.fix_before_start()
    command_set = ('ap', RETURN, '13', auth_type.index, '5', RETURN, RETURN)
    configured_appliance.appliance_console.run_commands(command_set, timeout=30)
    evm_tail.validate_logs()

    evm_tail = LogValidator('/var/www/miq/vmdb/log/evm.log',
                            matched_patterns=['.*{} to false.*'.format(auth_type.option)],
                            hostname=configured_appliance.hostname)

    evm_tail.fix_before_start()
    command_set = ('ap', RETURN, '13', auth_type.index, '5', RETURN, RETURN)
    configured_appliance.appliance_console.run_commands(command_set, timeout=30)
    evm_tail.validate_logs()
def test_appliance_console_cli_external_auth(auth_type, ipa_crud, app_creds, configured_appliance):
    """
    Polarion:
        assignee: sbulage
        caseimportance: high
        initialEstimate: 1/4h
    """
    evm_tail = LogValidator('/var/www/miq/vmdb/log/evm.log',
                            matched_patterns=['.*{} to true.*'.format(auth_type)],
                            hostname=configured_appliance.hostname,
                            username=app_creds['sshlogin'],
                            password=app_creds['sshpass'])
    evm_tail.fix_before_start()
    cmd_set = 'appliance_console_cli --extauth-opts="/authentication/{}=true"'.format(auth_type)
    assert configured_appliance.ssh_client.run_command(cmd_set)
    evm_tail.validate_logs()

    evm_tail = LogValidator('/var/www/miq/vmdb/log/evm.log',
                            matched_patterns=['.*{} to false.*'.format(auth_type)],
                            hostname=configured_appliance.hostname,
                            username=app_creds['sshlogin'],
                            password=app_creds['sshpass'])

    evm_tail.fix_before_start()
    cmd_unset = 'appliance_console_cli --extauth-opts="/authentication/{}=false"'.format(auth_type)
    assert configured_appliance.ssh_client.run_command(cmd_unset)
    evm_tail.validate_logs()
def test_service_provisioning_email(request, appliance, catalog_item):
    """
    Polarion:
        assignee: nansari
        casecomponent: Services
        caseposneg: negative
        initialEstimate: 1/4h

    Bugzilla:
        1668004
    """
    result = LogValidator("/var/www/miq/vmdb/log/automation.log",
                          failure_patterns=[".*Error during substitution.*"])
    result.start_monitoring()
    service_catalogs = ServiceCatalogs(appliance, catalog_item.catalog,
                                       catalog_item.name)
    service_catalogs.order()
    request_description = (
        "Provisioning Service [{catalog_item_name}] from [{catalog_item_name}]"
        .format(catalog_item_name=catalog_item.name))
    provision_request = appliance.collections.requests.instantiate(
        request_description)
    provision_request.wait_for_request(method='ui')
    request.addfinalizer(provision_request.remove_request)
    assert result.validate(wait="60s")
@pytest.fixture
def middleware_evm_log_no_error():
    evm_tail = LogValidator('/var/www/miq/vmdb/log/evm.log',
                            skip_patterns=['.*ERROR.*API.*MIQ(Api::ApiController.api_error).*'],
                            failure_patterns=['.*ERROR.*'])
    evm_tail.fix_before_start()
    yield
    evm_tail.validate_logs()
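# Sketch of how the fixture above is consumed (hypothetical names): skip_patterns
# whitelists the known MIQ API error line, so any other ERROR entry written while
# the test body runs makes validate_logs() fail on teardown.
def test_middleware_action_sketch(middleware_evm_log_no_error):
    _run_middleware_action()  # hypothetical action expected to produce no ERROR lines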
def test_check_system_request_calls_depr_conf_mgmt(appliance, copy_instance):
    """
    Polarion:
        assignee: ghubale
        initialEstimate: 1/8h
        caseimportance: low
        caseposneg: positive
        testtype: functional
        startsin: 5.10
        casecomponent: Automate
        tags: automate
        setup:
            1. Copy /System/Request/ansible_tower_job instance to new domain
        testSteps:
            1. Run that instance(ansible_tower_job) using simulation
            2. See automation log
        expectedResults:
            1.
            2. The /System/Request/ansible_tower_job instance should call the newer
               "/AutomationManagement/AnsibleTower/Operations/StateMachines/Job/default" method

    Bugzilla:
        1615444
    """
    search = '/AutomationManagement/AnsibleTower/Operations/StateMachines/Job/default'
    result = LogValidator(
        "/var/www/miq/vmdb/log/automation.log", matched_patterns=[".*{}.*".format(search)]
    )
    result.start_monitoring()
    # Executing the automate instance - 'ansible_tower_job' using simulation
    simulate(
        appliance=appliance,
        request=copy_instance.name
    )
    assert result.validate(wait="60s")
def test_action_power_on_audit(request, vm, vm_off, policy_for_testing):
    """ This test tests action 'Generate Audit Event'.

    This test sets the policy that it logs powering on of the VM. Then it powers up the vm and
    checks whether audit logs contain message about that.

    Metadata:
        test_flag: actions, provision

    Polarion:
        assignee: dgaikwad
        initialEstimate: 1/6h
        casecomponent: Control
    """
    policy_result = LogValidator(
        "/var/www/miq/vmdb/log/audit.log",
        matched_patterns=[
            r'.*policy: \[{}\], event: \[VM Power On\]'.format(
                policy_for_testing.description
            )
        ]
    )
    policy_result.start_monitoring()
    # Set up the policy and prepare finalizer
    policy_for_testing.assign_actions_to_event("VM Power On", ["Generate Audit Event"])

    @request.addfinalizer
    def _cleanup():
        policy_for_testing.unassign_events("VM Power On")

    # Start the VM
    vm.mgmt.ensure_state(VmState.RUNNING)

    # Search the logs and wait for validation
    assert policy_result.validate("180s")
def test_delete_vm_on_provider_side(vm_test, provider):
    """ Delete VM on the provider side and refresh relationships in CFME

    Polarion:
        assignee: anikifor
        initialEstimate: 1/4h
        casecomponent: Infra

    Bugzilla:
        1592430
    """
    logs = LogValidator("/var/www/miq/vmdb/log/evm.log",
                        failure_patterns=[".*ERROR.*"])
    logs.start_monitoring()
    vm_test.cleanup_on_provider()
    provider.refresh_provider_relationships()
    try:
        wait_for(provider.is_refreshed,
                 func_kwargs={'refresh_delta': 10},
                 timeout=600)
    except TimedOutError:
        pytest.fail(
            "Provider failed to refresh after VM was removed from the provider"
        )
    assert logs.validate(wait="60s")
def test_action_power_on_audit(request, vm, vm_off, appliance, policy_for_testing):
    """ This test tests action 'Generate Audit Event'.

    This test sets the policy that it logs powering on of the VM. Then it powers up the vm and
    checks whether audit logs contain message about that.

    Metadata:
        test_flag: actions, provision

    Polarion:
        assignee: jdupuy
        initialEstimate: 1/6h
        casecomponent: Control
    """
    policy_result = LogValidator(
        "/var/www/miq/vmdb/log/audit.log",
        matched_patterns=[
            r'.*policy: \[{}\], event: \[VM Power On\]'.format(
                policy_for_testing.description
            )
        ]
    )
    policy_result.fix_before_start()
    # Set up the policy and prepare finalizer
    policy_for_testing.assign_actions_to_event("VM Power On", ["Generate Audit Event"])

    @request.addfinalizer
    def _cleanup():
        policy_for_testing.unassign_events("VM Power On")

    # Start the VM
    vm.mgmt.ensure_state(VmState.RUNNING)

    # Search the logs and wait for validation
    policy_result.wait_for_log_validation()
def test_appliance_console_external_auth(auth_type, app_creds, ipa_crud, configured_appliance):
    """'ap' launches appliance_console, '' clears info screen, '11' change ext auth options,
    'auth_type' auth type to change, '4' apply changes."""
    # TODO this depends on the auth_type options being disabled when the test is run
    # TODO it assumes that first switch is to true, then false.

    evm_tail = LogValidator('/var/www/miq/vmdb/log/evm.log',
                            matched_patterns=['.*{} to true.*'.format(auth_type.option)],
                            hostname=configured_appliance.hostname,
                            username=app_creds['sshlogin'],
                            password=app_creds['sshpass'])
    evm_tail.fix_before_start()
    command_set = ('ap', '', '11', auth_type.index, '4')
    configured_appliance.appliance_console.run_commands(command_set)
    evm_tail.validate_logs()

    evm_tail = LogValidator('/var/www/miq/vmdb/log/evm.log',
                            matched_patterns=['.*{} to false.*'.format(auth_type.option)],
                            hostname=configured_appliance.hostname,
                            username=app_creds['sshlogin'],
                            password=app_creds['sshpass'])

    evm_tail.fix_before_start()
    command_set = ('ap', '', '11', auth_type.index, '4')
    configured_appliance.appliance_console.run_commands(command_set)
    evm_tail.validate_logs()
def test_snapshot_crud(create_vm, provider):
    """Tests snapshot crud

    Metadata:
        test_flag: snapshot, provision

    Polarion:
        assignee: prichard
        casecomponent: Infra
        initialEstimate: 1/6h
    """
    result = LogValidator(
        "/var/www/miq/vmdb/log/evm.log",
        failure_patterns=[r".*ERROR.*"],
    )
    result.start_monitoring()
    # has_name is false if testing RHEVMProvider
    snapshot = new_snapshot(create_vm,
                            has_name=(not provider.one_of(RHEVMProvider)))
    snapshot.create()
    # check the snapshot size as a "read" check
    if provider.appliance.version >= "5.11" and provider.one_of(RHEVMProvider):
        assert snapshot.size
    snapshot.delete()
    provider.refresh_provider_relationships(wait=600)
    assert result.validate(wait="60s")
def test_miq_password_decrypt(klass):
    """
    Polarion:
        assignee: ghubale
        casecomponent: Automate
        initialEstimate: 1/3h

    Bugzilla:
        1720432
    """
    # Ruby script for decrypting password
    script = (
        'require "manageiq-password"\n'
        'root_password = MiqPassword.encrypt("abc")\n'
        '$evm.log("info", "Root Password is #{root_password}")\n'
        'root_password_decrypted = MiqPassword.decrypt(root_password)\n'
        '$evm.log("info", "Decrypted password is #{root_password_decrypted}")')

    # Adding schema for executing method
    klass.schema.add_fields({
        'name': 'execute',
        'type': 'Method',
        'data_type': 'String'
    })

    # Adding automate method
    method = klass.methods.create(name=fauxfactory.gen_alphanumeric(),
                                  display_name=fauxfactory.gen_alphanumeric(),
                                  location='inline',
                                  script=script)

    # Adding instance to call automate method
    instance = klass.instances.create(
        name=fauxfactory.gen_alphanumeric(),
        display_name=fauxfactory.gen_alphanumeric(),
        description=fauxfactory.gen_alphanumeric(),
        fields={'execute': {
            'value': method.name
        }})

    result = LogValidator(
        "/var/www/miq/vmdb/log/automation.log",
        matched_patterns=[".*Decrypted password is abc.*"],
    )
    result.start_monitoring()

    # Executing method via simulation to check decrypted password
    simulate(
        appliance=klass.appliance,
        attributes_values={
            "namespace": klass.namespace.name,
            "class": klass.name,
            "instance": instance.name,
        },
        message="create",
        request="Call_Instance",
        execute_methods=True,
    )
    assert result.validate()
def test_configuration_dropdown_roles_by_server(appliance, request):
    """
    Polarion:
        assignee: pvala
        casecomponent: Configuration
        caseimportance: high
        initialEstimate: 1/15h
        testSteps:
            1. Navigate to Settings -> Configuration -> Diagnostics -> CFME Region ->
                Roles by Servers.
            2. Select a Role and check the `Configuration` dropdown in toolbar.
            3. Check the `Suspend Role` option.
            4. Click the `Suspend Role` option and suspend the role
                and monitor production.log for error -
                `Error caught: [ActiveRecord::RecordNotFound] Couldn't find MiqServer with 'id'=0`
        expectedResults:
            1.
            2. `Configuration` dropdown must be enabled/active.
            3. `Suspend Role` must be enabled.
            4. Role must be suspended and there must be no error in the logs.

    Bugzilla:
        1715466
        1455283
        1404280
        1734393
    """
    # 1
    view = navigate_to(appliance.server.zone.region, "RolesByServers")

    # 2
    view.rolesbyservers.tree.select_item("SmartState Analysis")
    assert view.rolesbyservers.configuration.is_displayed

    # 3
    assert view.rolesbyservers.configuration.item_enabled("Suspend Role")

    # 4
    log = LogValidator(
        "/var/www/miq/vmdb/log/production.log",
        failure_patterns=[
            ".*Error caught: .*ActiveRecord::RecordNotFound.* Couldn't find MiqServer with 'id'=.*"
        ],
    )

    log.start_monitoring()
    view.rolesbyservers.configuration.item_select("Suspend Role",
                                                  handle_alert=True)

    request.addfinalizer(lambda: view.rolesbyservers.configuration.item_select(
        "Start Role", handle_alert=True))

    view.flash.assert_message("Suspend successfully initiated")

    assert log.validate(wait="20s")

    if BZ(1734393, forced_streams=["5.10"]).blocks:
        view.rolesbyservers.tree.select_item("SmartState Analysis")
    assert "available" in view.rolesbyservers.tree.currently_selected_role
def test_domain_lock_disabled(klass):
    """
    Polarion:
        assignee: ghubale
        casecomponent: Automate
        caseimportance: medium
        initialEstimate: 1/16h
        tags: automate
    """
    schema_field = fauxfactory.gen_alphanumeric()
    # Disable automate domain
    with update(klass.namespace.domain):
        klass.namespace.domain.enabled = False

    # Adding schema for executing automate method
    klass.schema.add_fields({'name': schema_field, 'type': 'Method', 'data_type': 'String'})

    # Adding automate method
    method = klass.methods.create(
        name=fauxfactory.gen_alphanumeric(),
        display_name=fauxfactory.gen_alphanumeric(),
        location='inline'
    )

    # Adding instance to call automate method
    instance = klass.instances.create(
        name=fauxfactory.gen_alphanumeric(),
        display_name=fauxfactory.gen_alphanumeric(),
        description=fauxfactory.gen_alphanumeric(),
        fields={schema_field: {'value': method.name}}
    )

    result = LogValidator(
        "/var/www/miq/vmdb/log/automation.log",
        matched_patterns=[r".*ERROR.*"],
    )
    result.start_monitoring()

    # Executing automate method using simulation
    simulate(
        appliance=klass.appliance,
        attributes_values={
            "namespace": klass.namespace.name,
            "class": klass.name,
            "instance": instance.name,
        },
        message="create",
        request="Call_Instance",
        execute_methods=True,
    )
    assert result.validate(wait="60s")

    klass.namespace.domain.lock()
    view = navigate_to(klass.namespace.domain, 'Details')
    assert 'Disabled' in view.title.text
    assert 'Locked' in view.title.text

    # Need to unlock the domain to perform teardown on domain, namespace, class
    klass.namespace.domain.unlock()
def test_service_ansible_verbosity(
    appliance,
    request,
    local_ansible_catalog_item,
    ansible_service_catalog,
    ansible_service_request,
    ansible_service,
    verbosity,
):
    """Check if the different Verbosity levels can be applied to service and
    monitor the std out
    Bugzilla:
        1460788
    Polarion:
        assignee: sbulage
        casecomponent: Ansible
        caseimportance: medium
        initialEstimate: 1/6h
        tags: ansible_embed
    """
    # verbosity[0] is the numeric level, giving a pattern like "verbosity"=>0.
    pattern = '"verbosity"=>{}'.format(verbosity[0])
    with update(local_ansible_catalog_item):
        local_ansible_catalog_item.provisioning = {"verbosity": verbosity}
        local_ansible_catalog_item.retirement = {"verbosity": verbosity}
    # Log Validator
    log = LogValidator("/var/www/miq/vmdb/log/evm.log",
                       matched_patterns=[pattern])
    # Start log monitoring for the given pattern
    log.start_monitoring()

    @request.addfinalizer
    def _revert():
        service = MyService(appliance, local_ansible_catalog_item.name)
        if ansible_service_request.exists():
            ansible_service_request.wait_for_request()
            appliance.rest_api.collections.service_requests.action.delete(
                id=service_request.id)
        if service.exists:
            service.delete()

    ansible_service_catalog.order()
    ansible_service_request.wait_for_request()
    # 'request_descr' and 'service_request' are used in the finalizer to remove
    # the first service request
    request_descr = (
        f"Provisioning Service [{local_ansible_catalog_item.name}] "
        f"from [{local_ansible_catalog_item.name}]")
    service_request = appliance.rest_api.collections.service_requests.get(
        description=request_descr)
    # Search for a string like '"verbosity"=>0' in evm.log, since the job's
    # standard output is logged there
    assert log.validate(wait="60s")
    logger.info("Pattern found {}".format(log.matched_patterns))

    view = navigate_to(ansible_service, "Details")
    assert verbosity[0] == view.provisioning.details.get_text_of("Verbosity")
    assert verbosity[0] == view.retirement.details.get_text_of("Verbosity")
def test_action_prevent_host_ssa(request, host, host_policy):
    """Tests preventing Smart State Analysis on a host.

    This test sets the policy that prevents host analysis.

    Bugzilla:
        1437910

    Metadata:
        test_flag: actions, provision

    Polarion:
        assignee: jdupuy
        initialEstimate: 1/4h
        casecomponent: Control
    """
    host_policy.assign_actions_to_event(
        "Host Analysis Request", ["Prevent current event from proceeding"])

    @request.addfinalizer
    def _cleanup():
        host_policy.unassign_events("Host Analysis Request")

    policy_result = LogValidator(
        "/var/www/miq/vmdb/log/policy.log",
        matched_patterns=[
            '.*Prevent current event from proceeding.*Host Analysis Request.*{}'
            .format(host.name)
        ])
    policy_result.start_monitoring()

    view = navigate_to(host, "Details")

    def _scan():
        return view.entities.summary("Relationships").get_text_of(
            "Drift History")

    original = _scan()
    view.toolbar.configuration.item_select("Perform SmartState Analysis",
                                           handle_alert=True)
    view.flash.assert_success_message(
        '"{}": Analysis successfully initiated'.format(host.name))
    try:
        wait_for(
            lambda: _scan() != original,
            num_sec=60,
            delay=5,
            fail_func=view.browser.refresh,
            message="Check if Drift History field is changed",
        )
    except TimedOutError:
        assert policy_result.validate(wait="120s")
    else:
        pytest.fail("CFME did not prevent analysing the Host {}".format(
            host.name))
def test_send_text_custom_report_with_long_condition(appliance, setup_provider,
                                                     smtp_test, request,
                                                     get_report):
    """
    Polarion:
        assignee: pvala
        casecomponent: Reporting
        caseimportance: medium
        initialEstimate: 1/3h
        setup:
            1. Create a report containing 1 or 2 columns
                and add a report filter with a long condition.(Refer BZ for more detail)
            2. Create a schedule for the report and check send_txt.
        testSteps:
            1. Queue the schedule and monitor evm log.
        expectedResults:
            1. There should be no error in the log and report must be sent successfully.

    Bugzilla:
        1677839
    """
    report = get_report("long_condition_report.yaml",
                        "test_long_condition_report")
    data = {
        "timer": {
            "hour": "12",
            "minute": "10"
        },
        "email": {
            "to_emails": "*****@*****.**"
        },
        "email_options": {
            "send_if_empty": True,
            "send_txt": True
        },
    }
    schedule = report.create_schedule(**data)
    request.addfinalizer(schedule.delete_if_exists)

    # prepare LogValidator
    log = LogValidator("/var/www/miq/vmdb/log/evm.log",
                       failure_patterns=[".*negative argument.*"])

    log.start_monitoring()
    schedule.queue()

    # assert that the mail was sent
    assert (len(
        smtp_test.wait_for_emails(wait=200,
                                  to_address=data["email"]["to_emails"])) == 1)
    # assert that the pattern was not found in the logs
    assert log.validate(), "Found error message in the logs."
def test_infrastructure_hosts_refresh_multi(appliance,
                                            setup_provider_min_hosts,
                                            provider):
    """
    Polarion:
        assignee: prichard
        casecomponent: Infra
        caseimportance: low
        initialEstimate: 1/6h
        testSteps:
            1. Navigate to the Compute > Infrastructure > Providers view.
            2. Click on a provider quadicon, and then the hosts link along the top row of the view.
            3. Select all hosts (need at least 2 hosts) by checking the box in upper left of
               quadicons.
            4. Click "Refresh Relationships and Power States" under the Configuration
               dropdowm, and then click "OK" when prompted.
        expectedResults:
            1. Providers view is displayed.
            2. Hosts view is displayed.
            3.
            4. "Refresh initiated for X Hosts from the CFME Database" is displayed in green
               banner where "X" is the number of selected hosts. Properties for each host are
               refreshed. Making changes to test pre-commithooks
    """
    num_refresh = 2
    my_slice = slice(0, num_refresh, None)
    hosts_view = navigate_to(provider.collections.hosts, "All")
    num_hosts = hosts_view.entities.paginator.items_amount
    if num_hosts < num_refresh:
        pytest.skip('not enough hosts in appliance UI to run test')
    evm_tail = LogValidator(
        '/var/www/miq/vmdb/log/evm.log',
        matched_patterns=[
            f"'Refresh Provider' successfully initiated for "
            f"{num_refresh} Hosts"
        ],
        hostname=appliance.hostname)
    evm_tail.start_monitoring()
    for h in hosts_view.entities.get_all(slice=my_slice):
        h.check()
    hosts_view.toolbar.configuration.item_select(
        'Refresh Relationships and Power States', handle_alert=True)
    hosts_view.flash.assert_success_message(
        f'Refresh initiated for {num_refresh} Hosts from the CFME Database')
    try:
        wait_for(provider.is_refreshed,
                 func_kwargs={'force_refresh': False},
                 num_sec=300,
                 delay=10)
    except TimedOutError:
        pytest.fail("Hosts were not refreshed within given time")
    assert evm_tail.validate(wait="30s")
def test_action_prevent_host_ssa(request, appliance, host, host_policy):
    """Tests preventing Smart State Analysis on a host.

    This test sets the policy that prevents host analysis.

    Bugzilla:
        1437910

    Metadata:
        test_flag: actions, provision

    Polarion:
        assignee: jdupuy
        initialEstimate: 1/4h
        casecomponent: Control
    """
    host_policy.assign_actions_to_event("Host Analysis Request",
        ["Prevent current event from proceeding"])

    @request.addfinalizer
    def _cleanup():
        host_policy.unassign_events("Host Analysis Request")

    policy_result = LogValidator(
        "/var/www/miq/vmdb/log/policy.log",
        matched_patterns=[
            '.*Prevent current event from proceeding.*Host Analysis Request.*{}'.format(host.name)
        ]
    )
    policy_result.fix_before_start()

    view = navigate_to(host, "Details")

    def _scan():
        return view.entities.summary("Relationships").get_text_of("Drift History")

    original = _scan()
    view.toolbar.configuration.item_select("Perform SmartState Analysis", handle_alert=True)
    view.flash.assert_success_message('"{}": Analysis successfully initiated'.format(host.name))
    try:
        wait_for(
            lambda: _scan() != original,
            num_sec=60,
            delay=5,
            fail_func=view.browser.refresh,
            message="Check if Drift History field is changed",
        )
    except TimedOutError:
        policy_result.validate_logs()
    else:
        pytest.fail("CFME did not prevent analysing the Host {}".format(host.name))
def test_update_ha(ha_appliances_with_providers, appliance, update_strategy,
                   request, old_version):
    """ Tests updating an appliance with providers using webui, also confirms that the
            provisioning continues to function correctly after the update has completed

    Polarion:
        assignee: jhenner
        caseimportance: high
        casecomponent: Appliance
        initialEstimate: 1/4h
    """
    evm_log = '/var/www/miq/vmdb/log/evm.log'
    update_strategy(ha_appliances_with_providers[2])
    wait_for(do_appliance_versions_match,
             func_args=(appliance, ha_appliances_with_providers[2]),
             num_sec=900,
             delay=20,
             handle_exception=True,
             message='Waiting for appliance to update')

    if BZ(1704835,
          forced_streams=get_stream(
              ha_appliances_with_providers[2].version)).blocks:
        with LogValidator(
                evm_log,
                matched_patterns=[r'Starting database failover monitor'],
                hostname=ha_appliances_with_providers[2].hostname).waiting(
                    wait=30):
            ha_appliances_with_providers[2].evm_failover_monitor.restart()

    assert ha_appliances_with_providers[2].evm_failover_monitor.running

    with LogValidator(
            evm_log,
            matched_patterns=['Starting to execute failover'],
            hostname=ha_appliances_with_providers[2].hostname).waiting(
                wait=450):
        # Cause failover to occur
        result = ha_appliances_with_providers[0].ssh_client.run_command(
            'systemctl stop $APPLIANCE_PG_SERVICE', timeout=15)
        assert result.success, "Failed to stop APPLIANCE_PG_SERVICE: {}".format(
            result.output)

    ha_appliances_with_providers[2].evmserverd.wait_for_running()
    ha_appliances_with_providers[2].wait_for_web_ui()
    # Verify that existing provider can detect new VMs
    virtual_crud = provider_app_crud(VMwareProvider,
                                     ha_appliances_with_providers[2])
    vm = provision_vm(request, virtual_crud)
    assert vm.provider.mgmt.does_vm_exist(vm.name), "vm not provisioned"
def test_configuration_database_garbage_collection(appliance):
    """
        Navigate to Settings -> Configuration -> Diagnostics -> CFME Region -> Database.
        Submit "Run database Garbage Collection Now" and check the UI/logs for errors.
    """
    evm_tail = LogValidator('/var/www/miq/vmdb/log/evm.log',
                            matched_patterns=[
                                r'.*Queued the action: \[Database GC\] being run for user:.*'],
                            failure_patterns=['.*ERROR.*'])
    evm_tail.fix_before_start()
    view = navigate_to(appliance.server.zone.region, 'Database')
    view.submit_db_garbage_collection_button.click()
    view.flash.assert_message('Database Garbage Collection successfully initiated')
    evm_tail.validate_logs()
def test_automate_methods_from_dynamic_dialog_should_run_as_per_designed(
        request, appliance, import_datastore, import_data, import_dialog,
        catalog, soft_assert):
    """
    Bugzilla:
        1571000

    Polarion:
        assignee: nansari
        casecomponent: Services
        testtype: functional
        initialEstimate: 1/16h
        startsin: 5.9
        tags: service
    """
    sd, ele_label = import_dialog
    catalog_item = appliance.collections.catalog_items.create(
        appliance.collections.catalog_items.GENERIC,
        name=fauxfactory.gen_alphanumeric(),
        description=fauxfactory.gen_alphanumeric(),
        display_in=True,
        catalog=catalog,
        dialog=sd,
    )
    request.addfinalizer(catalog_item.delete_if_exists)
    service_catalogs = ServiceCatalogs(appliance, catalog_item.catalog,
                                       catalog_item.name)
    patterns = [
        ".*CC- dialog_instance1 value=.*",
        ".*CC- dialog_instance2 value=.*",
        ".*CC- dialog_instance3 value=.*",
    ]

    # Check that the automate method is triggered exactly once by each of the three
    # instances after ordering the catalog item
    result = LogValidator("/var/www/miq/vmdb/log/automation.log",
                          matched_patterns=patterns)
    result.start_monitoring()
    view = navigate_to(service_catalogs, "Order")
    for pattern in patterns:
        soft_assert(result.matches[pattern] == 1)

    with LogValidator("/var/www/miq/vmdb/log/automation.log",
                      failure_patterns=patterns).waiting(timeout=120):
        # Values like 'label1' (the value of ele_label), 'label2' and 'label3' are element
        # names of three different text boxes attached to the service dialog
        for ele_name in [ele_label, "label2", "label3"]:
            # Check that the automate method is not triggered after updating dialog widget values
            view.fields(ele_name).input.fill(fauxfactory.gen_alphanumeric())
def test_custom_button_automate_service_vm(request, appliance, service_vm,
                                           button_group):
    """ Test custom button execution on SSUI vm resource detail page

    Polarion:
        assignee: ndhandre
        initialEstimate: 1/2h
        caseposneg: positive
        testtype: functional
        startsin: 5.9
        casecomponent: CustomButton
        tags: custom_button

    Bugzilla:
        1427430
        1450473
        1454910
    """

    service, _ = service_vm
    with appliance.context.use(ViaUI):
        button = button_group.buttons.create(
            text=fauxfactory.gen_alphanumeric(),
            hover=fauxfactory.gen_alphanumeric(),
            system="Request",
            request="InspectMe",
        )
        request.addfinalizer(button.delete_if_exists)

    # Check for UI and SSUI destinations.
    for context in [ViaUI, ViaSSUI]:
        with appliance.context.use(context):
            nav_to = ssui_nav if context is ViaSSUI else ui_nav

            # Navigate to VM Details page of service
            view = nav_to(service, "VMDetails")

            # start log check
            log = LogValidator("/var/www/miq/vmdb/log/automation.log",
                               matched_patterns=["Attributes - Begin"])
            log.start_monitoring()

            # Execute custom button on service vm
            custom_button_group = Dropdown(view, button_group.text)
            custom_button_group.item_select(button.text)

            # validate request in log
            assert log.validate(wait="120s")
def test_clicking_created_catalog_item_in_the_list(appliance,
                                                   generic_catalog_item):
    """
    Bugzilla:
        1702343

    Polarion:
        assignee: nansari
        startsin: 5.10
        casecomponent: Services
        initialEstimate: 1/6h
        testSteps:
            1. Go to Services > Catalogs > Catalog Items accordion
            2. Configuration > Add a New Catalog Item, choose some Catalog Item type
            3. Fill in the required info and click on Add button
            4. After successfully saving the Catalog Item, click on the same Catalog Item in list
        expectedResults:
            1.
            2.
            3.
            4. Catalog Item's summary screen should appear
    """
    with LogValidator("/var/www/miq/vmdb/log/evm.log",
                      failure_patterns=[".*ERROR.*"]).waiting(timeout=120):
        view = navigate_to(appliance.collections.catalog_items, "All")
        for cat_item in view.table:
            if cat_item[2].text == generic_catalog_item.name:
                cat_item[2].click()
                break
        assert view.title.text == f'Service Catalog Item "{generic_catalog_item.name}"'
def test_error_message_azure(order_stack):
    """
    Starting with 5.8, error messages generated by Azure when provisioning
    from an orchestration template will be included in the Last Message
    field.  Users will no longer have to drill down to Stack/Resources to
    figure out the error.
    This is currently working correctly as of 5.8.0.12

    Bugzilla:
        1410794

    Polarion:
        assignee: anikifor
        casecomponent: Cloud
        caseimportance: medium
        initialEstimate: 1/4h
        setup: Easiest way to do this is to provision an Azure VM from an orchestration
               catalog item and just add a short password like "test".  This will
               fail on the Azure side and the error will be displayed in the request
               details.
        startsin: 5.8
    """
    msg = "Orchestration stack deployment error: The supplied password must be"
    with LogValidator(
            '/var/www/miq/vmdb/log/evm.log',
            matched_patterns=[msg],
    ).waiting(timeout=450):
        provision_request = order_stack
        provision_request.wait_for_request(method='ui')
        assert not provision_request.is_succeeded()
def test_dynamic_dropdown_refresh_load(
        appliance, import_datastore, import_data,
        generic_catalog_item_with_imported_dialog, context):
    """
    Bugzilla:
        1576873
    Polarion:
        assignee: nansari
        startsin: 5.10
        casecomponent: Services
        initialEstimate: 1/16h
    """
    catalog_item, _, ele_label = generic_catalog_item_with_imported_dialog

    service_catalogs = ServiceCatalogs(appliance, catalog_item.catalog,
                                       catalog_item.name)

    with appliance.context.use(context):
        if context == ViaSSUI:
            view = ssui_nav(service_catalogs, "Details")
        else:
            view = navigate_to(service_catalogs, "Order")
        with LogValidator("/var/www/miq/vmdb/log/automation.log",
                          matched_patterns=['We are in B'],
                          failure_patterns=["We are in A"
                                            ]).waiting(timeout=120):
            view.fields(ele_label).dropdown.fill("b")
def test_provider_details_page_refresh_after_clear_cookies(
    appliance, request, setup_provider, provider
):
    """
    Bugzilla:
        1642948
    Polarion:
        assignee: pvala
        casecomponent: WebUI
        caseimportance: medium
        initialEstimate: 1/12h
        testSteps:
            1. Navigate to a provider's Details page
            2. Reboot the appliance
            3. Click a button or refresh the page or do something on the page and see what happens.
        expectedResults:
            1.
            2.
            3. You'll be redirected to the Login Page.
    """
    view = navigate_to(provider, "Details")
    appliance.reboot()

    # When the test runs a second time for a cloud provider it raises an error;
    # this finalizer is a workaround for it.
    request.addfinalizer(lambda: navigate_to(appliance.server, "LoggedIn"))

    with LogValidator(
        "/var/www/miq/vmdb/log/production.log", failure_patterns=[r".*FATAL.*"]
    ).waiting():
        view.browser.refresh()

    login_view = appliance.server.create_view(LoginPage, wait="40s")
    assert login_view.is_displayed
def test_retire_vm_now(setup_provider, create_vm, new_user):
    """
    Bugzilla:
        1747159

    Polarion:
        assignee: ghubale
        initialEstimate: 1/8h
        caseposneg: positive
        casecomponent: Automate
        setup:
            1. Add infrastructure provider
            2. Provision VM
            3. Create new user with group EvmGroup-vm_user
        testSteps:
            1. Select 'Retire this vm' from the UI to retire the VM
            2. Check evm.logs
        expectedResults:
            1. VM should be retired
            2. No errors in evm logs
    """
    with new_user:
        with LogValidator(
                "/var/www/miq/vmdb/log/evm.log",
                failure_patterns=[
                    ".*ERROR.*NoMethodError]: undefined method `tenant_id'.*"
                ]).waiting(timeout=720):
            create_vm.retire()
            assert create_vm.wait_for_vm_state_change(desired_state="retired",
                                                      timeout=720,
                                                      from_details=True)
def test_dynamic_dialog_field_to_static_field(
        appliance, import_datastore,
        generic_catalog_item_with_imported_dialog):
    """
    Bugzilla:
        1614436
    Polarion:
        assignee: nansari
        casecomponent: Services
        testtype: functional
        initialEstimate: 1/4h
        startsin: 5.10
    """
    auto_log = '/var/www/miq/vmdb/log/automation.log'
    catalog_item, sd, ele_label = generic_catalog_item_with_imported_dialog
    service_catalogs = ServiceCatalogs(appliance, catalog_item.catalog,
                                       catalog_item.name)

    navigate_to(sd, "Edit")
    # update dynamic field to static
    view = appliance.browser.create_view(EditElementView)
    view.element.edit_element(ele_label)
    view.element_information.dynamic_chkbox.fill(False)
    view.ele_save_button.click()
    view.save_button.click()

    # The text area refresh should not appear in the automation log
    with LogValidator(auto_log, failure_patterns=["TEXT AREA REFRESH DIALOG"
                                                  ]).waiting(timeout=120):
        navigate_to(service_catalogs, "Order")
def test_automate_method_with_dialog(request, appliance, catalog,
                                     setup_dynamic_dialog):
    """
    Polarion:
        assignee: ghubale
        initialEstimate: 1/15h
        caseimportance: medium
        caseposneg: positive
        testtype: functional
        startsin: 5.7
        casecomponent: Automate
        tags: automate
    """
    catalog_item = appliance.collections.catalog_items.create(
        appliance.collections.catalog_items.GENERIC,
        name=fauxfactory.gen_alphanumeric(),
        description="my catalog",
        display_in=True,
        catalog=catalog,
        dialog=setup_dynamic_dialog.label)
    request.addfinalizer(catalog_item.delete_if_exists)
    with LogValidator("/var/www/miq/vmdb/log/automation.log",
                      matched_patterns=[".*Hello World.*"
                                        ]).waiting(timeout=120):
        service_catalogs = ServiceCatalogs(appliance,
                                           catalog=catalog_item.catalog,
                                           name=catalog_item.name)
        provision_request = service_catalogs.order()
        provision_request.wait_for_request()
        request.addfinalizer(provision_request.remove_request)
def test_dialog_dynamic_field_refresh_in_log(appliance, import_datastore, import_data,
                                             generic_catalog_item_with_imported_dialog):
    """
    Bugzilla:
        1559999

    Polarion:
        assignee: nansari
        startsin: 5.10
        casecomponent: Services
        initialEstimate: 1/16h
        setup:
            1. Import Datastore and dialog
        testSteps:
            1. Add service catalog with above imported dialog
            2. Navigate to order page of service
            3. In service Order page
            4. Click on "refresh" for field 2
        expectedResults:
            1.
            2.
            3.
            4. Only text Field2 should be refreshed in automation log
    """
    catalog_item, sd, ele_label = generic_catalog_item_with_imported_dialog

    service_catalogs = ServiceCatalogs(appliance, catalog_item.catalog, catalog_item.name)
    view = navigate_to(service_catalogs, "Order")
    with LogValidator(
            "/var/www/miq/vmdb/log/automation.log",
            matched_patterns=['.*Refreshing field : RefreshField2.*'],
            failure_patterns=[".*Refreshing field : RefreshField1.*"]).waiting(timeout=120):
        view.fields('Refresh2').refresh.click()
def test_database_wildcard_should_work_and_be_included_in_the_query(
        appliance, request, provider):
    """ Database wildcard should work and be included in the query
    Bugzilla:
        1581853

    Polarion:
        assignee: pvala
        casecomponent: Rest
        testtype: functional
        initialEstimate: 1/4h
        startsin: 5.10
        testSteps:
            1. Create a VM with some name, for e.g test-25-xyz.
            2. Filter VM with wild character and substring of the name, for e.g. "%25%"
        expectedResults:
            1. VM is created successfully.
            2. VM is obtained without any error.
    """
    vm_name = _vm(request,
                  provider,
                  appliance,
                  name=fauxfactory.gen_alpha(start="test-25-", length=12))
    with LogValidator("/var/www/miq/vmdb/log/production.log",
                      failure_patterns=[".*FATAL.*"]).waiting(timeout=20):
        result = appliance.rest_api.collections.vms.filter(
            Q("name", "=", "%25%"))

    assert result.subcount
    assert vm_name in [vm.name for vm in result.resources]
def test_read_dialog_timeout_ec2_stack(order_stack):
    """
    Bugzilla:
        1698439
    Polarion:
        assignee: nansari
        startsin: 5.10
        casecomponent: Services
        initialEstimate: 1/6h
        testSteps:
            1. create an aws template with an optional value "timeout"
            2. create a dialog that will offer an option to overwrite "timeout"
               with a custom value typed at input
            3. Navigate to order page of service
            4. provision using a non-zero value in timeout
        expectedResults:
            1.
            2.
            3.
            4. the value input should be passed
    """
    msg = "<AEMethod groupsequencecheck>.*dialog_stack_timeout: 20"
    with LogValidator(
            '/var/www/miq/vmdb/log/evm.log',
            matched_patterns=[msg],
    ).waiting(timeout=450):
        provision_request = order_stack
        provision_request.wait_for_request(method='ui')
        provision_request.is_succeeded()
def test_external_auth(auth_type, ipa_crud, app_creds):
    evm_tail = LogValidator('/var/www/miq/vmdb/log/evm.log',
                            matched_patterns=['.*{} to true.*'.format(auth_type)],
                            hostname=ipa_crud.hostname,
                            username=app_creds['sshlogin'],
                            password=app_creds['password'])
    evm_tail.fix_before_start()
    command = 'appliance_console_cli --extauth-opts="/authentication/{}=true"'.format(auth_type)
    ipa_crud.ssh_client.run_command(command)
    evm_tail.validate_logs()

    evm_tail = LogValidator('/var/www/miq/vmdb/log/evm.log',
                            matched_patterns=['.*{} to false.*'.format(auth_type)],
                            hostname=ipa_crud.hostname,
                            username=app_creds['sshlogin'],
                            password=app_creds['password'])

    evm_tail.fix_before_start()
    command2 = 'appliance_console_cli --extauth-opts="/authentication/{}=false"'.format(auth_type)
    ipa_crud.ssh_client.run_command(command2)
    evm_tail.validate_logs()
def test_action_prevent_ssa(request, appliance, configure_fleecing, vm, vm_on, policy_for_testing):
    """Tests preventing Smart State Analysis.

    This test sets the policy that prevents VM analysis.

    Bugzilla:
        1433084

    Metadata:
        test_flag: actions, provision

    Polarion:
        assignee: jdupuy
        initialEstimate: 1/4h
        casecomponent: Control
    """
    policy_for_testing.assign_actions_to_event("VM Analysis Request",
        ["Prevent current event from proceeding"])

    @request.addfinalizer
    def _cleanup():
        policy_for_testing.unassign_events("VM Analysis Request")

    policy_result = LogValidator(
        "/var/www/miq/vmdb/log/policy.log",
        matched_patterns=[
            '.*Prevent current event from proceeding.*VM Analysis Request.*{}'.format(vm.name)
        ]
    )
    policy_result.fix_before_start()

    wait_for_ssa_enabled(vm)

    try:
        do_scan(vm)
    except TimedOutError:
        policy_result.validate_logs()
    else:
        pytest.fail("CFME did not prevent analysing the VM {}".format(vm.name))
def test_task_id_for_method_automation_log(request, generic_catalog_item):
    """
    Polarion:
        assignee: ghubale
        initialEstimate: 1/30h
        caseimportance: medium
        caseposneg: positive
        testtype: functional
        startsin: 5.10
        casecomponent: Automate
        tags: automate
        setup:
            1. Add existing or new automate method to newly created domain or create generic service
        testSteps:
            1. Run that instance using simulation or order service catalog item
            2. See automation log
        expectedResults:
            1.
            2. Task id should be included in automation log for method logs.

    Bugzilla:
        1592428
    """
    result = LogValidator(
        "/var/www/miq/vmdb/log/automation.log", matched_patterns=[".*Q-task_id.*"]
    )
    result.fix_before_start()
    service_request = generic_catalog_item.appliance.rest_api.collections.service_templates.get(
        name=generic_catalog_item.name
    ).action.order()
    request.addfinalizer(service_request.action.delete)

    # Need to wait until automation logs with 'Q-task_id' are generated, which happens after the
    # service_request becomes active.
    wait_for(lambda: service_request.request_state == "active", fail_func=service_request.reload,
             timeout=60, delay=3)
    result.validate_logs()
def test_appliance_console_external_auth_all(configured_appliance):
    """ Commands:
    1. 'ap' launches appliance_console,
    2. RETURN clears info screen,
    3. '13' change ext auth options,
    4. 'auth_type' auth type to change,
    5. '5' apply changes.

    Polarion:
        assignee: mnadeem
        caseimportance: high
        casecomponent: Auth
        initialEstimate: 1/4h
    """

    evm_tail = LogValidator('/var/www/miq/vmdb/log/evm.log',
                            matched_patterns=['.*sso_enabled to true.*',
                                              '.*saml_enabled to true.*',
                                              '.*local_login_disabled to true.*'],
                            hostname=configured_appliance.hostname)
    evm_tail.fix_before_start()
    command_set = ('ap', RETURN, TimedCommand('13', 20), '1', '2', TimedCommand('5', 20),
                   RETURN, RETURN)
    configured_appliance.appliance_console.run_commands(command_set)
    evm_tail.validate_logs()

    evm_tail = LogValidator('/var/www/miq/vmdb/log/evm.log',
                            matched_patterns=['.*sso_enabled to false.*',
                                              '.*saml_enabled to false.*',
                                              '.*local_login_disabled to false.*'],
                            hostname=configured_appliance.hostname)

    evm_tail.fix_before_start()
    command_set = ('ap', RETURN, TimedCommand('13', 20), '1', '2', TimedCommand('5', 20),
                   RETURN, RETURN)
    configured_appliance.appliance_console.run_commands(command_set)
    evm_tail.validate_logs()
def test_codename_in_log(appliance):
    """
    Check whether the logs contain a mention of the appliance codename.

    Polarion:
        assignee: jhenner
        casecomponent: Appliance
        initialEstimate: 1/60h
    """
    log = '/var/www/miq/vmdb/log/evm.log'
    lv = LogValidator(log,
                      matched_patterns=[r'.*Codename: \w+$'],
                      hostname=appliance.hostname)
    lv.fix_before_start()
    appliance.ssh_client.run_command('appliance_console_cli --server=restart')

    @wait_for_decorator
    def codename_in_log():
        try:
            lv.validate_logs()
        except pytest.fail.Exception:
            return False
        else:
            return True
def test_send_email_method(smtp_test, klass):
    """
    Polarion:
        assignee: ghubale
        initialEstimate: 1/20h
        startsin: 5.10
        casecomponent: Automate

    Bugzilla:
        1688500
        1702304
    """
    mail_to = fauxfactory.gen_email()
    mail_cc = fauxfactory.gen_email()
    mail_bcc = fauxfactory.gen_email()

    # Ruby code to send emails
    script = (
        'to = "{mail_to}"\n'
        'subject = "Hello"\n'
        'body = "Hi"\n'
        'bcc = "{mail_bcc}"\n'
        'cc = "{mail_cc}"\n'
        'content_type = "message"\n'
        'from = "*****@*****.**"\n'
        "$evm.execute(:send_email, to, from, subject, body, {{:bcc => bcc, :cc => cc,"
        ":content_type => content_type}})"
    )
    script = script.format(mail_cc=mail_cc, mail_bcc=mail_bcc, mail_to=mail_to)

    # Adding a schema field used to execute the send_email method
    klass.schema.add_fields({'name': 'execute', 'type': 'Method', 'data_type': 'String'})

    # Adding method - send_email for sending mails
    method = klass.methods.create(
        name=fauxfactory.gen_alphanumeric(),
        display_name=fauxfactory.gen_alphanumeric(),
        location='inline',
        script=script)

    # Adding instance to call automate method - send_email
    instance = klass.instances.create(
        name=fauxfactory.gen_alphanumeric(),
        display_name=fauxfactory.gen_alphanumeric(),
        description=fauxfactory.gen_alphanumeric(),
        fields={'execute': {'value': method.name}}
    )

    result = LogValidator(
        "/var/www/miq/vmdb/log/evm.log",
        matched_patterns=[
            '.*:to=>"{mail_to}".*:cc=>"{mail_cc}".*:bcc=>"{mail_bcc}".*'.format(
                mail_to=mail_to, mail_cc=mail_cc, mail_bcc=mail_bcc
            )
        ],
    )
    result.fix_before_start()

    # Executing automate method - send_email using simulation
    simulate(
        appliance=klass.appliance,
        attributes_values={
            "namespace": klass.namespace.name,
            "class": klass.name,
            "instance": instance.name,
        },
        message="create",
        request="Call_Instance",
        execute_methods=True,
    )
    result.validate_logs()

    # TODO(GH-8820): Once fixed, also check mails sent to the 'cc' and 'bcc' recipients
    # Check whether the mail sent via automate method really arrives
    wait_for(lambda: len(smtp_test.get_emails(to_address=mail_to)) > 0, num_sec=60, delay=10)
def test_embedded_ansible_executed_with_data_upon_event(request,
                                                        ansible_repository,
                                                        copy_ae_instance_to_new_domain,
                                                        networks_provider):
    """
    Test that Nuage events trigger Embedded Ansible automation and that playbook has access to
    authentication attributes and event data.

    Specifically, we copy AE Instance 'ManageIQ/System/Event/EmsEvent/Nuage/nuage_enterprise_create'
    from the default domain into our own domain and customize its 'meth5' attribute to invoke
    Embedded Ansible playbook which prints authentication attributes and event data into evm.log.
    This test then triggers a 'nuage_enterprise_create' event and waits for appropriate line
    to appear in evm.log.

    Prerequisites:
    The following content needs to be present in cfme_data.yaml in order to fetch the
    correct Ansible repository:

    ansible_links:
      playbook_repositories:
        embedded_ansible: https://github.com/xlab-si/integration-tests-nuage-automation.git
    """
    ae_instance = copy_ae_instance_to_new_domain
    ae_class = ae_instance.klass
    ae_method = ae_class.methods.create(
        name='printout',
        location='playbook',
        repository=ansible_repository.name,
        playbook='printout.yaml',
        machine_credential='CFME Default Credential',
        logging_output='Always')

    username = random_name()
    with update(ae_instance):
        ae_instance.fields = {
            'nuage_username': {'value': username},
            'nuage_enterprise': {'value': 'csp'},
            'nuage_url': {'value': 'https://nuage:8443'},
            'nuage_api_version': {'value': 'v5_0'},
            'meth5': {'value': ae_method.name}
        }

    enterprise = networks_provider.mgmt.create_enterprise()
    request.addfinalizer(lambda: networks_provider.mgmt.delete_enterprise(enterprise))
    evm_tail = LogValidator(
        '/var/www/miq/vmdb/log/evm.log',
        matched_patterns=[
            r'.*I confirm that username is {} and event is raised for {}.*'.format(username,
                                                                                   enterprise.id)
        ]
    )
    evm_tail.fix_before_start()

    # LogValidator.validate_logs() raises pytest's `Failed` exception, which inherits directly
    # from `BaseException`. When the 'handle_exception' parameter is used, wait_for only
    # swallows exceptions derived from `Exception`, so `Failed` slips through uncaught;
    # hence the boolean helper below.
    def validate_logs():
        try:
            evm_tail.validate_logs()
            return True
        except Failed:
            return False

    wait_for(validate_logs, timeout=300, delay=10, fail_condition=False)
def test_pause_and_resume_single_provider_api(appliance, provider, from_collections, app_creds,
                                              soft_assert, request):
    """
    Test enabling and disabling a single provider via the CFME API through the ManageIQ API Client
    collection and entity classes.

    RFE: BZ 1507812

    Polarion:
        assignee: juwatts
        caseimportance: medium
        initialEstimate: 1/6h
    """
    evm_tail_disable = LogValidator('/var/www/miq/vmdb/log/evm.log',
                                    matched_patterns=[r'.*Disabling EMS \[{}\] id \[{}\].*'
                                                      .format(provider.name, str(provider.id))],
                                    hostname=appliance.hostname,
                                    username=app_creds['sshlogin'],
                                    password=app_creds['password'])
    evm_tail_disable.fix_before_start()
    if from_collections:
        rep_disable = appliance.collections.containers_providers.pause_providers(provider)
        # collections class returns a list of dicts containing the API response.
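        # Shape assumed from the assertion below (not a captured API dump):
        #   [{'success': True, ...}]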
        soft_assert(rep_disable[0].get('success'), 'Disabling provider {} failed'
                    .format(provider.name))
    else:
        rep_disable = provider.pause()
        # entity class returns a dict containing the API response
        soft_assert(rep_disable.get('success'), 'Disabling provider {} failed'
                    .format(provider.name))
    soft_assert(not provider.is_provider_enabled, 'Provider {} is still enabled'
                .format(provider.name))
    evm_tail_disable.validate_logs()
    # Verify all monitoring workers have been shut down
    assert wait_for(lambda: not check_ems_state_in_diagnostics(appliance, provider))
    # Create a project on the OpenShift provider via wrapanapi
    project_name = fauxfactory.gen_alpha(8).lower()
    provider.mgmt.create_project(name=project_name)

    @request.addfinalizer
    def _finalize():
        provider.mgmt.delete_project(name=project_name)

    project = appliance.collections.container_projects.instantiate(name=project_name,
                                                                   provider=provider)
    # Trigger an appliance refresh
    provider.refresh_provider_relationships()
    soft_assert(not project.exists, 'Project {} exists even though provider has been disabled'
                .format(project_name))
    evm_tail_enable = LogValidator('/var/www/miq/vmdb/log/evm.log',
                                   matched_patterns=[r'.*Enabling EMS \[{}\] id \[{}\].*'
                                                     .format(provider.name, str(provider.id))],
                                   hostname=appliance.hostname,
                                   username=app_creds['sshlogin'],
                                   password=app_creds['password'])
    evm_tail_enable.fix_before_start()
    if from_collections:
        rep_enable = appliance.collections.containers_providers.resume_providers(provider)
        soft_assert(rep_enable[0].get('success'), 'Enabling provider {} failed'
                    .format(provider.name))
    else:
        rep_enable = provider.resume()
        soft_assert(rep_enable.get('success'), 'Enabling provider {} failed'.format(provider.name))
    soft_assert(provider.is_provider_enabled, 'Provider {} is still disabled'.format(provider.name))
    evm_tail_enable.validate_logs()
    provider.refresh_provider_relationships()
    soft_assert(project.exists, 'Project {} does not exist even though provider has been enabled'
                .format(project_name))
def test_blacklisted_container_events(request, appliance, provider, app_creds):
    """
        Test that verifies that container events can be blacklisted.

        Polarion:
            assignee: juwatts
            caseimportance: medium
            casecomponent: Containers
            initialEstimate: 1/6h
    """

    project_name = fauxfactory.gen_alpha(8).lower()

    # Create a project namespace
    provider.mgmt.create_project(name=project_name)
    provider.mgmt.wait_project_exist(name=project_name)

    request.addfinalizer(lambda: appliance_cleanup(provider=provider,
                                                   appliance=appliance,
                                                   namespace=project_name))

    evm_tail_no_blacklist = LogValidator(
        '/var/www/miq/vmdb/log/evm.log',
        matched_patterns=[r'.*event_type=>"POD_CREATED".*'],
        hostname=appliance.hostname,
        username=app_creds['sshlogin'],
        password=app_creds['password'])
    evm_tail_no_blacklist.fix_before_start()

    create_pod(provider=provider, namespace=project_name)

    rails_result_no_blacklist = get_blacklisted_event_names(appliance)

    assert "POD_CREATED" not in rails_result_no_blacklist

    evm_tail_no_blacklist.validate_logs()

    delete_pod(provider=provider, namespace=project_name)

    appliance.update_advanced_settings(
        {"ems": {"ems_openshift": {"blacklisted_event_names": ["POD_CREATED"]}}}
    )
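    # The dict above maps onto the appliance's advanced settings YAML. The expected
    # resulting shape (an assumption inferred from the argument, not a captured
    # config dump) is roughly:
    #
    #   ems:
    #     ems_openshift:
    #       blacklisted_event_names:
    #       - POD_CREATED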
    appliance.evmserverd.restart()
    appliance.wait_for_web_ui()

    rails_result_blacklist = get_blacklisted_event_names(appliance)

    assert "POD_CREATED" in rails_result_blacklist

    evm_tail_blacklist = LogValidator(
        '/var/www/miq/vmdb/log/evm.log',
        failure_patterns=[r'.*event_type=>"POD_CREATED".*'],
        hostname=appliance.hostname,
        username=app_creds['sshlogin'],
        password=app_creds['password'])

    evm_tail_blacklist.fix_before_start()

    create_pod(provider=provider, namespace=project_name)

    evm_tail_blacklist.validate_logs()

    delete_pod(provider=provider, namespace=project_name)

    restore_advanced_settings(appliance=appliance)
    rails_destroy_blacklist = appliance.ssh_client.run_rails_console(
        "BlacklistedEvent.where(:event_name => 'POD_CREATED').destroy_all")
    assert rails_destroy_blacklist.success
    rails_result_default = get_blacklisted_event_names(appliance)

    assert "POD_CREATED" not in rails_result_default

    appliance.evmserverd.restart()
    appliance.wait_for_web_ui()

    evm_tail_no_blacklist.fix_before_start()

    create_pod(provider=provider, namespace=project_name)

    # After restarting evmserverd there is a brief delay before logging resumes, so
    # validate_logs() could run before the log event was written, failing the test.
    # validate_logs() calls pytest.fail(), whose `Failed` exception does not inherit from
    # `Exception` and so cannot be retried via wait_for(handle_exception=True) directly
    # (see the helper sketch after this function). A short sleep is the simplest workaround.
    time.sleep(10)

    evm_tail_no_blacklist.validate_logs()
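
# A minimal sketch of a polling alternative to the fixed sleep above, mirroring the
# helper used in test_embedded_ansible_executed_with_data_upon_event. It relies on the
# `pytest` and `wait_for` imports this module already uses; the helper itself is
# illustrative, not part of the framework.
def _logs_validated(log_validator):
    """Return True once the validator's matched patterns have appeared in the log."""
    try:
        log_validator.validate_logs()
        return True
    except pytest.fail.Exception:  # `Failed`, raised by pytest.fail()
        return False

# Usage sketch, assuming `lv` is a LogValidator that has already been started:
#   wait_for(lambda: _logs_validated(lv), timeout=60, delay=5, fail_condition=False)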