def a_provider(request):
    """Set up (or reuse) one infra provider with complete provisioning data.

    Requires the provisioning section to define template, host, datastore,
    vlan and catalog_item_type; skips the test when no provider matches.
    """
    provisioning_keys = ('template', 'host', 'datastore', 'vlan', 'catalog_item_type')
    prov_filter = ProviderFilter(
        classes=[InfraProvider],
        required_fields=[['provisioning', key] for key in provisioning_keys])
    return setup_one_or_skip(request, filters=[prov_filter])
示例#2
0
def setup_one_or_skip(request, filters=None, use_global_filters=True):
    """ Sets up one of matching providers or skips the test

    Args:
        filters: List of :py:class:`ProviderFilter` or None
        request: Needed for logging a potential skip correctly in artifactor
        use_global_filters: Will apply global filters as well if `True`, will not otherwise
    """

    filters = filters or []
    providers = list_providers(filters=filters, use_global_filters=use_global_filters)

    # All providers filtered out?
    if not providers:
        global_providers = list_providers(filters=None, use_global_filters=use_global_filters)
        if not global_providers:
            # This can also mean that there simply are no providers in the yamls!
            pytest.skip("No provider matching global filters found")
        else:
            pytest.skip("No provider matching test-specific filters found")

    # Are all providers marked as problematic?
    if _problematic_providers.issuperset(providers):
        skip_msg = "All providers marked as problematic: {}".format([p.key for p in providers])
        _artifactor_skip_providers(request, providers, skip_msg)

    # If there is a provider already set up matching the user's requirements, reuse it
    for provider in providers:
        if provider.exists:
            return provider

    # If we have more than one provider, we create two separate groups of providers, preferred
    # and not preferred, that we shuffle separately and then join together
    if len(providers) > 1:
        only_preferred_filter = ProviderFilter(required_fields=[("do_not_prefer", True)],
                                               inverted=True)
        preferred_providers = list_providers(
            filters=filters + [only_preferred_filter], use_global_filters=use_global_filters)
        not_preferred_providers = [p for p in providers if p not in preferred_providers]
        random.shuffle(preferred_providers)
        random.shuffle(not_preferred_providers)
        providers = preferred_providers + not_preferred_providers

    # Try to set up one of matching providers
    non_existing = [prov for prov in providers if not prov.exists]
    for provider in non_existing:
        if _setup_provider_verbose(request, provider):
            return provider

    # BUG FIX: the original had a stray comma instead of an actual .format()
    # call, so skip_msg was a (template, list) tuple rather than a message
    # string. Build the real formatted string.
    skip_msg = "Failed to set up any matching providers: {}".format(
        [p.key for p in providers])
    _artifactor_skip_providers(request, non_existing, skip_msg)
示例#3
0
def pytest_configure(config):
    """ Filters the list of providers as part of pytest configuration

    Note:
        Additional filter is added to the global_filters dict of active filters here.
    """

    cmd_filter = config.getvalueorskip('use_provider')
    if not cmd_filter:
        # No --use-provider given on the command line: fall back to 'default'.
        cmd_filter = ["default"]

    # conjunctive=False: a provider passes if it matches by key OR by tag.
    new_filter = ProviderFilter(keys=cmd_filter, required_tags=cmd_filter, conjunctive=False)
    global_filters['use_provider'] = new_filter

    # Lazy %-style logging args instead of eager .format(): the message is
    # only rendered when DEBUG logging is actually enabled.
    logger.debug('Filtering providers with %s, leaves %s',
                 cmd_filter, [prov.key for prov in list_providers()])
示例#4
0
def providers_by_class(metafunc, classes, required_fields=None):
    """ Gets providers by their class

    Args:
        metafunc: Passed in by pytest
        classes: List of classes to fetch
        required_fields: See :py:class:`cfme.utils.provider.ProviderFilter`

    Usage:
        # In the function itself
        def pytest_generate_tests(metafunc):
            argnames, argvalues, idlist = testgen.providers_by_class(
                [GCEProvider, AzureProvider], required_fields=['provisioning']
            )
        metafunc.parametrize(argnames, argvalues, ids=idlist, scope='module')

        # Using the parametrize wrapper
        pytest_generate_tests = testgen.parametrize([GCEProvider], scope='module')
    """
    # Delegate to the generic providers() with a single class-based filter.
    return providers(
        metafunc,
        filters=[ProviderFilter(classes=classes, required_fields=required_fields)])
示例#5
0
def providers(metafunc, filters=None):
    """ Gets providers based on given (+ global) filters

    Note:
        Using the default 'function' scope, each test will be run individually for each provider
        before moving on to the next test. To group all tests related to single provider together,
        parametrize tests in the 'module' scope.

    Note:
        testgen for providers now requires the usage of test_flags for collection to work.
        Please visit http://cfme-tests.readthedocs.org/guides/documenting.html#documenting-tests
        for more details.
    """
    # Work on a copy so the caller's filter list is never mutated.
    active_filters = list(filters or [])
    argnames, argvalues, idlist = [], [], []

    # Turn the test's documented test_flag (if any) into an extra ProviderFilter.
    meta = getattr(metafunc.function, 'meta', None)
    flag_str = getattr(meta, 'kwargs', {}).get('from_docs', {}).get('test_flag')
    if flag_str:
        active_filters.append(ProviderFilter(required_flags=flag_str.split(',')))

    for provider in list_providers(active_filters):
        argvalues.append([provider])
        # Provider keys make the parametrized test ids human-readable.
        idlist.append(provider.key)
        # Register the 'provider' argname once, if the test requests it.
        if 'provider' in metafunc.fixturenames and 'provider' not in argnames:
            metafunc.function = pytest.mark.uses_testgen()(metafunc.function)
            argnames.append('provider')
        # Under sauce, a single provider is enough.
        if metafunc.config.getoption('sauce'):
            break

    return argnames, argvalues, idlist
def a_provider(request):
    """Set up (or reuse) an infra provider whose yaml defines a 'large' field.

    Skips the test when no matching provider is available.

    BUG FIX: the original discarded setup_one_or_skip's result, so the fixture
    yielded None; the provider is now returned, matching the sibling
    a_provider fixtures in this codebase.
    """
    pf = ProviderFilter(classes=[InfraProvider], required_fields=['large'])
    return setup_one_or_skip(request, filters=[pf])
from cfme.infrastructure.provider.scvmm import SCVMMProvider
from cfme.rest.gen_data import a_provider as _a_provider
from cfme.rest.gen_data import vm as _vm
from cfme.web_ui import InfoBlock
from utils import version, testgen
from utils.appliance.implementations.ui import navigate_to
from utils.generators import random_vm_name
from utils.log import logger
from utils.providers import ProviderFilter
from utils.wait import wait_for

pytestmark = [
    pytest.mark.tier(2),
    pytest.mark.usefixtures("setup_provider_modscope")
]
# Inverted filter: exclude SCVMM from the parametrized provider set.
not_scvmm = ProviderFilter(classes=[SCVMMProvider],
                           inverted=True)  # scvmm doesn't provide events
all_prov = ProviderFilter(classes=[InfraProvider])
# Parametrize over all infra providers minus SCVMM; module scope keeps all
# tests for a single provider grouped together.
pytest_generate_tests = testgen.generate(gen_func=testgen.providers,
                                         filters=[not_scvmm, all_prov],
                                         scope='module')


@pytest.fixture(scope="module")
def new_vm(request, provider):
    """Module-scoped fixture: ensure a 'timelines-*' VM exists on the provider.

    The VM is removed from the provider at module teardown via the finalizer.

    NOTE(review): no ``return vm`` is visible here — presumably the end of the
    fixture was lost in extraction; confirm against the original file.
    """
    vm = VM.factory(random_vm_name("timelines", max_length=16), provider)

    # Register cleanup before creation so the VM is removed even if deploy fails.
    request.addfinalizer(vm.delete_from_provider)

    # Only deploy when the VM does not already exist on the provider side.
    if not provider.mgmt.does_vm_exist(vm.name):
        logger.info("deploying %s on provider %s", vm.name, provider.key)
        vm.create_on_provider(allow_skip="default", find_in_cfme=True)
示例#8
0
def a_provider(request):
    """Set up (or reuse) one VMware or RHEV provider; skip if none match."""
    return setup_one_or_skip(
        request,
        filters=[ProviderFilter(classes=[VMwareProvider, RHEVMProvider])])
示例#9
0
def providers(metafunc, filters=None, selector=ALL):
    """ Gets providers based on given (+ global) filters

    Args:
        metafunc: Passed in by pytest
        filters: List of :py:class:`ProviderFilter` or None
        selector: One of ALL, ONE, LATEST, ONE_PER_TYPE, ONE_PER_CATEGORY or
            ONE_PER_VERSION, controlling how the matching providers are
            narrowed before parametrization.

    Note:
        Using the default 'function' scope, each test will be run individually for each provider
        before moving on to the next test. To group all tests related to single provider together,
        parametrize tests in the 'module' scope.

    Note:
        testgen for providers now requires the usage of test_flags for collection to work.
        Please visit http://cfme-tests.readthedocs.org/guides/documenting.html#documenting-tests
        for more details.
    """
    filters = filters or []
    argnames = []
    argvalues = []
    idlist = []

    # Obtains the test's flags in form of a ProviderFilter
    meta = getattr(metafunc.function, 'meta', None)
    test_flag_str = getattr(meta, 'kwargs', {}).get('from_docs', {}).get('test_flag')
    if test_flag_str:
        test_flags = test_flag_str.split(',')
        flags_filter = ProviderFilter(required_flags=test_flags)
        filters = filters + [flags_filter]

    potential_providers = list_providers(filters)

    # Narrow the provider list according to the requested selector.
    if selector == ONE:
        allowed_providers = [potential_providers[0]]
    elif selector == LATEST:
        # Keep the provider with the highest yaml 'version' (LooseVersion order).
        allowed_providers = [sorted(
            potential_providers, key=lambda k:LooseVersion(
                str(k.data.get('version', 0))), reverse=True
        )[0]]
    elif selector == ONE_PER_TYPE:
        types = set()

        # Record each type as it is first seen; subsequent providers of the
        # same type are filtered out by the comprehension's condition.
        def add_prov(prov):
            types.add(prov.type)
            return prov

        allowed_providers = [
            add_prov(prov) for prov in potential_providers if prov.type not in types
        ]
    elif selector == ONE_PER_CATEGORY:
        categories = set()

        # Keep only the first provider of each category.
        def add_prov(prov):
            categories.add(prov.category)
            return prov

        allowed_providers = [
            add_prov(prov) for prov in potential_providers if prov.category not in categories
        ]
    elif selector == ONE_PER_VERSION:
        versions = set()

        # Keep only the first provider of each distinct 'version' value.
        def add_prov(prov):
            versions.add(prov.data.get('version', 0))
            return prov

        allowed_providers = [
            add_prov(prov) for prov in potential_providers if prov.data.get(
                'version', 0) not in versions
        ]
    else:
        # ALL (or any unrecognized selector): keep every matching provider.
        allowed_providers = potential_providers

    for provider in allowed_providers:
        argvalues.append([provider])
        # Use the provider key for idlist, helps with readable parametrized test output
        idlist.append(provider.key)
        # Add provider to argnames if missing
        if 'provider' in metafunc.fixturenames and 'provider' not in argnames:
            metafunc.function = pytest.mark.uses_testgen()(metafunc.function)
            argnames.append('provider')
        if metafunc.config.getoption('sauce') or selector == ONE:
            break

    return argnames, argvalues, idlist
示例#10
0
def a_provider(request):
    """Set up (or reuse) a single VMware provider, skipping when none match."""
    vmware_only = ProviderFilter(classes=[VMwareProvider])
    return setup_one_or_skip(request, filters=[vmware_only])
from cfme.common.vm import VM
from cfme.infrastructure.provider import InfraProvider
from cfme.web_ui import toolbar as tb
from utils import testgen
from utils.blockers import BZ
from utils.generators import random_vm_name
from utils.log import logger
from utils.providers import ProviderFilter
from utils.timeutil import parsetime
from utils.wait import wait_for
from utils.version import pick, current_version

# Parametrize over cloud/infra providers flagged for both provisioning and
# retirement testing.
pytest_generate_tests = testgen.generate(
    gen_func=testgen.providers,
    filters=[
        ProviderFilter(classes=[CloudInfraProvider],
                       required_flags=['provision', 'retire'])
    ])

pytestmark = [
    pytest.mark.usefixtures('setup_provider'),
    pytest.mark.tier(2), pytest.mark.long_running
]

# (id, UI string) pairs describing retirement-warning options; 'id' names the
# test case, 'string' is the value used in the UI.
RetirementWarning = namedtuple('RetirementWarning', ['id', 'string'])

warnings = [
    RetirementWarning('no_warning', 'None'),
    RetirementWarning('1_week_warning', '1 Week before retirement'),
    RetirementWarning('2_week_warning', '2 Weeks before retirement'),
    RetirementWarning('30_day_warning', '30 Days before retirement')
]
示例#12
0
from cfme.common.vm import VM
from cfme.configure.configuration import VMwareConsoleSupport
from cfme.infrastructure.provider import InfraProvider
from utils import testgen, version, ssh
from utils.appliance.implementations.ui import navigate_to
from utils.conf import credentials
from utils.log import logger
from utils.providers import ProviderFilter
from wait_for import wait_for

pytestmark = pytest.mark.usefixtures('setup_provider')

# Parametrize over infra providers flagged as supporting the WebMKS console.
pytest_generate_tests = testgen.generate(
    gen_func=testgen.providers,
    filters=[
        ProviderFilter(classes=[InfraProvider],
                       required_flags=['webmks_console'])
    ],
    scope='module')


@pytest.yield_fixture(scope="function")
def vm_obj(request, provider, setup_provider, console_template, vm_name):
    """VM creation/deletion fixture.

    Create a VM on the provider with the given template, and return the vm_obj.
    Also, remove VM from provider using nested function _delete_vm
    after the test is completed.
    """
    vm_obj = VM.factory(vm_name, provider, template_name=console_template)
    vm_obj.create_on_provider(timeout=2400,
                              find_in_cfme=True,
示例#13
0
def setup_perf_provider(request, use_global_filters=True):
    """Set up (or reuse) one provider tagged 'perf', or skip the test."""
    perf_filter = ProviderFilter(required_tags=['perf'])
    return setup_one_or_skip(
        request, filters=[perf_filter], use_global_filters=use_global_filters)
示例#14
0
from cfme.configure.configuration import VMwareConsoleSupport
from cfme.infrastructure.provider.virtualcenter import VMwareProvider
from cfme.configure import configuration
from cfme.common.vm import VM
from utils import testgen, version, ssh
from utils.appliance.implementations.ui import navigate_to
from utils.log import logger
from utils.conf import credentials
from utils.providers import ProviderFilter
from wait_for import wait_for

pytestmark = pytest.mark.usefixtures('setup_provider')

# Parametrize over cloud/infra providers flagged for the HTML5 console.
pytest_generate_tests = testgen.generate(
    gen_func=testgen.providers,
    filters=[ProviderFilter(classes=[CloudInfraProvider], required_flags=['html5_console'])],
    scope='module'
)


@pytest.fixture(scope="function")
def vm_obj(request, provider, setup_provider, console_template, vm_name):
    """
    Create a VM on the provider with the given template, and return the vm_obj.

    Also, it will remove VM from provider using nested function _delete_vm
    after the test is completed.

    NOTE(review): as visible here the fixture only constructs the VM object —
    the creation/return and the _delete_vm finalizer mentioned above appear to
    be truncated; confirm against the original file.
    """
    vm_obj = VM.factory(vm_name, provider, template_name=console_template)
def a_provider(request):
    """Set up an infra provider whose yaml defines datacenters and clusters."""
    pf = ProviderFilter(
        classes=[InfraProvider], required_fields=['datacenters', 'clusters'])
    return setup_one_or_skip(request, filters=[pf])
示例#16
0
def setup_one_by_class_or_skip(request, prov_class, use_global_filters=True):
    """Set up (or reuse) one provider of the given class, or skip the test."""
    class_filter = ProviderFilter(classes=[prov_class])
    return setup_one_or_skip(
        request, filters=[class_filter], use_global_filters=use_global_filters)
示例#17
0
def a_provider(request):
    """Set up a cloud provider whose provisioning data includes a stack."""
    stack_filter = ProviderFilter(
        classes=[CloudProvider], required_fields=[['provisioning', 'stack']])
    return setup_one_or_skip(request, filters=[stack_filter])
示例#18
0
from utils.log import logger
from utils.net import net_check
from utils.providers import ProviderFilter
from utils.ssh import SSHClient
from utils.update import update
from utils.wait import wait_for

pytestmark = [
    pytest.mark.long_running,
    pytest.mark.meta(server_roles=["+automate", "+notifier"]),
    pytest.mark.tier(3), test_requirements.alert
]

# Provider types used for C&U-based alert tests.
CANDU_PROVIDER_TYPES = [VMwareProvider]  # TODO: rhevm

# All infra providers, minus SCVMM (excluded via the inverted filter pf2).
pf1 = ProviderFilter(classes=[InfraProvider])
pf2 = ProviderFilter(classes=[SCVMMProvider], inverted=True)
pytest_generate_tests = testgen.generate(gen_func=testgen.providers,
                                         filters=[pf1, pf2],
                                         scope="module")


def wait_for_alert(smtp, alert, delay=None, additional_checks=None):
    """DRY waiting function

    Args:
        smtp: smtp_test funcarg
        alert: Alert name
        delay: Optional delay to pass to wait_for
        additional_checks: Additional checks to perform on the mails. Keys are names of the mail
            sections, values the values to look for.